Skip to content

Commit 73c9263

Browse files
committed
feat: Add support for Anthropic Claude 4 models
fix: Error handling fixes
chore: Enhance Huly-Coder configuration; add detailed sections for providers, models, and workspace settings
1 parent 7af93c2 commit 73c9263

File tree

6 files changed

+308
-202
lines changed

6 files changed

+308
-202
lines changed

huly-coder.yaml

Lines changed: 59 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,46 +1,79 @@
1-
#
2-
# You can override config in ~\huly-coder.yaml file
3-
# or huly-coder-local.yaml file
4-
#
1+
# Huly-Coder Configuration
2+
# -----------------------
3+
# This configuration can be overridden in:
4+
# - ~/huly-coder.yaml
5+
# - huly-coder-local.yaml
56

6-
### Example Anthropic config
7-
# provider: Anthropic
8-
# model: claude-3-5-sonnet-latest
9-
# provider_api_key: sk-xxxxxxxxxxxxxxxxxxxxx
10-
11-
#provider: LMStudio
7+
#---------------------------------------
8+
# AI Provider Configuration
9+
#---------------------------------------
10+
# Supported providers:
11+
# - OpenRouter (default)
12+
# - Anthropic
13+
# - OpenAI
14+
# - LMStudio
1215
provider: OpenRouter
13-
#model: qwen3-8b
14-
#model: deepseek-r1-distill-llama-8b
15-
#model: gpt-4o
16+
17+
# Model Configuration
18+
# Available models depend on the selected provider
19+
# OpenRouter models:
20+
# - anthropic/claude-3.5-sonnet (default)
21+
# - gpt-4o
22+
# Local models:
23+
# - qwen3-8b
24+
# - deepseek-r1-distill-llama-8b
1625
model: anthropic/claude-3.5-sonnet
26+
27+
# Provider API Configuration
1728
#provider_api_key: sk-xxxxxxxxxxxxxxxxxxxxx
1829
#provider_base_url: http://127.0.0.1:1234/v1
30+
31+
#---------------------------------------
32+
# Workspace Configuration
33+
#---------------------------------------
1934
workspace: ./target/workspace
35+
36+
#---------------------------------------
37+
# Web Interaction Configuration
38+
#---------------------------------------
39+
# Web Fetch Options:
40+
# - direct (default): Direct web access
41+
# - chrome: Use Chrome browser
42+
web_fetch: direct
43+
44+
# Web Search Configuration
45+
# Web Search Options:
46+
# - Brave Search (default)
47+
# type: brave
48+
# api_key: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
49+
# - SearX (alternative)
50+
# type: searx
51+
# url: http://localhost:8080/
52+
web_search:
53+
type: brave
54+
api_key: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # NOTE(review): a real Brave Search API key was committed here — revoke/rotate it and supply the key via huly-coder-local.yaml or an environment variable instead
55+
56+
#---------------------------------------
57+
# MCP (Model Context Protocol) Configuration
58+
#---------------------------------------
2059
#mcp:
2160
# servers:
22-
# # simple MCP server from https://github.com/modelcontextprotocol/servers/tree/main/src/fetch
61+
# # Fetch server configuration
2362
# fetch:
2463
# type: stdio
2564
# command: docker
2665
# args: [ "run", "-i", "--rm", "mcp/fetch", "--ignore-robots-txt" ]
2766
# protocol_version: 2024-11-05
67+
#
68+
# # Weather server configuration
2869
# weather:
2970
# type: sse
3071
# url: http://127.0.0.1:8080/sse
3172
# protocol_version: 2024-11-05
3273

33-
web_fetch: direct
34-
#web_fetch: chrome
35-
36-
#web_search:
37-
# type: searx
38-
# url: http://localhost:8080/
39-
40-
web_search:
41-
type: brave
42-
api_key: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # NOTE(review): real API key redacted — rotate the leaked credential
43-
74+
#---------------------------------------
75+
# AI Assistant Personality Configuration
76+
#---------------------------------------
4477
user_instructions: |
45-
You are dedicated software engineer working alone. Youre free to choose any technology, approach, and solution.
78+
You are a dedicated software engineer working alone. You're free to choose any technology, approach, and solution.
4679
If in doubt please choose the best way you think. Your goal is to build working software based on user request.

src/agent/mod.rs

Lines changed: 18 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
use std::fmt::Display;
12
// Copyright © 2025 Huly Labs. Use of this source code is governed by the MIT license.
23
use std::sync::Arc;
34

@@ -79,12 +80,19 @@ struct BuildAgentContext<'a> {
7980

8081
#[derive(Debug, thiserror::Error)]
8182
pub enum AgentError {
82-
#[error("ToolSetError: {0}")]
8383
ToolSetError(#[from] ToolSetError),
84-
#[error("CompletionError: {0}")]
8584
CompletionError(#[from] CompletionError),
8685
}
8786

87+
impl Display for AgentError {
88+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
89+
match self {
90+
Self::ToolSetError(e) => write!(f, "{e}"),
91+
Self::CompletionError(e) => write!(f, "CompletionError: {e}"),
92+
}
93+
}
94+
}
95+
8896
impl Agent {
8997
pub fn new(
9098
config: Config,
@@ -270,7 +278,8 @@ impl Agent {
270278
.expect("provider_api_key is required for Anthropic"),
271279
)
272280
.build()
273-
.agent(&context.config.model);
281+
.agent(&context.config.model)
282+
.max_tokens(20000);
274283
Ok(Box::new(
275284
Self::configure_agent(agent_builder, context).await?.build(),
276285
))
@@ -482,9 +491,11 @@ impl Agent {
482491
let response: CompletionResponse<
483492
Option<rig::providers::openai::StreamingCompletionResponse>,
484493
> = From::from(self.stream.take().unwrap());
485-
let usage = response.raw_response.unwrap().usage;
486-
tracing::info!("Usage: {:?}", usage);
487-
self.current_tokens = usage.total_tokens as u32;
494+
if let Some(raw_response) = response.raw_response {
495+
let usage = raw_response.usage;
496+
tracing::info!("Usage: {:?}", usage);
497+
self.current_tokens = usage.total_tokens as u32;
498+
}
488499
self.assistant_content = None;
489500
if matches!(self.state, AgentState::Completed(false)) {
490501
self.set_state(AgentState::Completed(true));
@@ -570,7 +581,7 @@ impl Agent {
570581
tracing::debug!("persist_history");
571582
persist_history(&self.messages);
572583
tracing::error!("Error processing messages: {}", e);
573-
self.set_state(AgentState::Error(format!("Error: {}", e)));
584+
self.set_state(AgentState::Error(format!("{e}")));
574585
}
575586
tokio::time::sleep(std::time::Duration::from_millis(10)).await;
576587
}

0 commit comments

Comments
 (0)