Refactor: deduplicate JSON parsing, provider constructors, and identity function

Agent: fowler

Eliminate code-path aliasing and near-duplicates across recent commits:

1. Deduplicate find_json_object_end: Three near-identical copies in
   streaming_parser.rs, context_window.rs, and acd.rs consolidated into
   a single canonical implementation in utils.rs. All callers now route
   through the canonical version. The utils.rs version uses the most
   defensive variant (with found_start guard). (-84 lines)

2. Deduplicate provider constructors: AnthropicProvider::new() and
   GeminiProvider::new() now delegate to their respective new_with_name()
   methods instead of duplicating the full constructor body.
   (OpenAI already delegated.) (-28 lines)

3. Inline convert_cache_control: Removed identity function that just
   cloned CacheControl. Call sites now use .map(|cc| cc.clone())
   directly. (-4 lines)

Net: -65 lines (117 gross deletions offset by 52 lines of delegation/comments added), 0 behavior changes, all 683 library tests pass.
This commit is contained in:
Dhanji R. Prasanna
2026-02-13 12:37:09 +11:00
parent bc98c65956
commit a7e0b0ef9e
6 changed files with 52 additions and 117 deletions

View File

@@ -145,26 +145,7 @@ impl AnthropicProvider {
enable_1m_context: Option<bool>,
thinking_budget_tokens: Option<u32>,
) -> Result<Self> {
let client = Client::builder()
.timeout(Duration::from_secs(300))
.build()
.map_err(|e| anyhow!("Failed to create HTTP client: {}", e))?;
let model = model.unwrap_or_else(|| "claude-3-5-sonnet-20241022".to_string());
debug!("Initialized Anthropic provider with model: {}", model);
Ok(Self {
client,
name: "anthropic".to_string(),
api_key,
model,
max_tokens: max_tokens.unwrap_or(32768),
temperature: temperature.unwrap_or(0.1),
cache_config,
enable_1m_context: enable_1m_context.unwrap_or(false),
thinking_budget_tokens,
})
Self::new_with_name("anthropic".to_string(), api_key, model, max_tokens, temperature, cache_config, enable_1m_context, thinking_budget_tokens)
}
/// Create a new AnthropicProvider with a custom name (e.g., "anthropic.default")
@@ -222,10 +203,7 @@ impl AnthropicProvider {
builder
}
fn convert_cache_control(cache_control: &crate::CacheControl) -> crate::CacheControl {
// Anthropic uses the same format, so just clone it
cache_control.clone()
}
// Anthropic uses the same CacheControl format — no conversion needed, just clone at call sites.
fn convert_tools(&self, tools: &[Tool]) -> Vec<AnthropicTool> {
tools
@@ -312,7 +290,7 @@ impl AnthropicProvider {
cache_control: message
.cache_control
.as_ref()
.map(Self::convert_cache_control),
.map(|cc| cc.clone()),
});
} else {
// Regular user message: images as top-level blocks, then text
@@ -330,7 +308,7 @@ impl AnthropicProvider {
cache_control: message
.cache_control
.as_ref()
.map(Self::convert_cache_control),
.map(|cc| cc.clone()),
});
}
@@ -349,7 +327,7 @@ impl AnthropicProvider {
cache_control: message
.cache_control
.as_ref()
.map(Self::convert_cache_control),
.map(|cc| cc.clone()),
});
}

View File

@@ -77,14 +77,7 @@ impl GeminiProvider {
max_tokens: Option<u32>,
temperature: Option<f32>,
) -> Result<Self> {
Ok(Self {
client: Client::new(),
api_key,
model: model.unwrap_or_else(|| "gemini-2.0-flash".to_string()),
max_tokens: max_tokens.unwrap_or(16384),
temperature: temperature.unwrap_or(0.1),
name: "gemini".to_string(),
})
Self::new_with_name("gemini".to_string(), api_key, model, max_tokens, temperature)
}
pub fn new_with_name(