Fix temperature param + add thinking for anthropic

The temperature parameter was previously not passed through to the LLM; it is now forwarded from the provider config.
Anthropic models are now also supported in 'thinking' mode via a configurable thinking token budget.
This commit is contained in:
Jochen
2025-12-02 17:24:55 +11:00
parent 9ee0468b87
commit ae16243f49
4 changed files with 98 additions and 4 deletions

View File

@@ -950,6 +950,7 @@ impl<W: UiWriter> Agent<W> {
anthropic_config.temperature,
anthropic_config.cache_config.clone(),
anthropic_config.enable_1m_context,
anthropic_config.thinking_budget_tokens,
)?;
providers.register(anthropic_provider);
}
@@ -1167,6 +1168,17 @@ impl<W: UiWriter> Agent<W> {
}
}
/// Get the configured temperature for a provider from top-level config
/// Get the configured temperature for a provider from top-level config
///
/// Returns `None` when the provider name is unknown, when the provider
/// section is absent from the config, or when no temperature is set.
fn provider_temperature(config: &Config, provider_name: &str) -> Option<f32> {
    let providers = &config.providers;
    // Each provider section is optional; `and_then` collapses a missing
    // section and a missing temperature into the same `None`.
    match provider_name {
        "anthropic" => providers.anthropic.as_ref().and_then(|c| c.temperature),
        "openai" => providers.openai.as_ref().and_then(|c| c.temperature),
        "databricks" => providers.databricks.as_ref().and_then(|c| c.temperature),
        "embedded" => providers.embedded.as_ref().and_then(|c| c.temperature),
        _ => None,
    }
}
/// Resolve the max_tokens to use for a given provider, applying fallbacks
fn resolve_max_tokens(&self, provider_name: &str) -> u32 {
match provider_name {
@@ -1179,6 +1191,16 @@ impl<W: UiWriter> Agent<W> {
}
}
/// Resolve the temperature to use for a given provider, applying fallbacks
/// Resolve the temperature to use for a given provider, applying fallbacks
///
/// Looks up the provider's configured temperature in the top-level config
/// and falls back to 0.1 when none is set. The previous `match` had a
/// dedicated `"databricks"` arm that was identical to the catch-all, so it
/// collapses to a single lookup.
fn resolve_temperature(&self, provider_name: &str) -> f32 {
    Self::provider_temperature(&self.config, provider_name).unwrap_or(0.1)
}
/// Print provider diagnostics through the UiWriter for visibility
pub fn print_provider_banner(&self, role_label: &str) {
if let Ok((provider_name, model)) = self.get_provider_info() {
@@ -1562,7 +1584,7 @@ impl<W: UiWriter> Agent<W> {
let request = CompletionRequest {
messages,
max_tokens,
temperature: Some(0.1),
temperature: Some(self.resolve_temperature(&provider_name)),
stream: true, // Enable streaming
tools,
};