Fix incorrect max_tokens and context_window logic

for non-Databricks code
This commit is contained in:
Jochen
2025-11-19 13:51:16 +11:00
parent 3f21bdc7b2
commit 1069664e16
4 changed files with 119 additions and 44 deletions

View File

@@ -22,12 +22,13 @@ use_oauth = true
[providers.anthropic]
api_key = "your-anthropic-api-key"
model = "claude-3-haiku-20240307" # Using a faster model for player
model = "claude-sonnet-4-5"
max_tokens = 4096
temperature = 0.3 # Slightly higher temperature for more creative implementations
# cache_config = "ephemeral" # Optional: Enable prompt caching
# Options: "ephemeral", "5minute", "1hour"
# Reduces costs and latency for repeated prompts. Uses Anthropic's prompt caching with different TTLs.
# enable_1m_context = true # optional, more expensive
[agent]
fallback_default_max_tokens = 8192