Add Google Gemini provider support
- Add GeminiProvider with streaming and native tool calling
- Support gemini-2.5-pro, gemini-2.0-flash, gemini-1.5-pro/flash models
- Model-specific context window detection (1M-2M tokens)
- Message conversion: assistant -> model role mapping
- System messages extracted to system_instruction field
- Tool schema conversion with functionCall/functionResponse parts
- SSE streaming with JSON array buffer parsing
- 8 unit tests for conversion and parsing logic
- Register provider in g3-core and validate in g3-cli
This commit is contained in:
@@ -708,6 +708,18 @@ impl<W: UiWriter> Agent<W> {
                 16384 // Conservative default for other Databricks models
             }
         }
+        "gemini" => {
+            // Gemini models - use provider's context_window_size()
+            if let Some(ctx_size) = provider.context_window_size() {
+                debug!(
+                    "Using context window size {} from Gemini provider",
+                    ctx_size
+                );
+                ctx_size
+            } else {
+                1_000_000 // Default for Gemini models
+            }
+        }
         _ => config.agent.fallback_default_max_tokens as u32,
     };
Reference in New Issue
Block a user