From 81cd956c20d608283e5f3587ec06bee464816782 Mon Sep 17 00:00:00 2001
From: Michael Neale
Date: Mon, 10 Nov 2025 16:12:33 +1100
Subject: [PATCH] allow openai to be used to name named compatible providers

---
 config.example.toml               | 19 +++++++++++++++++++
 crates/g3-config/src/lib.rs       |  5 +++++
 crates/g3-core/src/lib.rs         | 15 +++++++++++++++
 crates/g3-providers/src/openai.rs | 24 ++++++++++++++++++++++--
 4 files changed, 61 insertions(+), 2 deletions(-)

diff --git a/config.example.toml b/config.example.toml
index 330ff7e..1bc0893 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -15,6 +15,25 @@ max_tokens = 4096 # Per-request output limit (how many tokens the model can gen
 temperature = 0.1
 use_oauth = true
 
+# Multiple OpenAI-compatible providers can be configured with custom names
+# Each provider gets its own section under [providers.openai_compatible.<name>]
+# [providers.openai_compatible.openrouter]
+# api_key = "your-openrouter-api-key"
+# model = "anthropic/claude-3.5-sonnet"
+# base_url = "https://openrouter.ai/api/v1"
+# max_tokens = 4096
+# temperature = 0.1
+
+# [providers.openai_compatible.groq]
+# api_key = "your-groq-api-key"
+# model = "llama-3.3-70b-versatile"
+# base_url = "https://api.groq.com/openai/v1"
+# max_tokens = 4096
+# temperature = 0.1
+
+# To use one of these providers, set default_provider to the name you chose:
+# default_provider = "openrouter"
+
 [agent]
 fallback_default_max_tokens = 8192
 # max_context_length: Override the context window size for all providers
diff --git a/crates/g3-config/src/lib.rs b/crates/g3-config/src/lib.rs
index 189a1ee..c860481 100644
--- a/crates/g3-config/src/lib.rs
+++ b/crates/g3-config/src/lib.rs
@@ -14,6 +14,9 @@ pub struct Config {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct ProvidersConfig {
     pub openai: Option<OpenAIConfig>,
+    /// Multiple named OpenAI-compatible providers (e.g., openrouter, groq, etc.)
+    #[serde(default)]
+    pub openai_compatible: std::collections::HashMap<String, OpenAIConfig>,
     pub anthropic: Option<AnthropicConfig>,
     pub databricks: Option<DatabricksConfig>,
     pub embedded: Option<EmbeddedConfig>,
@@ -121,6 +124,7 @@ impl Default for Config {
         Self {
             providers: ProvidersConfig {
                 openai: None,
+                openai_compatible: std::collections::HashMap::new(),
                 anthropic: None,
                 databricks: Some(DatabricksConfig {
                     host: "https://your-workspace.cloud.databricks.com".to_string(),
@@ -239,6 +243,7 @@ impl Config {
         Self {
             providers: ProvidersConfig {
                 openai: None,
+                openai_compatible: std::collections::HashMap::new(),
                 anthropic: None,
                 databricks: None,
                 embedded: Some(EmbeddedConfig {
diff --git a/crates/g3-core/src/lib.rs b/crates/g3-core/src/lib.rs
index 0560534..1ac374e 100644
--- a/crates/g3-core/src/lib.rs
+++ b/crates/g3-core/src/lib.rs
@@ -875,6 +875,21 @@ impl Agent {
             }
         }
 
+        // Register OpenAI-compatible providers (e.g., OpenRouter, Groq, etc.)
+        for (name, openai_config) in &config.providers.openai_compatible {
+            if providers_to_register.contains(name) {
+                let openai_provider = g3_providers::OpenAIProvider::new_with_name(
+                    name.clone(),
+                    openai_config.api_key.clone(),
+                    Some(openai_config.model.clone()),
+                    openai_config.base_url.clone(),
+                    openai_config.max_tokens,
+                    openai_config.temperature,
+                )?;
+                providers.register(openai_provider);
+            }
+        }
+
         // Register Anthropic provider if configured AND it's the default provider
         if let Some(anthropic_config) = &config.providers.anthropic {
             if providers_to_register.contains(&"anthropic".to_string()) {
diff --git a/crates/g3-providers/src/openai.rs b/crates/g3-providers/src/openai.rs
index 52ad6b0..638a50e 100644
--- a/crates/g3-providers/src/openai.rs
+++ b/crates/g3-providers/src/openai.rs
@@ -22,6 +22,7 @@ pub struct OpenAIProvider {
     base_url: String,
     max_tokens: Option<u32>,
     _temperature: Option<f32>,
+    name: String,
 }
 
 impl OpenAIProvider {
@@ -31,6 +32,24 @@ impl OpenAIProvider {
         base_url: Option<String>,
         max_tokens: Option<u32>,
         temperature: Option<f32>,
+    ) -> Result<Self> {
+        Self::new_with_name(
+            "openai".to_string(),
+            api_key,
+            model,
+            base_url,
+            max_tokens,
+            temperature,
+        )
+    }
+
+    pub fn new_with_name(
+        name: String,
+        api_key: String,
+        model: Option<String>,
+        base_url: Option<String>,
+        max_tokens: Option<u32>,
+        temperature: Option<f32>,
     ) -> Result<Self> {
         Ok(Self {
             client: Client::new(),
@@ -39,6 +58,7 @@ impl OpenAIProvider {
             base_url: base_url.unwrap_or_else(|| "https://api.openai.com/v1".to_string()),
             max_tokens,
             _temperature: temperature,
+            name,
         })
     }
 
@@ -353,7 +373,7 @@ impl LLMProvider for OpenAIProvider {
     }
 
     fn name(&self) -> &str {
-        "openai"
+        &self.name
     }
 
     fn model(&self) -> &str {
@@ -492,4 +512,4 @@ struct OpenAIDeltaToolCall {
 struct OpenAIDeltaFunction {
     name: Option<String>,
     arguments: Option<String>,
-}
\ No newline at end of file
+}