allow openai provider config to be used for named OpenAI-compatible providers

This commit is contained in:
Michael Neale
2025-11-10 16:12:33 +11:00
parent 7bb36618d8
commit 81cd956c20
4 changed files with 61 additions and 2 deletions

View File

@@ -15,6 +15,25 @@ max_tokens = 4096 # Per-request output limit (how many tokens the model can gen
temperature = 0.1 temperature = 0.1
use_oauth = true use_oauth = true
# Multiple OpenAI-compatible providers can be configured with custom names
# Each provider gets its own section under [providers.openai_compatible.<name>]
# [providers.openai_compatible.openrouter]
# api_key = "your-openrouter-api-key"
# model = "anthropic/claude-3.5-sonnet"
# base_url = "https://openrouter.ai/api/v1"
# max_tokens = 4096
# temperature = 0.1
# [providers.openai_compatible.groq]
# api_key = "your-groq-api-key"
# model = "llama-3.3-70b-versatile"
# base_url = "https://api.groq.com/openai/v1"
# max_tokens = 4096
# temperature = 0.1
# To use one of these providers, set default_provider to the name you chose:
# default_provider = "openrouter"
[agent] [agent]
fallback_default_max_tokens = 8192 fallback_default_max_tokens = 8192
# max_context_length: Override the context window size for all providers # max_context_length: Override the context window size for all providers

View File

@@ -14,6 +14,9 @@ pub struct Config {
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProvidersConfig { pub struct ProvidersConfig {
pub openai: Option<OpenAIConfig>, pub openai: Option<OpenAIConfig>,
/// Multiple named OpenAI-compatible providers (e.g., openrouter, groq, etc.)
#[serde(default)]
pub openai_compatible: std::collections::HashMap<String, OpenAIConfig>,
pub anthropic: Option<AnthropicConfig>, pub anthropic: Option<AnthropicConfig>,
pub databricks: Option<DatabricksConfig>, pub databricks: Option<DatabricksConfig>,
pub embedded: Option<EmbeddedConfig>, pub embedded: Option<EmbeddedConfig>,
@@ -121,6 +124,7 @@ impl Default for Config {
Self { Self {
providers: ProvidersConfig { providers: ProvidersConfig {
openai: None, openai: None,
openai_compatible: std::collections::HashMap::new(),
anthropic: None, anthropic: None,
databricks: Some(DatabricksConfig { databricks: Some(DatabricksConfig {
host: "https://your-workspace.cloud.databricks.com".to_string(), host: "https://your-workspace.cloud.databricks.com".to_string(),
@@ -239,6 +243,7 @@ impl Config {
Self { Self {
providers: ProvidersConfig { providers: ProvidersConfig {
openai: None, openai: None,
openai_compatible: std::collections::HashMap::new(),
anthropic: None, anthropic: None,
databricks: None, databricks: None,
embedded: Some(EmbeddedConfig { embedded: Some(EmbeddedConfig {

View File

@@ -875,6 +875,21 @@ impl<W: UiWriter> Agent<W> {
} }
} }
// Register OpenAI-compatible providers (e.g., OpenRouter, Groq, etc.)
for (name, openai_config) in &config.providers.openai_compatible {
if providers_to_register.contains(name) {
let openai_provider = g3_providers::OpenAIProvider::new_with_name(
name.clone(),
openai_config.api_key.clone(),
Some(openai_config.model.clone()),
openai_config.base_url.clone(),
openai_config.max_tokens,
openai_config.temperature,
)?;
providers.register(openai_provider);
}
}
// Register Anthropic provider if configured AND it's the default provider // Register Anthropic provider if configured AND it's the default provider
if let Some(anthropic_config) = &config.providers.anthropic { if let Some(anthropic_config) = &config.providers.anthropic {
if providers_to_register.contains(&"anthropic".to_string()) { if providers_to_register.contains(&"anthropic".to_string()) {

View File

@@ -22,6 +22,7 @@ pub struct OpenAIProvider {
base_url: String, base_url: String,
max_tokens: Option<u32>, max_tokens: Option<u32>,
_temperature: Option<f32>, _temperature: Option<f32>,
name: String,
} }
impl OpenAIProvider { impl OpenAIProvider {
@@ -31,6 +32,24 @@ impl OpenAIProvider {
base_url: Option<String>, base_url: Option<String>,
max_tokens: Option<u32>, max_tokens: Option<u32>,
temperature: Option<f32>, temperature: Option<f32>,
) -> Result<Self> {
Self::new_with_name(
"openai".to_string(),
api_key,
model,
base_url,
max_tokens,
temperature,
)
}
pub fn new_with_name(
name: String,
api_key: String,
model: Option<String>,
base_url: Option<String>,
max_tokens: Option<u32>,
temperature: Option<f32>,
) -> Result<Self> { ) -> Result<Self> {
Ok(Self { Ok(Self {
client: Client::new(), client: Client::new(),
@@ -39,6 +58,7 @@ impl OpenAIProvider {
base_url: base_url.unwrap_or_else(|| "https://api.openai.com/v1".to_string()), base_url: base_url.unwrap_or_else(|| "https://api.openai.com/v1".to_string()),
max_tokens, max_tokens,
_temperature: temperature, _temperature: temperature,
name,
}) })
} }
@@ -353,7 +373,7 @@ impl LLMProvider for OpenAIProvider {
} }
fn name(&self) -> &str { fn name(&self) -> &str {
"openai" &self.name
} }
fn model(&self) -> &str { fn model(&self) -> &str {
@@ -492,4 +512,4 @@ struct OpenAIDeltaToolCall {
struct OpenAIDeltaFunction { struct OpenAIDeltaFunction {
name: Option<String>, name: Option<String>,
arguments: Option<String>, arguments: Option<String>,
} }