Remove unused code to eliminate build warnings

- Remove unused SYSTEM_PROMPT_FOR_NATIVE_TOOL_USE and SYSTEM_PROMPT_FOR_NON_NATIVE_TOOL_USE constants
- Remove unused gpu_layers field from EmbeddedProvider struct
- Remove unused clean_stop_sequences method from EmbeddedProvider
This commit is contained in:
Dhanji R. Prasanna
2026-01-28 10:01:44 +11:00
parent a902be1562
commit ba6e1f9896
2 changed files with 0 additions and 26 deletions

View File

@@ -275,10 +275,6 @@ This ensures the TODO list is tracked against the specific version of requiremen
// ============================================================================ // ============================================================================
/// System prompt for providers with native tool calling (Anthropic, OpenAI, etc.) /// System prompt for providers with native tool calling (Anthropic, OpenAI, etc.)
/// Note: This is kept for backwards compatibility but the function is preferred
pub const SYSTEM_PROMPT_FOR_NATIVE_TOOL_USE: &str = "";
/// Generate system prompt for native tool calling providers
pub fn get_system_prompt_for_native() -> String { pub fn get_system_prompt_for_native() -> String {
format!( format!(
"{}\n\n{}\n\n{}\n\n{}\n\n{}\n\n{}", "{}\n\n{}\n\n{}\n\n{}\n\n{}\n\n{}",
@@ -292,10 +288,6 @@ pub fn get_system_prompt_for_native() -> String {
} }
/// System prompt for providers without native tool calling (embedded models) /// System prompt for providers without native tool calling (embedded models)
/// Note: This is kept for backwards compatibility but the function is preferred
pub const SYSTEM_PROMPT_FOR_NON_NATIVE_TOOL_USE: &str = "";
/// Generate system prompt for non-native tool calling providers (embedded models)
pub fn get_system_prompt_for_non_native() -> String { pub fn get_system_prompt_for_non_native() -> String {
format!( format!(
"{}\n\n{}\n\n{}\n\n{}{}\n\n{}\n\n{}\n\n{}", "{}\n\n{}\n\n{}\n\n{}{}\n\n{}\n\n{}\n\n{}",

View File

@@ -42,8 +42,6 @@ pub struct EmbeddedProvider {
temperature: f32, temperature: f32,
/// Context window size /// Context window size
context_length: u32, context_length: u32,
/// Number of GPU layers
gpu_layers: u32,
/// Number of threads /// Number of threads
threads: Option<u32>, threads: Option<u32>,
} }
@@ -138,7 +136,6 @@ impl EmbeddedProvider {
max_tokens, max_tokens,
temperature: temperature.unwrap_or(0.1), temperature: temperature.unwrap_or(0.1),
context_length: context_size, context_length: context_size,
gpu_layers: n_gpu_layers,
threads, threads,
}) })
} }
@@ -341,21 +338,6 @@ impl EmbeddedProvider {
} }
} }
/// Clean stop sequences from generated text.
///
/// Truncates `text` at the earliest occurrence of any configured stop
/// sequence and trims surrounding whitespace from the result.
///
/// Scanning all sequences and cutting at the minimum position (rather than
/// stopping at the first sequence that matches, in list order) ensures no
/// stop marker survives in the output when a later-listed sequence appears
/// earlier in the generated text.
// NOTE(review): assumes `get_stop_sequences` returns the full set of
// model-specific stop markers — confirm against its definition.
fn clean_stop_sequences(&self, text: &str) -> String {
    let stop_sequences = self.get_stop_sequences();
    // Earliest position in `text` at which any stop sequence begins, if any.
    let cut = stop_sequences
        .iter()
        .filter_map(|stop_seq| text.find(stop_seq))
        .min();
    match cut {
        Some(pos) => text[..pos].trim().to_string(),
        None => text.trim().to_string(),
    }
}
/// Get the effective max tokens for generation /// Get the effective max tokens for generation
fn effective_max_tokens(&self) -> u32 { fn effective_max_tokens(&self) -> u32 {
self.max_tokens self.max_tokens