cleaned up logging

This commit is contained in:
Dhanji Prasanna
2025-09-15 08:40:27 +10:00
parent 0dc3b42f38
commit 45970824d9
4 changed files with 72 additions and 25 deletions

13
Cargo.lock generated
View File

@@ -1264,6 +1264,15 @@ version = "0.4.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
[[package]]
name = "matchers"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
dependencies = [
"regex-automata",
]
[[package]]
name = "memchr"
version = "2.7.5"
@@ -2302,10 +2311,14 @@ version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex-automata",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]

View File

@@ -11,7 +11,7 @@ clap = { workspace = true }
tokio = { workspace = true }
anyhow = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true } tracing-subscriber = { workspace = true, features = ["env-filter"] }
serde = { workspace = true }
serde_json = { workspace = true }
rustyline = "17.0.1"

View File

@@ -36,14 +36,32 @@ pub struct Cli {
pub async fn run() -> Result<()> {
let cli = Cli::parse();
// Initialize logging // Initialize logging with filtering
let level = if cli.verbose { use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
tracing::Level::DEBUG
// Create a filter that suppresses llama_cpp logs unless in verbose mode
let filter = if cli.verbose {
EnvFilter::from_default_env()
.add_directive(format!("{}=debug", env!("CARGO_PKG_NAME")).parse().unwrap())
.add_directive("g3_core=debug".parse().unwrap())
.add_directive("g3_cli=debug".parse().unwrap())
.add_directive("g3_execution=debug".parse().unwrap())
.add_directive("g3_providers=debug".parse().unwrap())
} else {
tracing::Level::INFO EnvFilter::from_default_env()
.add_directive(format!("{}=info", env!("CARGO_PKG_NAME")).parse().unwrap())
.add_directive("g3_core=info".parse().unwrap())
.add_directive("g3_cli=info".parse().unwrap())
.add_directive("g3_execution=info".parse().unwrap())
.add_directive("g3_providers=info".parse().unwrap())
.add_directive("llama_cpp=off".parse().unwrap()) // Suppress all llama_cpp logs
.add_directive("llama=off".parse().unwrap()) // Suppress all llama.cpp logs
};
tracing_subscriber::fmt().with_max_level(level).init(); tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer())
.with(filter)
.init();
info!("Starting G3 AI Coding Agent");

View File

@@ -151,8 +151,13 @@ impl StreamingToolParser {
// Try to fix mixed quote issues (single quotes in JSON)
let fixed_mixed_quotes = fix_mixed_quotes_in_json(&json_str);
if fixed_mixed_quotes != json_str {
if let Ok(tool_call) = serde_json::from_str::<ToolCall>(&fixed_mixed_quotes) { if let Ok(tool_call) =
info!("Successfully parsed tool call after fixing mixed quotes: {:?}", tool_call); serde_json::from_str::<ToolCall>(&fixed_mixed_quotes)
{
info!(
"Successfully parsed tool call after fixing mixed quotes: {:?}",
tool_call
);
// Reset parser state
self.in_tool_call = false;
self.tool_start_pos = None;
@@ -160,7 +165,10 @@ impl StreamingToolParser {
return Some((tool_call, end_pos));
} else {
info!("Failed to parse JSON even after fixing mixed quotes: {}", fixed_mixed_quotes); info!(
"Failed to parse JSON even after fixing mixed quotes: {}",
fixed_mixed_quotes
);
}
} else {
info!("Failed to parse JSON (no fixes applied): {}", json_str);
@@ -442,7 +450,7 @@ The tool will execute immediately and you'll receive the result (success or erro
- Format: {{\"tool\": \"shell\", \"args\": {{\"command\": \"your_command_here\"}}}}
- Example: {{\"tool\": \"shell\", \"args\": {{\"command\": \"ls ~/Downloads\"}}}}
- **final_output**: Signal task completion with a summary of work done in markdown format - **final_output**: Signal task completion with a detailed summary of work done in markdown format
- Format: {{\"tool\": \"final_output\", \"args\": {{\"summary\": \"what_was_accomplished\"}}}}
# Instructions
@@ -769,6 +777,22 @@ The tool will execute immediately and you'll receive the result (success or erro
);
}
// Check if this was a final_output tool call - if so, stop the conversation
if tool_call.tool == "final_output" {
// For final_output, don't add the tool call and result to context
// Just add the display content and return immediately
full_response.push_str(final_display_content);
if let Some(summary) = tool_call.args.get("summary") {
if let Some(summary_str) = summary.as_str() {
full_response.push_str(&format!("\n\n=> {}", summary_str));
}
}
println!(); // New line after final output
let ttft =
first_token_time.unwrap_or_else(|| stream_start.elapsed());
return Ok((full_response, ttft));
}
// Closure marker with timing // Closure marker with timing
println!("└─ ⚡️ {}", Self::format_duration(exec_duration));
println!();
@@ -803,14 +827,6 @@ The tool will execute immediately and you'll receive the result (success or erro
tool_call.tool, tool_result
));
// Check if this was a final_output tool call - if so, stop the conversation
if tool_call.tool == "final_output" {
println!(); // New line after final output
let ttft =
first_token_time.unwrap_or_else(|| stream_start.elapsed());
return Ok((full_response, ttft));
}
tool_executed = true;
// Break out of current stream to start a new one with updated context
break;
@@ -907,12 +923,12 @@ The tool will execute immediately and you'll receive the result (success or erro
"final_output" => {
if let Some(summary) = tool_call.args.get("summary") {
if let Some(summary_str) = summary.as_str() {
Ok(format!("📋 Final Output: {}", summary_str)) Ok(format!("{}", summary_str))
} else {
Ok("📋 Task completed".to_string()) Ok(" Task completed".to_string())
}
} else {
Ok("📋 Task completed".to_string()) Ok(" Task completed".to_string())
}
}
_ => {