Move fixed_filter_json from g3-core to g3-cli

Properly separates the UI display concern from the core library:
- fixed_filter_json module now lives in g3-cli (UI layer)
- UiWriter trait gains filter_json_tool_calls() and reset_json_filter() methods
- g3-core delegates filtering to UI layer via trait methods
- Different UiWriter implementations can choose their own filtering behavior
- ConsoleUiWriter filters JSON tool calls for clean terminal display
- MachineUiWriter/NullUiWriter use default pass-through

Benefits:
- Proper separation of concerns
- Core stays clean without display-specific logic
- Testability - filter can be tested independently in g3-cli
This commit is contained in:
Dhanji R. Prasanna
2025-12-22 10:32:21 +11:00
parent fbf31e5f68
commit 01a5284d6d
14 changed files with 297 additions and 183 deletions

View File

@@ -27,6 +27,7 @@ chrono = { version = "0.4", features = ["serde"] }
crossterm = "0.29.0"
ratatui = "0.29"
termimad = "0.34.0"
regex = "1.10"
[dev-dependencies]
tempfile = "3.8"

View File

@@ -1,3 +1,6 @@
// JSON tool call filtering for display (moved from g3-core)
pub mod fixed_filter_json;
use anyhow::Result;
use crossterm::style::{Color, ResetColor, SetForegroundColor};
use std::time::{Duration, Instant};
@@ -152,9 +155,16 @@ fn extract_coach_feedback_from_logs(
.get_session_id()
.ok_or_else(|| anyhow::anyhow!("Coach agent has no session ID"))?;
// Construct the log file path for this specific coach session
let logs_dir = std::path::Path::new("logs");
let log_file_path = logs_dir.join(format!("g3_session_{}.json", session_id));
// Try new .g3/sessions/<session_id>/session.json path first
let log_file_path = g3_core::get_session_file(&session_id);
// Fall back to old logs/ path if new path doesn't exist
let log_file_path = if log_file_path.exists() {
log_file_path
} else {
let logs_dir = std::path::Path::new("logs");
logs_dir.join(format!("g3_session_{}.json", session_id))
};
// Read the coach agent's specific log file
if log_file_path.exists() {
@@ -2001,7 +2011,7 @@ fn handle_execution_error(e: &anyhow::Error, input: &str, output: &SimpleOutput,
// If it's a stream error, provide helpful guidance
if e.to_string().contains("No response received") || e.to_string().contains("timed out") {
output.print("💡 This may be a temporary issue. Please try again or check the logs for more details.");
output.print(" Log files are saved in the 'logs/' directory.");
output.print(" Log files are saved in the '.g3/sessions/' directory.");
}
}
@@ -2468,7 +2478,7 @@ async fn run_autonomous(
let coach_config = base_config.for_coach()?;
// Reset filter suppression state before creating coach agent
g3_core::fixed_filter_json::reset_fixed_json_tool_state();
crate::fixed_filter_json::reset_fixed_json_tool_state();
let ui_writer = ConsoleUiWriter::new();
let mut coach_agent =

View File

@@ -1,3 +1,4 @@
use crate::fixed_filter_json::{fixed_filter_json_tool_calls, reset_fixed_json_tool_state};
use g3_core::ui_writer::UiWriter;
use std::io::{self, Write};
use termimad::MadSkin;
@@ -350,4 +351,14 @@ impl UiWriter for ConsoleUiWriter {
println!();
println!("\x1b[1;35m━━━━━━━━━━━━━━━\x1b[0m");
}
fn filter_json_tool_calls(&self, content: &str) -> String {
    // Console output strips inline JSON tool-call payloads before display;
    // the unfiltered content is still preserved upstream for logging.
    let display_content = fixed_filter_json_tool_calls(content);
    display_content
}
fn reset_json_filter(&self) {
    // A new response is starting: drop any suppression state the filter
    // carried over from the previous streamed response.
    reset_fixed_json_tool_state()
}
}

View File

@@ -5,7 +5,7 @@
#[cfg(test)]
mod fixed_filter_tests {
use crate::fixed_filter_json::{fixed_filter_json_tool_calls, reset_fixed_json_tool_state};
use g3_cli::fixed_filter_json::{fixed_filter_json_tool_calls, reset_fixed_json_tool_state};
use regex::Regex;
/// Test that regular text without tool calls passes through unchanged.

View File

@@ -35,50 +35,18 @@ pub struct LogParser;
impl LogParser {
/// Parse logs from a workspace directory
pub fn parse_logs(workspace: &Path) -> Result<Vec<LogEntry>> {
let logs_dir = workspace.join("logs");
if !logs_dir.exists() {
return Ok(Vec::new());
}
let mut entries = Vec::new();
// Read all JSON log files
for entry in fs::read_dir(&logs_dir).context("Failed to read logs directory")? {
let entry = entry?;
let path = entry.path();
// Try new .g3/sessions/ directory first
let g3_sessions_dir = workspace.join(".g3").join("sessions");
if g3_sessions_dir.exists() {
Self::parse_sessions_dir(&g3_sessions_dir, &mut entries)?;
}
if path.extension().and_then(|s| s.to_str()) == Some("json") {
if let Ok(content) = fs::read_to_string(&path) {
if let Ok(json) = serde_json::from_str::<Value>(&content) {
// Try to parse as a log session
if let Some(messages) = json.get("messages").and_then(|m| m.as_array()) {
for msg in messages {
entries.push(LogEntry {
timestamp: msg
.get("timestamp")
.and_then(|t| t.as_str())
.and_then(|s| DateTime::parse_from_rfc3339(s).ok())
.map(|dt| dt.with_timezone(&Utc)),
role: msg
.get("role")
.and_then(|r| r.as_str())
.map(String::from),
content: msg
.get("content")
.and_then(|c| c.as_str())
.map(String::from),
tool_calls: msg
.get("tool_calls")
.and_then(|tc| tc.as_array())
.map(|arr| arr.clone()),
raw: msg.clone(),
});
}
}
}
}
}
// Also check old logs/ directory for backwards compatibility
let logs_dir = workspace.join("logs");
if logs_dir.exists() {
Self::parse_logs_dir(&logs_dir, &mut entries)?;
}
// Sort by timestamp
@@ -92,6 +60,76 @@ impl LogParser {
Ok(entries)
}
/// Parse logs from the new .g3/sessions/ directory structure
/// Parse logs from the new `.g3/sessions/` layout: each subdirectory is one
/// session, and its `session.json` (when present) is parsed into `entries`.
fn parse_sessions_dir(sessions_dir: &Path, entries: &mut Vec<LogEntry>) -> Result<()> {
    let dir_iter = fs::read_dir(sessions_dir).context("Failed to read sessions directory")?;
    for item in dir_iter {
        let session_path = item?.path();
        // Skip stray files at this level; only directories are sessions.
        if !session_path.is_dir() {
            continue;
        }
        let session_file = session_path.join("session.json");
        if session_file.exists() {
            Self::parse_session_file(&session_file, entries)?;
        }
    }
    Ok(())
}
/// Parse logs from the old logs/ directory structure
/// Parse logs from the legacy `logs/` layout: every `*.json` file directly
/// under the directory is treated as a session log.
fn parse_logs_dir(logs_dir: &Path, entries: &mut Vec<LogEntry>) -> Result<()> {
    for item in fs::read_dir(logs_dir).context("Failed to read logs directory")? {
        let file_path = item?.path();
        let is_json = file_path.extension().and_then(|ext| ext.to_str()) == Some("json");
        if is_json {
            Self::parse_session_file(&file_path, entries)?;
        }
    }
    Ok(())
}
/// Parse a single session JSON file
/// Parse a single session JSON file, appending one `LogEntry` per message.
///
/// Accepts both on-disk formats: a top-level `"messages"` array, or the newer
/// nested `"context_window.conversation_history"` array. Files that cannot be
/// read or parsed are skipped silently — aggregation across many sessions is
/// deliberately best-effort.
fn parse_session_file(path: &Path, entries: &mut Vec<LogEntry>) -> Result<()> {
    if let Ok(content) = fs::read_to_string(path) {
        if let Ok(json) = serde_json::from_str::<Value>(&content) {
            // Prefer the flat "messages" array; fall back to the nested history.
            let messages = json
                .get("messages")
                .and_then(|m| m.as_array())
                .or_else(|| {
                    json.get("context_window")
                        .and_then(|cw| cw.get("conversation_history"))
                        .and_then(|ch| ch.as_array())
                });
            if let Some(messages) = messages {
                for msg in messages {
                    entries.push(LogEntry {
                        // Timestamps are RFC 3339 strings; anything unparseable
                        // becomes None rather than failing the whole file.
                        timestamp: msg
                            .get("timestamp")
                            .and_then(|t| t.as_str())
                            .and_then(|s| DateTime::parse_from_rfc3339(s).ok())
                            .map(|dt| dt.with_timezone(&Utc)),
                        role: msg
                            .get("role")
                            .and_then(|r| r.as_str())
                            .map(String::from),
                        content: msg
                            .get("content")
                            .and_then(|c| c.as_str())
                            .map(String::from),
                        // `.cloned()` replaces `.map(|arr| arr.clone())` (clippy: map_clone).
                        tool_calls: msg
                            .get("tool_calls")
                            .and_then(|tc| tc.as_array())
                            .cloned(),
                        raw: msg.clone(),
                    });
                }
            }
        }
    }
    Ok(())
}
/// Extract chat messages from log entries
pub fn extract_chat_messages(entries: &[LogEntry]) -> Vec<ChatMessage> {
entries

View File

@@ -139,8 +139,16 @@ fn try_extract_from_session_log(
session_id: &str,
config: &FeedbackExtractionConfig,
) -> Option<String> {
let logs_path = config.logs_dir.clone().unwrap_or_else(logs_dir);
let log_file_path = logs_path.join(format!("g3_session_{}.json", session_id));
// Try new .g3/sessions/<session_id>/session.json path first
let log_file_path = crate::get_session_file(session_id);
// Fall back to old logs/ path if new path doesn't exist
let log_file_path = if log_file_path.exists() {
log_file_path
} else {
let logs_path = config.logs_dir.clone().unwrap_or_else(logs_dir);
logs_path.join(format!("g3_session_{}.json", session_id))
};
if !log_file_path.exists() {
debug!("Session log file not found: {:?}", log_file_path);
@@ -275,8 +283,16 @@ fn try_extract_from_conversation_history(
session_id: &str,
config: &FeedbackExtractionConfig,
) -> Option<String> {
let logs_path = config.logs_dir.clone().unwrap_or_else(logs_dir);
let log_file_path = logs_path.join(format!("g3_session_{}.json", session_id));
// Try new .g3/sessions/<session_id>/session.json path first
let log_file_path = crate::get_session_file(session_id);
// Fall back to old logs/ path if new path doesn't exist
let log_file_path = if log_file_path.exists() {
log_file_path
} else {
let logs_path = config.logs_dir.clone().unwrap_or_else(logs_dir);
logs_path.join(format!("g3_session_{}.json", session_id))
};
if !log_file_path.exists() {
return None;

View File

@@ -19,11 +19,6 @@ pub use prompts::get_agent_system_prompt;
mod task_result_comprehensive_tests;
use crate::ui_writer::UiWriter;
// Make fixed_filter_json public so it can be accessed from g3-cli
pub mod fixed_filter_json;
#[cfg(test)]
mod fixed_filter_tests;
#[cfg(test)]
mod tilde_expansion_tests;
@@ -32,7 +27,6 @@ mod error_handling_test;
mod prompts;
use anyhow::Result;
use chrono::Local;
use g3_computer_control::WebDriverController;
use g3_config::Config;
use g3_execution::CodeExecutor;
@@ -42,9 +36,7 @@ use prompts::{get_system_prompt_for_native, SYSTEM_PROMPT_FOR_NON_NATIVE_TOOL_US
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::fs::OpenOptions;
use std::io::Write;
use std::sync::{Mutex, OnceLock};
use std::time::{Duration, Instant};
use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, warn};
@@ -84,6 +76,52 @@ pub fn logs_dir() -> std::path::PathBuf {
/// Used to direct all logs to the workspace directory
pub const G3_WORKSPACE_PATH_ENV: &str = "G3_WORKSPACE_PATH";
/// Get the base .g3 directory path
/// This is the root for all g3 session data in the current workspace
pub fn get_g3_dir() -> std::path::PathBuf {
    // Prefer an explicit workspace override from the environment; otherwise
    // anchor at the current working directory (empty/relative base if the
    // CWD is unavailable).
    let base = match std::env::var(G3_WORKSPACE_PATH_ENV) {
        Ok(workspace_path) => std::path::PathBuf::from(workspace_path),
        Err(_) => std::env::current_dir().unwrap_or_default(),
    };
    base.join(".g3")
}
/// Get the session directory for a specific session ID
/// Returns .g3/sessions/<session_id>/
pub fn get_session_logs_dir(session_id: &str) -> std::path::PathBuf {
    // Note: the on-disk directory is "sessions" (plural).
    get_g3_dir().join("sessions").join(session_id)
}
/// Ensure the session directory exists for a specific session ID
/// Creates .g3/sessions/<session_id>/ and its subdirectories
pub fn ensure_session_dir(session_id: &str) -> std::io::Result<std::path::PathBuf> {
    let session_dir = get_session_logs_dir(session_id);
    std::fs::create_dir_all(&session_dir)?;
    // Pre-create the subdirectory that holds thinned-out context payloads.
    std::fs::create_dir_all(session_dir.join("thinned"))?;
    Ok(session_dir)
}
/// Get the thinned content directory for a session
/// Returns .g3/sessions/<session_id>/thinned/
pub fn get_thinned_dir(session_id: &str) -> std::path::PathBuf {
    get_session_logs_dir(session_id).join("thinned")
}
/// Get the path to the session.json file for a session
/// Returns .g3/sessions/<session_id>/session.json
pub fn get_session_file(session_id: &str) -> std::path::PathBuf {
    get_session_logs_dir(session_id).join("session.json")
}
/// Get the path to the context summary file for a session
/// Returns .g3/sessions/<session_id>/context_summary.txt
pub fn get_context_summary_file(session_id: &str) -> std::path::PathBuf {
    get_session_logs_dir(session_id).join("context_summary.txt")
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
pub tool: String,
@@ -733,7 +771,8 @@ Format this as a detailed but concise summary that can be used to resume the con
/// Perform context thinning: scan first third of conversation and replace large tool results
/// Returns a summary message about what was thinned
pub fn thin_context(&mut self) -> (String, usize) {
/// If session_id is provided, thinned content is saved to .g3/session/<session_id>/thinned/
pub fn thin_context(&mut self, session_id: Option<&str>) -> (String, usize) {
let current_percentage = self.percentage_used() as u32;
let current_threshold = (current_percentage / 10) * 10;
@@ -748,15 +787,28 @@ Format this as a detailed but concise summary that can be used to resume the con
let mut tool_call_leaned_count = 0;
let mut chars_saved = 0;
// Create ~/tmp directory if it doesn't exist
let tmp_dir = shellexpand::tilde("~/tmp").to_string();
if let Err(e) = std::fs::create_dir_all(&tmp_dir) {
warn!("Failed to create ~/tmp directory: {}", e);
return (
"⚠️ Context thinning failed: could not create ~/tmp directory".to_string(),
0,
);
}
// Determine output directory: use session dir if available, otherwise ~/tmp
let tmp_dir = if let Some(sid) = session_id {
let thinned_dir = get_thinned_dir(sid);
if let Err(e) = std::fs::create_dir_all(&thinned_dir) {
warn!("Failed to create thinned directory: {}", e);
return (
"⚠️ Context thinning failed: could not create thinned directory".to_string(),
0,
);
}
thinned_dir.to_string_lossy().to_string()
} else {
let fallback_dir = shellexpand::tilde("~/tmp").to_string();
if let Err(e) = std::fs::create_dir_all(&fallback_dir) {
warn!("Failed to create ~/tmp directory: {}", e);
return (
"⚠️ Context thinning failed: could not create ~/tmp directory".to_string(),
0,
);
}
fallback_dir
};
// Scan the first third of messages
for i in 0..first_third_end {
@@ -970,7 +1022,8 @@ Format this as a detailed but concise summary that can be used to resume the con
/// Perform context thinning on the ENTIRE conversation history (not just first third)
/// This is the "skinnify" variant that processes all messages
/// Returns a summary message about what was thinned
pub fn thin_context_all(&mut self) -> (String, usize) {
/// If session_id is provided, thinned content is saved to .g3/session/<session_id>/thinned/
pub fn thin_context_all(&mut self, session_id: Option<&str>) -> (String, usize) {
let current_percentage = self.percentage_used() as u32;
// Calculate the total messages - process ALL of them
@@ -980,15 +1033,28 @@ Format this as a detailed but concise summary that can be used to resume the con
let mut tool_call_leaned_count = 0;
let mut chars_saved = 0;
// Create ~/tmp directory if it doesn't exist
let tmp_dir = shellexpand::tilde("~/tmp").to_string();
if let Err(e) = std::fs::create_dir_all(&tmp_dir) {
warn!("Failed to create ~/tmp directory: {}", e);
return (
"⚠️ Context skinnifying failed: could not create ~/tmp directory".to_string(),
0,
);
}
// Determine output directory: use session dir if available, otherwise ~/tmp
let tmp_dir = if let Some(sid) = session_id {
let thinned_dir = get_thinned_dir(sid);
if let Err(e) = std::fs::create_dir_all(&thinned_dir) {
warn!("Failed to create thinned directory: {}", e);
return (
"⚠️ Context skinnifying failed: could not create thinned directory".to_string(),
0,
);
}
thinned_dir.to_string_lossy().to_string()
} else {
let fallback_dir = shellexpand::tilde("~/tmp").to_string();
if let Err(e) = std::fs::create_dir_all(&fallback_dir) {
warn!("Failed to create ~/tmp directory: {}", e);
return (
"⚠️ Context skinnifying failed: could not create ~/tmp directory".to_string(),
0,
);
}
fallback_dir
};
// Scan ALL messages (not just first third)
for i in 0..total_messages {
@@ -1825,7 +1891,7 @@ impl<W: UiWriter> Agent<W> {
// Step 1: Try thinnify (first third of context)
self.ui_writer.print_context_status("🥒 Step 1: Trying thinnify...\n");
let (thin_msg, thin_saved) = self.context_window.thin_context();
let (thin_msg, thin_saved) = self.context_window.thin_context(self.session_id.as_deref());
self.thinning_events.push(thin_saved);
self.ui_writer.print_context_thinning(&thin_msg);
@@ -1843,7 +1909,7 @@ impl<W: UiWriter> Agent<W> {
// Step 2: Try skinnify (entire context)
self.ui_writer.print_context_status("🦴 Step 2: Trying skinnify...\n");
let (skinny_msg, skinny_saved) = self.context_window.thin_context_all();
let (skinny_msg, skinny_saved) = self.context_window.thin_context_all(self.session_id.as_deref());
self.thinning_events.push(skinny_saved);
self.ui_writer.print_context_thinning(&skinny_msg);
@@ -1887,7 +1953,7 @@ impl<W: UiWriter> Agent<W> {
// Step 1: Try thinnify (first third of context)
self.ui_writer.print_context_status("🥒 Step 1: Trying thinnify...\n");
let (thin_msg, thin_saved) = self.context_window.thin_context();
let (thin_msg, thin_saved) = self.context_window.thin_context(self.session_id.as_deref());
self.thinning_events.push(thin_saved);
self.ui_writer.print_context_thinning(&thin_msg);
@@ -1904,7 +1970,7 @@ impl<W: UiWriter> Agent<W> {
// Step 2: Try skinnify (entire context)
self.ui_writer.print_context_status("🦴 Step 2: Trying skinnify...\n");
let (skinny_msg, skinny_saved) = self.context_window.thin_context_all();
let (skinny_msg, skinny_saved) = self.context_window.thin_context_all(self.session_id.as_deref());
self.thinning_events.push(skinny_saved);
self.ui_writer.print_context_thinning(&skinny_msg);
@@ -2066,60 +2132,6 @@ impl<W: UiWriter> Agent<W> {
Ok(context_length)
}
fn tool_log_handle() -> Option<&'static Mutex<std::fs::File>> {
    // Lazily-created, process-wide handle to the tool-call log file.
    // If setup fails, `None` is cached so the attempt happens only once.
    static TOOL_LOG: OnceLock<Option<Mutex<std::fs::File>>> = OnceLock::new();
    let slot = TOOL_LOG.get_or_init(|| {
        let logs_dir = get_logs_dir();
        if let Err(e) = std::fs::create_dir_all(&logs_dir) {
            error!("Failed to create logs directory for tool log: {}", e);
            return None;
        }
        // One log file per process start, named by timestamp.
        let ts = Local::now().format("%Y%m%d_%H%M%S").to_string();
        let path = logs_dir.join(format!("tool_calls_{}.log", ts));
        let opened = OpenOptions::new().create(true).append(true).open(&path);
        match opened {
            Ok(file) => Some(Mutex::new(file)),
            Err(e) => {
                error!("Failed to open tool log file {:?}: {}", path, e);
                None
            }
        }
    });
    slot.as_ref()
}
fn log_tool_call(&self, tool_call: &ToolCall, response: &str) {
    // Best-effort append to the shared tool log; a no-op when the log file
    // could not be created.
    let Some(handle) = Self::tool_log_handle() else {
        return;
    };
    // Keep each record on a single physical line.
    fn one_line(s: &str) -> String {
        s.replace('\n', "\\n")
    }
    // Char-based prefix clip (safe across multi-byte characters).
    fn clip(s: &str, limit: usize) -> String {
        s.chars().take(limit).collect()
    }
    let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
    let args_str = serde_json::to_string(&tool_call.args)
        .unwrap_or_else(|_| "<unserializable>".to_string());
    let args_snippet = clip(&one_line(&args_str), 80);
    let response_snippet = clip(&one_line(response), 80);
    // Pad the tool name to 15 columns so records line up.
    let record = format!(
        "{} {:<15} {} 🟩 {}\n",
        timestamp, tool_call.tool, args_snippet, response_snippet
    );
    if let Ok(mut file) = handle.lock() {
        let _ = file.write_all(record.as_bytes());
        let _ = file.flush();
    }
}
pub fn get_provider_info(&self) -> Result<(String, String)> {
let provider = self.providers.get(None)?;
Ok((provider.name().to_string(), provider.model().to_string()))
@@ -2226,7 +2238,7 @@ impl<W: UiWriter> Agent<W> {
) -> Result<TaskResult> {
// Reset the JSON tool call filter state at the start of each new task
// This prevents the filter from staying in suppression mode between user interactions
fixed_filter_json::reset_fixed_json_tool_state();
self.ui_writer.reset_json_filter();
// Validate that the system prompt is the first message (critical invariant)
self.validate_system_prompt_is_first();
@@ -2441,19 +2453,21 @@ impl<W: UiWriter> Agent<W> {
.unwrap_or_default()
.as_secs();
// Create logs directory if it doesn't exist
let logs_dir = get_logs_dir();
if !logs_dir.exists() {
// Use new .g3/session/<session_id>/ structure if we have a session ID
let filename = if let Some(ref session_id) = self.session_id {
// Ensure session directory exists
if let Err(e) = ensure_session_dir(session_id) {
error!("Failed to create session directory: {}", e);
return;
}
get_session_file(session_id)
} else {
// Fallback to old logs/ directory for sessions without ID
let logs_dir = get_logs_dir();
if let Err(e) = std::fs::create_dir_all(&logs_dir) {
error!("Failed to create logs directory: {}", e);
return;
}
}
// Use session-based filename if we have a session ID, otherwise fall back to timestamp
let filename = if let Some(ref session_id) = self.session_id {
logs_dir.join(format!("g3_session_{}.json", session_id))
} else {
logs_dir.join(format!("g3_context_{}.json", timestamp))
};
@@ -2529,18 +2543,15 @@ impl<W: UiWriter> Agent<W> {
None => return,
};
// Create logs directory if it doesn't exist
let logs_dir = get_logs_dir();
if !logs_dir.exists() {
if let Err(e) = std::fs::create_dir_all(&logs_dir) {
error!("Failed to create logs directory: {}", e);
return;
}
// Ensure session directory exists
if let Err(e) = ensure_session_dir(session_id) {
error!("Failed to create session directory: {}", e);
return;
}
// Generate filename using same pattern as save_context_window
let filename = logs_dir.join(format!("context_window_{}.txt", session_id));
let symlink_path = logs_dir.join("current_context_window");
// Use new .g3/session/<session_id>/ structure
let filename = get_context_summary_file(session_id);
let symlink_path = get_g3_dir().join("sessions").join("current_context_window");
// Build the summary content
let mut summary_lines = Vec::new();
@@ -2851,7 +2862,7 @@ impl<W: UiWriter> Agent<W> {
/// Manually trigger context thinning regardless of thresholds
pub fn force_thin(&mut self) -> String {
info!("Manual context thinning triggered");
let (message, chars_saved) = self.context_window.thin_context();
let (message, chars_saved) = self.context_window.thin_context(self.session_id.as_deref());
self.thinning_events.push(chars_saved);
message
}
@@ -2860,7 +2871,7 @@ impl<W: UiWriter> Agent<W> {
/// Unlike force_thin which only processes the first third, this processes all messages
pub fn force_thin_all(&mut self) -> String {
info!("Manual full context skinnifying triggered");
let (message, chars_saved) = self.context_window.thin_context_all();
let (message, chars_saved) = self.context_window.thin_context_all(self.session_id.as_deref());
self.thinning_events.push(chars_saved);
message
}
@@ -3106,9 +3117,8 @@ impl<W: UiWriter> Agent<W> {
}
};
// Get the session log path
let logs_dir = get_logs_dir();
let session_log_path = logs_dir.join(format!("g3_session_{}.json", session_id));
// Get the session log path (now in .g3/sessions/<session_id>/session.json)
let session_log_path = get_session_file(&session_id);
// Get current TODO content
let todo_snapshot = std::fs::read_to_string(get_todo_path()).ok();
@@ -3889,7 +3899,7 @@ impl<W: UiWriter> Agent<W> {
self.context_window.percentage_used() as u32
));
let (thin_summary, chars_saved) = self.context_window.thin_context();
let (thin_summary, chars_saved) = self.context_window.thin_context(self.session_id.as_deref());
self.thinning_events.push(chars_saved);
self.ui_writer.print_context_thinning(&thin_summary);
@@ -4289,7 +4299,7 @@ impl<W: UiWriter> Agent<W> {
);
let mut modified_tool_call = tool_call.clone();
modified_tool_call.tool = prefixed_tool_name;
self.log_tool_call(&modified_tool_call, &warning_msg);
debug!("{}", warning_msg);
continue; // Skip execution of duplicate
}
@@ -4308,7 +4318,7 @@ impl<W: UiWriter> Agent<W> {
// Log to tool log with red prefix
let mut modified_tool_call = tool_call.clone();
modified_tool_call.tool = prefixed_tool_name;
self.log_tool_call(&modified_tool_call, &warning_msg);
debug!("{}", warning_msg);
continue; // Skip execution of duplicate
}
@@ -4323,7 +4333,7 @@ impl<W: UiWriter> Agent<W> {
// Check if we should thin the context BEFORE executing the tool
if self.context_window.should_thin() {
let (thin_summary, chars_saved) =
self.context_window.thin_context();
self.context_window.thin_context(self.session_id.as_deref());
self.thinning_events.push(chars_saved);
// Print the thinning summary to the user
self.ui_writer.print_context_thinning(&thin_summary);
@@ -4348,7 +4358,7 @@ impl<W: UiWriter> Agent<W> {
// Filter out JSON tool calls from the display
let filtered_content =
fixed_filter_json::fixed_filter_json_tool_calls(&clean_content);
self.ui_writer.filter_json_tool_calls(&clean_content);
let final_display_content = filtered_content.trim();
// Display any new content before tool execution
@@ -4634,7 +4644,7 @@ impl<W: UiWriter> Agent<W> {
// Reset the JSON tool call filter state after each tool execution
// This ensures the filter doesn't stay in suppression mode for subsequent streaming content
fixed_filter_json::reset_fixed_json_tool_state();
self.ui_writer.reset_json_filter();
// Reset parser for next iteration - this clears the text buffer
parser.reset();
@@ -4667,7 +4677,7 @@ impl<W: UiWriter> Agent<W> {
if !clean_content.is_empty() {
let filtered_content =
fixed_filter_json::fixed_filter_json_tool_calls(&clean_content);
self.ui_writer.filter_json_tool_calls(&clean_content);
if !filtered_content.is_empty() {
if !response_started {
@@ -4712,9 +4722,7 @@ impl<W: UiWriter> Agent<W> {
.replace("<</SYS>>", "");
let filtered_text =
fixed_filter_json::fixed_filter_json_tool_calls(
&clean_text,
);
self.ui_writer.filter_json_tool_calls(&clean_text);
// Only use this if we truly have nothing else
if !filtered_text.trim().is_empty() && full_response.is_empty()
@@ -5074,7 +5082,7 @@ impl<W: UiWriter> Agent<W> {
Ok(s) => s.clone(),
Err(e) => format!("ERROR: {}", e),
};
self.log_tool_call(tool_call, &log_str);
debug!("Tool {} completed: {}", tool_call.tool, &log_str.chars().take(100).collect::<String>());
result
}
@@ -6878,7 +6886,7 @@ impl<W: UiWriter> Agent<W> {
}
}
// Note: JSON tool call filtering is now handled by fixed_filter_json::fixed_filter_json_tool_calls
// Note: JSON tool call filtering is now handled by UiWriter::filter_json_tool_calls (implemented in g3-cli)
// Apply unified diff to an input string with optional [start, end) bounds
pub fn apply_unified_diff_to_string(

View File

@@ -69,6 +69,18 @@ pub trait UiWriter: Send + Sync {
/// Print the final output summary with markdown formatting
/// Shows a spinner while formatting, then renders the markdown
fn print_final_output(&self, summary: &str);
/// Filter JSON tool calls from streaming content for display.
/// This is a UI concern - the raw content should be preserved for logging.
/// Default implementation passes through unchanged.
fn filter_json_tool_calls(&self, content: &str) -> String {
    // Pass-through default: writers without display filtering show the raw stream.
    content.to_owned()
}
/// Reset the JSON tool call filter state.
/// Called at the start of a new response to clear any partial state.
/// Default implementation does nothing; implementations that keep
/// cross-chunk filter state (e.g. suppression of a partially-streamed
/// JSON tool call) should clear it here.
fn reset_json_filter(&self) {}
}
/// A no-op implementation for when UI output is not needed

View File

@@ -69,7 +69,7 @@ fn test_thin_context_basic() {
// Trigger thinning at 50%
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
println!("Thinning summary: {}", summary);
@@ -130,7 +130,7 @@ fn test_thin_write_file_tool_calls() {
// Trigger thinning at 50%
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
println!("Thinning summary: {}", summary);
@@ -190,7 +190,7 @@ fn test_thin_str_replace_tool_calls() {
// Trigger thinning at 50%
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
println!("Thinning summary: {}", summary);
@@ -224,7 +224,7 @@ fn test_thin_context_no_large_results() {
}
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
// Should report no large results found
assert!(summary.contains("no large tool results or tool calls found"));
@@ -253,7 +253,7 @@ fn test_thin_context_only_affects_first_third() {
}
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
// First third is 4 messages (indices 0-3), so only indices 1 and 3 should be thinned
// That's 2 tool results

View File

@@ -30,7 +30,7 @@ fn test_todo_read_results_not_thinned() {
// Trigger thinning at 50%
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
println!("Thinning summary: {}", summary);
@@ -87,7 +87,7 @@ fn test_todo_write_results_not_thinned() {
// Trigger thinning at 50%
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
println!("Thinning summary: {}", summary);
@@ -135,7 +135,7 @@ fn test_non_todo_results_still_thinned() {
// Trigger thinning at 50%
context.used_tokens = 5000;
let (summary, _chars_saved) = context.thin_context();
let (summary, _chars_saved) = context.thin_context(None);
println!("Thinning summary: {}", summary);
@@ -185,7 +185,7 @@ fn test_todo_read_with_spaces_in_tool_name() {
// Trigger thinning
context.used_tokens = 5000;
let (_summary, _chars_saved) = context.thin_context();
let (_summary, _chars_saved) = context.thin_context(None);
// Verify TODO result was not thinned
let first_third_end = context.conversation_history.len() / 3;