refactor(g3-cli): Extract focused modules from lib.rs for improved readability
Extract three cohesive modules from the monolithic lib.rs (3188 -> 2785 lines):

- metrics.rs (147 lines): Turn metrics tracking and histogram generation
  - TurnMetrics struct
  - format_elapsed_time() for human-readable durations
  - generate_turn_histogram() for performance visualization
  - Added unit tests for core functions
- project_files.rs (181 lines): Project file reading utilities
  - read_agents_config() for AGENTS.md loading
  - read_project_readme() for README detection
  - read_project_memory() for .g3/memory.md
  - extract_readme_heading() for display
  - Added unit tests
- coach_feedback.rs (129 lines): Coach feedback extraction from session logs
  - extract_from_logs() main entry point
  - Helper functions for log parsing and text extraction

All modules have clear single responsibilities, improved documentation, and
maintain identical behavior to the original inline functions.

Agent: carmack
This commit is contained in:
129
crates/g3-cli/src/coach_feedback.rs
Normal file
129
crates/g3-cli/src/coach_feedback.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
//! Coach feedback extraction from session logs.
|
||||
//!
|
||||
//! Extracts feedback from the coach agent's session logs for the coach-player loop.
|
||||
|
||||
use anyhow::Result;
|
||||
use std::path::Path;
|
||||
|
||||
use g3_core::Agent;
|
||||
|
||||
use crate::simple_output::SimpleOutput;
|
||||
use crate::ui_writer_impl::ConsoleUiWriter;
|
||||
|
||||
/// Extract coach feedback by reading from the coach agent's specific log file.
|
||||
///
|
||||
/// Uses the coach agent's session ID to find the exact log file.
|
||||
pub fn extract_from_logs(
|
||||
coach_result: &g3_core::TaskResult,
|
||||
coach_agent: &Agent<ConsoleUiWriter>,
|
||||
output: &SimpleOutput,
|
||||
) -> Result<String> {
|
||||
let session_id = coach_agent
|
||||
.get_session_id()
|
||||
.ok_or_else(|| anyhow::anyhow!("Coach agent has no session ID"))?;
|
||||
|
||||
let log_file_path = resolve_log_path(&session_id);
|
||||
|
||||
// Try to extract from session log
|
||||
if let Some(feedback) = try_extract_from_log(&log_file_path) {
|
||||
output.print(&format!("✅ Extracted coach feedback from session: {}", session_id));
|
||||
return Ok(feedback);
|
||||
}
|
||||
|
||||
// Fallback: use the TaskResult's extract_summary method
|
||||
let fallback = coach_result.extract_summary();
|
||||
if !fallback.is_empty() {
|
||||
output.print(&format!(
|
||||
"✅ Extracted coach feedback from response: {} chars",
|
||||
fallback.len()
|
||||
));
|
||||
return Ok(fallback);
|
||||
}
|
||||
|
||||
Err(anyhow::anyhow!(
|
||||
"Could not extract coach feedback from session: {}\n\
|
||||
Log file path: {:?}\n\
|
||||
Log file exists: {}\n\
|
||||
Coach result response length: {} chars",
|
||||
session_id,
|
||||
log_file_path,
|
||||
log_file_path.exists(),
|
||||
coach_result.response.len()
|
||||
))
|
||||
}
|
||||
|
||||
/// Resolve the log file path, trying new path first then falling back to old.
|
||||
fn resolve_log_path(session_id: &str) -> std::path::PathBuf {
|
||||
let new_path = g3_core::get_session_file(session_id);
|
||||
if new_path.exists() {
|
||||
new_path
|
||||
} else {
|
||||
Path::new("logs").join(format!("g3_session_{}.json", session_id))
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract feedback from a session log file.
|
||||
///
|
||||
/// Searches backwards for the last assistant message with substantial text content.
|
||||
fn try_extract_from_log(log_file_path: &Path) -> Option<String> {
|
||||
if !log_file_path.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let log_content = std::fs::read_to_string(log_file_path).ok()?;
|
||||
let log_json: serde_json::Value = serde_json::from_str(&log_content).ok()?;
|
||||
|
||||
let messages = log_json
|
||||
.get("context_window")?
|
||||
.get("conversation_history")?
|
||||
.as_array()?;
|
||||
|
||||
// Search backwards for the last assistant message with text content
|
||||
for msg in messages.iter().rev() {
|
||||
if let Some(feedback) = extract_assistant_text(msg) {
|
||||
return Some(feedback);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Extract text content from an assistant message.
|
||||
fn extract_assistant_text(msg: &serde_json::Value) -> Option<String> {
|
||||
let role = msg.get("role").and_then(|v| v.as_str())?;
|
||||
if !role.eq_ignore_ascii_case("assistant") {
|
||||
return None;
|
||||
}
|
||||
|
||||
let content = msg.get("content")?;
|
||||
|
||||
// Handle string content
|
||||
if let Some(content_str) = content.as_str() {
|
||||
return filter_substantial_text(content_str);
|
||||
}
|
||||
|
||||
// Handle array content (native tool calling format)
|
||||
if let Some(content_array) = content.as_array() {
|
||||
for block in content_array {
|
||||
if block.get("type").and_then(|v| v.as_str()) == Some("text") {
|
||||
if let Some(text) = block.get("text").and_then(|v| v.as_str()) {
|
||||
if let Some(result) = filter_substantial_text(text) {
|
||||
return Some(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Filter out empty or very short responses (likely just tool calls).
///
/// Returns the trimmed text when it is longer than 10 bytes, otherwise
/// `None`. The `len() > 10` check alone already rules out the empty
/// string, so the original's separate `!trimmed.is_empty()` test was
/// redundant and has been removed.
fn filter_substantial_text(text: &str) -> Option<String> {
    let trimmed = text.trim();
    // 10 bytes or fewer is treated as noise (e.g. a bare tool-call ack).
    if trimmed.len() > 10 {
        Some(trimmed.to_string())
    } else {
        None
    }
}
|
||||
@@ -1,253 +1,17 @@
|
||||
// JSON tool call filtering for display (moved from g3-core)
|
||||
//! G3 CLI - Command-line interface for the G3 AI coding agent.
|
||||
|
||||
pub mod filter_json;
|
||||
pub mod metrics;
|
||||
pub mod project_files;
|
||||
pub mod streaming_markdown;
|
||||
|
||||
mod coach_feedback;
|
||||
mod machine_ui_writer;
|
||||
mod simple_output;
|
||||
mod ui_writer_impl;
|
||||
|
||||
use anyhow::Result;
|
||||
use crossterm::style::{Color, ResetColor, SetForegroundColor};
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
/// Per-turn measurements consumed by `generate_turn_histogram`.
#[derive(Debug, Clone)]
struct TurnMetrics {
    /// Ordinal of the turn (used as the histogram row label).
    turn_number: usize,
    /// Tokens consumed during this turn.
    tokens_used: u32,
    /// Wall-clock duration of this turn.
    wall_clock_time: Duration,
}
|
||||
|
||||
/// Generate a histogram showing tokens used and wall clock time per turn
///
/// Each turn renders two bars — tokens (█) and wall-clock time (▓) — scaled
/// proportionally to the per-metric maximum, followed by summary statistics.
fn generate_turn_histogram(turn_metrics: &[TurnMetrics]) -> String {
    if turn_metrics.is_empty() {
        return " No turn data available".to_string();
    }

    let mut histogram = String::new();

    // Find max values for scaling
    let max_tokens = turn_metrics
        .iter()
        .map(|t| t.tokens_used)
        .max()
        .unwrap_or(1);
    // Clamp millis to u32 so the cast below cannot truncate on very long turns.
    let max_time_ms = turn_metrics
        .iter()
        .map(|t| t.wall_clock_time.as_millis().min(u32::MAX as u128) as u32)
        .max()
        .unwrap_or(1);

    // Constants for histogram display
    const MAX_BAR_WIDTH: usize = 40;
    const TOKEN_CHAR: char = '█';
    const TIME_CHAR: char = '▓';

    histogram.push_str("\n📊 Per-Turn Performance Histogram:\n");
    histogram.push_str(&format!(
        " {} = Tokens Used (max: {})\n",
        TOKEN_CHAR, max_tokens
    ));
    histogram.push_str(&format!(
        " {} = Wall Clock Time (max: {:.1}s)\n\n",
        TIME_CHAR,
        max_time_ms as f64 / 1000.0
    ));

    for metrics in turn_metrics {
        let turn_time_ms = metrics.wall_clock_time.as_millis().min(u32::MAX as u128) as u32;

        // Calculate bar lengths (proportional to max values)
        let token_bar_len = if max_tokens > 0 {
            ((metrics.tokens_used as f64 / max_tokens as f64) * MAX_BAR_WIDTH as f64) as usize
        } else {
            0
        };

        let time_bar_len = if max_time_ms > 0 {
            ((turn_time_ms as f64 / max_time_ms as f64) * MAX_BAR_WIDTH as f64) as usize
        } else {
            0
        };

        // Format time duration
        let time_str = if turn_time_ms < 1000 {
            format!("{}ms", turn_time_ms)
        } else if turn_time_ms < 60_000 {
            format!("{:.1}s", turn_time_ms as f64 / 1000.0)
        } else {
            let minutes = turn_time_ms / 60_000;
            let seconds = (turn_time_ms % 60_000) as f64 / 1000.0;
            format!("{}m{:.1}s", minutes, seconds)
        };

        // Create the bars
        let token_bar = TOKEN_CHAR.to_string().repeat(token_bar_len);
        let time_bar = TIME_CHAR.to_string().repeat(time_bar_len);

        // Add turn information
        histogram.push_str(&format!(
            " Turn {:2}: {:>6} tokens │{:<40}│\n",
            metrics.turn_number, metrics.tokens_used, token_bar
        ));
        histogram.push_str(&format!(
            " {:>6} │{:<40}│\n",
            time_str, time_bar
        ));

        // Add separator line between turns (except for last turn).
        // NOTE(review): compares turn_number values rather than positions —
        // assumes turn numbers are unique; verify against the caller.
        if metrics.turn_number != turn_metrics.last().unwrap().turn_number {
            histogram
                .push_str(" ────────────┼────────────────────────────────────────┤\n");
        }
    }

    // Add summary statistics
    let total_tokens: u32 = turn_metrics.iter().map(|t| t.tokens_used).sum();
    let total_time: Duration = turn_metrics.iter().map(|t| t.wall_clock_time).sum();
    let avg_tokens = total_tokens as f64 / turn_metrics.len() as f64;
    let avg_time_ms = total_time.as_millis() as f64 / turn_metrics.len() as f64;

    histogram.push_str("\n📈 Summary Statistics:\n");
    histogram.push_str(&format!(
        " • Total Tokens: {} across {} turns\n",
        total_tokens,
        turn_metrics.len()
    ));
    histogram.push_str(&format!(" • Average Tokens/Turn: {:.1}\n", avg_tokens));
    histogram.push_str(&format!(
        " • Total Time: {:.1}s\n",
        total_time.as_secs_f64()
    ));
    histogram.push_str(&format!(
        " • Average Time/Turn: {:.1}s\n",
        avg_time_ms / 1000.0
    ));

    histogram
}
|
||||
|
||||
/// Format a Duration as human-readable elapsed time (e.g., "1h 23m 45s", "5m 30s", "45s")
///
/// The largest non-zero unit determines the format; sub-second durations
/// are reported in milliseconds so they don't render as "0s".
fn format_elapsed_time(duration: Duration) -> String {
    let total_secs = duration.as_secs();
    let hours = total_secs / 3600;
    let minutes = (total_secs % 3600) / 60;
    let seconds = total_secs % 60;

    if hours > 0 {
        format!("{}h {}m {}s", hours, minutes, seconds)
    } else if minutes > 0 {
        format!("{}m {}s", minutes, seconds)
    } else if seconds > 0 {
        format!("{}s", seconds)
    } else {
        // For very short durations, show milliseconds
        format!("{}ms", duration.as_millis())
    }
}
|
||||
|
||||
/// Extract coach feedback by reading from the coach agent's specific log file
/// Uses the coach agent's session ID to find the exact log file
///
/// NOTE(review): this is the pre-refactor inline version; the same logic
/// exists as `coach_feedback::extract_from_logs` in this changeset.
fn extract_coach_feedback_from_logs(
    coach_result: &g3_core::TaskResult,
    coach_agent: &g3_core::Agent<ConsoleUiWriter>,
    output: &SimpleOutput,
) -> Result<String> {
    // Get the coach agent's session ID
    let session_id = coach_agent
        .get_session_id()
        .ok_or_else(|| anyhow::anyhow!("Coach agent has no session ID"))?;

    // Try new .g3/sessions/<session_id>/session.json path first
    let log_file_path = g3_core::get_session_file(&session_id);

    // Fall back to old logs/ path if new path doesn't exist
    let log_file_path = if log_file_path.exists() {
        log_file_path
    } else {
        let logs_dir = std::path::Path::new("logs");
        logs_dir.join(format!("g3_session_{}.json", session_id))
    };

    // Try to extract from session log
    if let Some(feedback) = try_extract_feedback_from_log(&log_file_path, output) {
        output.print(&format!(
            "✅ Extracted coach feedback from session: {}",
            session_id
        ));
        return Ok(feedback);
    }

    // Fallback: use the TaskResult's extract_summary method
    let fallback = coach_result.extract_summary();
    if !fallback.is_empty() {
        output.print(&format!(
            "✅ Extracted coach feedback from response: {} chars",
            fallback.len()
        ));
        return Ok(fallback);
    }

    // Last resort: return an error instead of panicking
    Err(anyhow::anyhow!(
        "Could not extract coach feedback from session: {}\n\
         Log file path: {:?}\n\
         Log file exists: {}\n\
         Coach result response length: {} chars",
        session_id,
        log_file_path,
        log_file_path.exists(),
        coach_result.response.len()
    ))
}
|
||||
|
||||
/// Helper function to extract feedback from a session log file
/// Looks for the last assistant message with substantial text content
///
/// Returns `None` when the file is missing, unreadable, not valid JSON, or
/// contains no assistant message with enough text.
fn try_extract_feedback_from_log(
    log_file_path: &std::path::Path,
    _output: &SimpleOutput,
) -> Option<String> {
    if !log_file_path.exists() {
        return None;
    }

    let log_content = std::fs::read_to_string(log_file_path).ok()?;
    let log_json: serde_json::Value = serde_json::from_str(&log_content).ok()?;

    let messages = log_json
        .get("context_window")?
        .get("conversation_history")?
        .as_array()?;

    // Search backwards for the last assistant message with text content
    for msg in messages.iter().rev() {
        // NOTE(review): the `?` here aborts the ENTIRE scan (returns None)
        // as soon as any message lacks a string "role" — it does not merely
        // skip that message. Confirm this is intended.
        let role = msg.get("role").and_then(|v| v.as_str())?;

        if role.eq_ignore_ascii_case("assistant") {
            if let Some(content) = msg.get("content") {
                // Handle string content
                if let Some(content_str) = content.as_str() {
                    let trimmed = content_str.trim();
                    // Skip empty or very short responses (likely just tool calls)
                    if !trimmed.is_empty() && trimmed.len() > 10 {
                        return Some(trimmed.to_string());
                    }
                }
                // Handle array content (native tool calling format)
                if let Some(content_array) = content.as_array() {
                    for block in content_array {
                        if block.get("type").and_then(|v| v.as_str()) == Some("text") {
                            if let Some(text) = block.get("text").and_then(|v| v.as_str()) {
                                let trimmed = text.trim();
                                if !trimmed.is_empty() && trimmed.len() > 10 {
                                    return Some(trimmed.to_string());
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    None
}
|
||||
|
||||
use clap::Parser;
|
||||
use g3_config::Config;
|
||||
use g3_core::{project::Project, ui_writer::UiWriter, Agent, DiscoveryOptions};
|
||||
@@ -257,15 +21,15 @@ use sha2::{Digest, Sha256};
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::exit;
|
||||
use std::time::Instant;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use g3_core::error_handling::{classify_error, ErrorType, RecoverableError};
|
||||
mod simple_output;
|
||||
mod ui_writer_impl;
|
||||
use simple_output::SimpleOutput;
|
||||
mod machine_ui_writer;
|
||||
use machine_ui_writer::MachineUiWriter;
|
||||
use metrics::{format_elapsed_time, generate_turn_histogram, TurnMetrics};
|
||||
use project_files::{extract_readme_heading, read_agents_config, read_project_memory, read_project_readme};
|
||||
use simple_output::SimpleOutput;
|
||||
use ui_writer_impl::ConsoleUiWriter;
|
||||
|
||||
#[derive(Parser, Clone)]
|
||||
@@ -1423,173 +1187,6 @@ async fn run_with_machine_mode(
|
||||
}
|
||||
|
||||
/// Check if we're in a project directory and read AGENTS.md if available
///
/// Tries `AGENTS.md` first, then the lowercase `agents.md`; read failures
/// are logged and treated as "no config".
fn read_agents_config(workspace_dir: &Path) -> Option<String> {
    // Look for AGENTS.md in the current directory
    let agents_path = workspace_dir.join("AGENTS.md");

    if agents_path.exists() {
        match std::fs::read_to_string(&agents_path) {
            Ok(content) => {
                // Return the content with a note about which file was read
                Some(format!(
                    "🤖 Agent Configuration (from AGENTS.md):\n\n{}",
                    content
                ))
            }
            Err(e) => {
                // Log the error but continue without the agents config
                error!("Failed to read AGENTS.md: {}", e);
                None
            }
        }
    } else {
        // Check for alternative names
        let alt_path = workspace_dir.join("agents.md");
        if alt_path.exists() {
            match std::fs::read_to_string(&alt_path) {
                Ok(content) => Some(format!(
                    "🤖 Agent Configuration (from agents.md):\n\n{}",
                    content
                )),
                Err(e) => {
                    error!("Failed to read agents.md: {}", e);
                    None
                }
            }
        } else {
            None
        }
    }
}
|
||||
|
||||
/// Check if we're in a project directory and read README if available
///
/// Only reads a README when the workspace looks like a project (contains
/// `.g3` or `.git`); candidate filenames are tried in priority order.
fn read_project_readme(workspace_dir: &Path) -> Option<String> {
    // Check if we're in a project directory (contains .g3 or .git)
    let is_project_dir = workspace_dir.join(".g3").exists() || workspace_dir.join(".git").exists();

    if !is_project_dir {
        return None;
    }

    // Look for README files in common formats
    let readme_names = [
        "README.md",
        "README.MD",
        "readme.md",
        "Readme.md",
        "README",
        "README.txt",
        "README.rst",
    ];

    for readme_name in &readme_names {
        let readme_path = workspace_dir.join(readme_name);
        if readme_path.exists() {
            match std::fs::read_to_string(&readme_path) {
                Ok(content) => {
                    // Return the content with a note about which file was read
                    return Some(format!(
                        "📚 Project README (from {}):\n\n{}",
                        readme_name, content
                    ));
                }
                Err(e) => {
                    // Log the error but continue looking for other README files
                    error!("Failed to read {}: {}", readme_path.display(), e);
                }
            }
        }
    }

    None
}
|
||||
|
||||
/// Read project memory if available
///
/// Reads `.g3/memory.md` from the workspace; read errors are silently
/// treated as "no memory". The size annotation switches to "k chars"
/// at 1000 bytes.
fn read_project_memory(workspace_dir: &Path) -> Option<String> {
    let memory_path = workspace_dir.join(".g3").join("memory.md");

    if memory_path.exists() {
        match std::fs::read_to_string(&memory_path) {
            Ok(content) => {
                let size = if content.len() < 1000 {
                    format!("{} chars", content.len())
                } else {
                    format!("{:.1}k chars", content.len() as f64 / 1000.0)
                };
                Some(format!(
                    "🧠 Project Memory ({}):\n\n{}",
                    size, content
                ))
            }
            Err(_) => None,
        }
    } else {
        None
    }
}
|
||||
|
||||
/// Extract the main heading or title from README content
///
/// The input may be the combined prompt text in which the README is
/// embedded after a "📚 Project README (from …)" banner; the search is
/// restricted to the README portion when that banner is present.
fn extract_readme_heading(readme_content: &str) -> Option<String> {
    // Find the README section in the combined content
    // The README section starts with "📚 Project README (from"
    let readme_start = readme_content.find("📚 Project README (from");

    // If we can't find the README marker, the content might be just README
    // or might not contain README at all
    let content_to_search = match readme_start {
        Some(pos) => &readme_content[pos..],
        None => readme_content,
    };

    // Process the content line by line, skipping the prefix line
    let mut content_lines = Vec::new();
    for line in content_to_search.lines() {
        // Skip the "📚 Project README (from ...):" line
        if line.starts_with("📚 Project README") {
            continue;
        }
        content_lines.push(line);
    }
    let content = content_lines.join("\n");

    // Look for the first markdown heading
    for line in content.lines() {
        let trimmed = line.trim();

        // Check for H1 heading (# Title)
        if let Some(stripped) = trimmed.strip_prefix("# ") {
            let title = stripped.trim();
            if !title.is_empty() {
                // Return the full title (including any description after dash)
                return Some(title.to_string());
            }
        }

        // Skip other markdown headings for now (##, ###, etc.)
        // We're only looking for the main H1 heading
    }

    // If no H1 heading found, look for the first non-empty, non-metadata line as a fallback
    for line in content.lines().take(5) {
        let trimmed = line.trim();
        // Skip empty lines, other heading markers, and metadata
        if !trimmed.is_empty()
            && !trimmed.starts_with("📚")
            && !trimmed.starts_with('#')
            && !trimmed.starts_with("==")
            && !trimmed.starts_with("--")
        {
            // Limit length for display.
            // NOTE(review): this byte-index slice can panic if byte 97 is not
            // a UTF-8 char boundary — confirm inputs are ASCII-safe.
            return Some(if trimmed.len() > 100 {
                format!("{}...", &trimmed[..97])
            } else {
                trimmed.to_string()
            });
        }
    }
    None
}
|
||||
|
||||
async fn run_interactive<W: UiWriter>(
|
||||
mut agent: Agent<W>,
|
||||
show_prompt: bool,
|
||||
@@ -3063,7 +2660,7 @@ Remember: Be clear in your review and concise in your feedback. APPROVE iff the
|
||||
|
||||
// Extract the complete coach feedback from the response
|
||||
let coach_feedback_text =
|
||||
extract_coach_feedback_from_logs(&coach_result, &coach_agent, &output)?;
|
||||
coach_feedback::extract_from_logs(&coach_result, &coach_agent, &output)?;
|
||||
|
||||
// Log the size of the feedback for debugging
|
||||
debug!(
|
||||
|
||||
147
crates/g3-cli/src/metrics.rs
Normal file
147
crates/g3-cli/src/metrics.rs
Normal file
@@ -0,0 +1,147 @@
|
||||
//! Turn metrics and histogram generation for performance visualization.
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
/// Metrics captured for a single turn of interaction.
#[derive(Debug, Clone)]
pub struct TurnMetrics {
    /// Ordinal of the turn (used as the histogram row label).
    pub turn_number: usize,
    /// Tokens consumed during this turn.
    pub tokens_used: u32,
    /// Wall-clock duration of this turn.
    pub wall_clock_time: Duration,
}
|
||||
|
||||
/// Format a Duration as human-readable elapsed time (e.g., "1h 23m 45s").
///
/// The largest non-zero unit decides the shape; sub-second durations are
/// reported in milliseconds so short runs don't render as "0s".
pub fn format_elapsed_time(duration: Duration) -> String {
    let total = duration.as_secs();
    let hours = total / 3600;
    let minutes = (total % 3600) / 60;
    let seconds = total % 60;

    if hours > 0 {
        format!("{}h {}m {}s", hours, minutes, seconds)
    } else if minutes > 0 {
        format!("{}m {}s", minutes, seconds)
    } else if seconds > 0 {
        format!("{}s", seconds)
    } else {
        format!("{}ms", duration.as_millis())
    }
}
|
||||
|
||||
/// Generate a histogram showing tokens used and wall clock time per turn.
///
/// Renders two bars per turn — tokens (█) and wall-clock time (▓) — scaled
/// to `MAX_BAR_WIDTH` via `scale_bar`, then appends summary statistics.
pub fn generate_turn_histogram(turn_metrics: &[TurnMetrics]) -> String {
    if turn_metrics.is_empty() {
        return " No turn data available".to_string();
    }

    const MAX_BAR_WIDTH: usize = 40;
    const TOKEN_CHAR: char = '█';
    const TIME_CHAR: char = '▓';

    let max_tokens = turn_metrics.iter().map(|t| t.tokens_used).max().unwrap_or(1);
    // Clamp millis to u32 so the cast cannot truncate on very long turns.
    let max_time_ms = turn_metrics
        .iter()
        .map(|t| t.wall_clock_time.as_millis().min(u32::MAX as u128) as u32)
        .max()
        .unwrap_or(1);

    let mut histogram = String::new();
    histogram.push_str("\n📊 Per-Turn Performance Histogram:\n");
    histogram.push_str(&format!(" {} = Tokens Used (max: {})\n", TOKEN_CHAR, max_tokens));
    histogram.push_str(&format!(
        " {} = Wall Clock Time (max: {:.1}s)\n\n",
        TIME_CHAR,
        max_time_ms as f64 / 1000.0
    ));

    for metrics in turn_metrics {
        let turn_time_ms = metrics.wall_clock_time.as_millis().min(u32::MAX as u128) as u32;

        let token_bar_len = scale_bar(metrics.tokens_used, max_tokens, MAX_BAR_WIDTH);
        let time_bar_len = scale_bar(turn_time_ms, max_time_ms, MAX_BAR_WIDTH);

        let time_str = format_duration_ms(turn_time_ms);
        let token_bar = TOKEN_CHAR.to_string().repeat(token_bar_len);
        let time_bar = TIME_CHAR.to_string().repeat(time_bar_len);

        histogram.push_str(&format!(
            " Turn {:2}: {:>6} tokens │{:<40}│\n",
            metrics.turn_number, metrics.tokens_used, token_bar
        ));
        histogram.push_str(&format!(" {:>6} │{:<40}│\n", time_str, time_bar));

        // Separator between turns (except for last).
        // NOTE(review): compares turn_number values, not slice positions —
        // assumes turn numbers are unique; verify against the caller.
        if metrics.turn_number != turn_metrics.last().unwrap().turn_number {
            histogram.push_str(
                " ────────────┼────────────────────────────────────────┤\n",
            );
        }
    }

    append_summary_statistics(&mut histogram, turn_metrics);
    histogram
}
|
||||
|
||||
/// Scale a value to a bar length proportional to max.
///
/// Returns 0 when `max` is 0, avoiding a division by zero.
fn scale_bar(value: u32, max: u32, max_width: usize) -> usize {
    match max {
        0 => 0,
        max => ((value as f64 / max as f64) * max_width as f64) as usize,
    }
}
|
||||
|
||||
/// Format milliseconds as a human-readable duration string.
///
/// Under a second: "NNNms"; under a minute: "N.Ns"; otherwise "NmN.Ns".
fn format_duration_ms(ms: u32) -> String {
    if ms < 1000 {
        format!("{}ms", ms)
    } else if ms < 60_000 {
        format!("{:.1}s", ms as f64 / 1000.0)
    } else {
        // Whole minutes plus fractional seconds, e.g. "2m5.5s".
        let mins = ms / 60_000;
        let secs = f64::from(ms % 60_000) / 1000.0;
        format!("{}m{:.1}s", mins, secs)
    }
}
|
||||
|
||||
/// Append summary statistics to the histogram output.
|
||||
fn append_summary_statistics(histogram: &mut String, turn_metrics: &[TurnMetrics]) {
|
||||
let total_tokens: u32 = turn_metrics.iter().map(|t| t.tokens_used).sum();
|
||||
let total_time: Duration = turn_metrics.iter().map(|t| t.wall_clock_time).sum();
|
||||
let avg_tokens = total_tokens as f64 / turn_metrics.len() as f64;
|
||||
let avg_time_ms = total_time.as_millis() as f64 / turn_metrics.len() as f64;
|
||||
|
||||
histogram.push_str("\n📈 Summary Statistics:\n");
|
||||
histogram.push_str(&format!(
|
||||
" • Total Tokens: {} across {} turns\n",
|
||||
total_tokens,
|
||||
turn_metrics.len()
|
||||
));
|
||||
histogram.push_str(&format!(" • Average Tokens/Turn: {:.1}\n", avg_tokens));
|
||||
histogram.push_str(&format!(" • Total Time: {:.1}s\n", total_time.as_secs_f64()));
|
||||
histogram.push_str(&format!(" • Average Time/Turn: {:.1}s\n", avg_time_ms / 1000.0));
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Spot-checks for the human-readable elapsed-time formatter.
    #[test]
    fn test_format_elapsed_time() {
        assert_eq!(format_elapsed_time(Duration::from_millis(500)), "500ms");
        assert_eq!(format_elapsed_time(Duration::from_secs(45)), "45s");
        assert_eq!(format_elapsed_time(Duration::from_secs(90)), "1m 30s");
        assert_eq!(format_elapsed_time(Duration::from_secs(3661)), "1h 1m 1s");
    }

    // Empty input should produce the placeholder message, not panic.
    #[test]
    fn test_empty_histogram() {
        let result = generate_turn_histogram(&[]);
        assert!(result.contains("No turn data available"));
    }

    // Bar scaling: proportional, full-width at max, and safe when max is 0.
    #[test]
    fn test_scale_bar() {
        assert_eq!(scale_bar(50, 100, 40), 20);
        assert_eq!(scale_bar(100, 100, 40), 40);
        assert_eq!(scale_bar(0, 100, 40), 0);
        assert_eq!(scale_bar(50, 0, 40), 0);
    }
}
|
||||
181
crates/g3-cli/src/project_files.rs
Normal file
181
crates/g3-cli/src/project_files.rs
Normal file
@@ -0,0 +1,181 @@
|
||||
//! Project file reading utilities.
|
||||
//!
|
||||
//! Reads AGENTS.md, README.md, and project memory files from the workspace.
|
||||
|
||||
use std::path::Path;
|
||||
use tracing::error;
|
||||
|
||||
/// Read AGENTS.md configuration from the workspace directory.
|
||||
/// Returns formatted content with emoji prefix, or None if not found.
|
||||
pub fn read_agents_config(workspace_dir: &Path) -> Option<String> {
|
||||
// Try AGENTS.md first, then agents.md
|
||||
let paths = [
|
||||
(workspace_dir.join("AGENTS.md"), "AGENTS.md"),
|
||||
(workspace_dir.join("agents.md"), "agents.md"),
|
||||
];
|
||||
|
||||
for (path, name) in &paths {
|
||||
if path.exists() {
|
||||
match std::fs::read_to_string(path) {
|
||||
Ok(content) => {
|
||||
return Some(format!("🤖 Agent Configuration (from {}):{}\n{}", name, "\n", content));
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to read {}: {}", name, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Read README from the workspace directory if it's a project directory.
|
||||
/// Returns formatted content with emoji prefix, or None if not found.
|
||||
pub fn read_project_readme(workspace_dir: &Path) -> Option<String> {
|
||||
// Only read README if we're in a project directory
|
||||
let is_project_dir = workspace_dir.join(".g3").exists() || workspace_dir.join(".git").exists();
|
||||
if !is_project_dir {
|
||||
return None;
|
||||
}
|
||||
|
||||
const README_NAMES: &[&str] = &[
|
||||
"README.md",
|
||||
"README.MD",
|
||||
"readme.md",
|
||||
"Readme.md",
|
||||
"README",
|
||||
"README.txt",
|
||||
"README.rst",
|
||||
];
|
||||
|
||||
for name in README_NAMES {
|
||||
let path = workspace_dir.join(name);
|
||||
if path.exists() {
|
||||
match std::fs::read_to_string(&path) {
|
||||
Ok(content) => {
|
||||
return Some(format!("📚 Project README (from {}):{}\n{}", name, "\n", content));
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to read {}: {}", path.display(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Read project memory from .g3/memory.md in the workspace directory.
|
||||
/// Returns formatted content with emoji prefix and size info, or None if not found.
|
||||
pub fn read_project_memory(workspace_dir: &Path) -> Option<String> {
|
||||
let memory_path = workspace_dir.join(".g3").join("memory.md");
|
||||
|
||||
if !memory_path.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match std::fs::read_to_string(&memory_path) {
|
||||
Ok(content) => {
|
||||
let size = format_size(content.len());
|
||||
Some(format!("🧠 Project Memory ({}):{}\n{}", size, "\n", content))
|
||||
}
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Format a byte size for display.
///
/// Sizes below 1000 are shown as plain character counts; larger sizes
/// are shown in thousands with one decimal place.
fn format_size(len: usize) -> String {
    match len {
        0..=999 => format!("{} chars", len),
        _ => format!("{:.1}k chars", len as f64 / 1000.0),
    }
}
|
||||
|
||||
/// Extract the first H1 heading from README content for display.
|
||||
pub fn extract_readme_heading(readme_content: &str) -> Option<String> {
|
||||
// Find where the actual README content starts (after any prefix markers)
|
||||
let readme_start = readme_content.find("📚 Project README (from");
|
||||
|
||||
let content_to_search = match readme_start {
|
||||
Some(pos) => &readme_content[pos..],
|
||||
None => readme_content,
|
||||
};
|
||||
|
||||
// Skip the prefix line and collect content
|
||||
let content: String = content_to_search
|
||||
.lines()
|
||||
.filter(|line| !line.starts_with("📚 Project README"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
// Look for H1 heading
|
||||
for line in content.lines() {
|
||||
let trimmed = line.trim();
|
||||
if let Some(stripped) = trimmed.strip_prefix("# ") {
|
||||
let title = stripped.trim();
|
||||
if !title.is_empty() {
|
||||
return Some(title.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: first non-empty, non-metadata line
|
||||
find_fallback_title(&content)
|
||||
}
|
||||
|
||||
/// Find a fallback title from the first few lines of content.
|
||||
fn find_fallback_title(content: &str) -> Option<String> {
|
||||
for line in content.lines().take(5) {
|
||||
let trimmed = line.trim();
|
||||
if !trimmed.is_empty()
|
||||
&& !trimmed.starts_with("📚")
|
||||
&& !trimmed.starts_with('#')
|
||||
&& !trimmed.starts_with("==")
|
||||
&& !trimmed.starts_with("--")
|
||||
{
|
||||
return Some(truncate_for_display(trimmed, 100));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Truncate a string for display, adding an ellipsis if needed.
///
/// Truncation happens on `char` boundaries, so multi-byte UTF-8 input
/// cannot cause a slicing panic (the previous byte-index slice
/// `&s[..max_len - 3]` could split a code point, and underflowed when
/// `max_len < 3`). For ASCII input the output is identical to before:
/// at most `max_len` characters including the "..." suffix (degenerate
/// limits below 3 still produce the bare "...").
fn truncate_for_display(s: &str, max_len: usize) -> String {
    if s.chars().count() <= max_len {
        return s.to_string();
    }
    // Leave room for the ellipsis; saturate so tiny limits don't underflow.
    let keep = max_len.saturating_sub(3);
    let head: String = s.chars().take(keep).collect();
    format!("{}...", head)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Bare README content: the H1 text is returned without the "# " marker.
    #[test]
    fn test_extract_readme_heading() {
        let content = "# My Project\n\nSome description";
        assert_eq!(extract_readme_heading(content), Some("My Project".to_string()));
    }

    // Banner-prefixed content: the banner line is skipped before scanning.
    #[test]
    fn test_extract_readme_heading_with_prefix() {
        let content = "📚 Project README (from README.md):\n# Cool App\n\nDescription";
        assert_eq!(extract_readme_heading(content), Some("Cool App".to_string()));
    }

    // Size formatting switches to "k chars" at 1000 bytes.
    #[test]
    fn test_format_size() {
        assert_eq!(format_size(500), "500 chars");
        assert_eq!(format_size(1500), "1.5k chars");
    }

    // Truncation keeps short strings intact and caps long ones at max_len
    // including the "..." suffix.
    #[test]
    fn test_truncate_for_display() {
        assert_eq!(truncate_for_display("short", 100), "short");
        let long = "a".repeat(150);
        let truncated = truncate_for_display(&long, 100);
        assert!(truncated.ends_with("..."));
        assert_eq!(truncated.len(), 100);
    }
}
|
||||
Reference in New Issue
Block a user