add code exploration fast start
This tries to short-circuit multiple round-trips to llm for reading code. It's a precursor to trying to context engineer tailored to specific tasks. In initial experiments, it's only marginally faster than regular mode, and burns more tokens.
This commit is contained in:
14
Cargo.lock
generated
14
Cargo.lock
generated
@@ -1365,6 +1365,8 @@ dependencies = [
|
|||||||
"dirs 5.0.1",
|
"dirs 5.0.1",
|
||||||
"g3-config",
|
"g3-config",
|
||||||
"g3-core",
|
"g3-core",
|
||||||
|
"g3-planner",
|
||||||
|
"g3-providers",
|
||||||
"hex",
|
"hex",
|
||||||
"indicatif",
|
"indicatif",
|
||||||
"ratatui",
|
"ratatui",
|
||||||
@@ -1499,6 +1501,18 @@ dependencies = [
|
|||||||
"tracing",
|
"tracing",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "g3-planner"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"const_format",
|
||||||
|
"g3-providers",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"tokio",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "g3-providers"
|
name = "g3-providers"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
members = [
|
members = [
|
||||||
"crates/g3-cli",
|
"crates/g3-cli",
|
||||||
"crates/g3-core",
|
"crates/g3-core",
|
||||||
|
"crates/g3-planner",
|
||||||
"crates/g3-providers",
|
"crates/g3-providers",
|
||||||
"crates/g3-config",
|
"crates/g3-config",
|
||||||
"crates/g3-execution",
|
"crates/g3-execution",
|
||||||
|
|||||||
@@ -7,6 +7,8 @@ description = "CLI interface for G3 AI coding agent"
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
g3-core = { path = "../g3-core" }
|
g3-core = { path = "../g3-core" }
|
||||||
g3-config = { path = "../g3-config" }
|
g3-config = { path = "../g3-config" }
|
||||||
|
g3-planner = { path = "../g3-planner" }
|
||||||
|
g3-providers = { path = "../g3-providers" }
|
||||||
clap = { workspace = true }
|
clap = { workspace = true }
|
||||||
tokio = { workspace = true }
|
tokio = { workspace = true }
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
|
|||||||
@@ -159,7 +159,7 @@ fn extract_coach_feedback_from_logs(
|
|||||||
|
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use g3_config::Config;
|
use g3_config::Config;
|
||||||
use g3_core::{project::Project, ui_writer::UiWriter, Agent};
|
use g3_core::{project::Project, ui_writer::UiWriter, Agent, DiscoveryOptions};
|
||||||
use rustyline::error::ReadlineError;
|
use rustyline::error::ReadlineError;
|
||||||
use rustyline::DefaultEditor;
|
use rustyline::DefaultEditor;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
@@ -247,6 +247,10 @@ pub struct Cli {
|
|||||||
/// Enable WebDriver browser automation tools
|
/// Enable WebDriver browser automation tools
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
pub webdriver: bool,
|
pub webdriver: bool,
|
||||||
|
|
||||||
|
/// Enable fast codebase discovery before first LLM turn
|
||||||
|
#[arg(long, value_name = "PATH")]
|
||||||
|
pub codebase_fast_start: Option<PathBuf>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn run() -> Result<()> {
|
pub async fn run() -> Result<()> {
|
||||||
@@ -676,6 +680,7 @@ async fn run_accumulative_mode(
|
|||||||
cli.show_code,
|
cli.show_code,
|
||||||
cli.max_turns,
|
cli.max_turns,
|
||||||
cli.quiet,
|
cli.quiet,
|
||||||
|
cli.codebase_fast_start.clone(),
|
||||||
) => result,
|
) => result,
|
||||||
_ = tokio::signal::ctrl_c() => {
|
_ = tokio::signal::ctrl_c() => {
|
||||||
output.print("\n⚠️ Autonomous run cancelled by user (Ctrl+C)");
|
output.print("\n⚠️ Autonomous run cancelled by user (Ctrl+C)");
|
||||||
@@ -727,6 +732,7 @@ async fn run_autonomous_machine(
|
|||||||
show_code: bool,
|
show_code: bool,
|
||||||
max_turns: usize,
|
max_turns: usize,
|
||||||
_quiet: bool,
|
_quiet: bool,
|
||||||
|
_codebase_fast_start: Option<PathBuf>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
println!("AUTONOMOUS_MODE_STARTED");
|
println!("AUTONOMOUS_MODE_STARTED");
|
||||||
println!("WORKSPACE: {}", project.workspace().display());
|
println!("WORKSPACE: {}", project.workspace().display());
|
||||||
@@ -757,7 +763,7 @@ async fn run_autonomous_machine(
|
|||||||
);
|
);
|
||||||
|
|
||||||
println!("TASK_START");
|
println!("TASK_START");
|
||||||
let result = agent.execute_task_with_timing(&task, None, false, show_prompt, show_code, true).await?;
|
let result = agent.execute_task_with_timing(&task, None, false, show_prompt, show_code, true, None).await?;
|
||||||
println!("AGENT_RESPONSE:");
|
println!("AGENT_RESPONSE:");
|
||||||
println!("{}", result.response);
|
println!("{}", result.response);
|
||||||
println!("END_AGENT_RESPONSE");
|
println!("END_AGENT_RESPONSE");
|
||||||
@@ -784,13 +790,14 @@ async fn run_with_console_mode(
|
|||||||
cli.show_code,
|
cli.show_code,
|
||||||
cli.max_turns,
|
cli.max_turns,
|
||||||
cli.quiet,
|
cli.quiet,
|
||||||
|
cli.codebase_fast_start.clone(),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
} else if let Some(task) = cli.task {
|
} else if let Some(task) = cli.task {
|
||||||
// Single-shot mode
|
// Single-shot mode
|
||||||
let output = SimpleOutput::new();
|
let output = SimpleOutput::new();
|
||||||
let result = agent
|
let result = agent
|
||||||
.execute_task_with_timing(&task, None, false, cli.show_prompt, cli.show_code, true)
|
.execute_task_with_timing(&task, None, false, cli.show_prompt, cli.show_code, true, None)
|
||||||
.await?;
|
.await?;
|
||||||
output.print_smart(&result.response);
|
output.print_smart(&result.response);
|
||||||
} else {
|
} else {
|
||||||
@@ -815,12 +822,13 @@ async fn run_with_machine_mode(
|
|||||||
cli.show_code,
|
cli.show_code,
|
||||||
cli.max_turns,
|
cli.max_turns,
|
||||||
cli.quiet,
|
cli.quiet,
|
||||||
|
cli.codebase_fast_start.clone(),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
} else if let Some(task) = cli.task {
|
} else if let Some(task) = cli.task {
|
||||||
// Single-shot mode
|
// Single-shot mode
|
||||||
let result = agent
|
let result = agent
|
||||||
.execute_task_with_timing(&task, None, false, cli.show_prompt, cli.show_code, true)
|
.execute_task_with_timing(&task, None, false, cli.show_prompt, cli.show_code, true, None)
|
||||||
.await?;
|
.await?;
|
||||||
println!("AGENT_RESPONSE:");
|
println!("AGENT_RESPONSE:");
|
||||||
println!("{}", result.response);
|
println!("{}", result.response);
|
||||||
@@ -1212,7 +1220,7 @@ async fn execute_task<W: UiWriter>(
|
|||||||
// Execute task with cancellation support
|
// Execute task with cancellation support
|
||||||
let execution_result = tokio::select! {
|
let execution_result = tokio::select! {
|
||||||
result = agent.execute_task_with_timing_cancellable(
|
result = agent.execute_task_with_timing_cancellable(
|
||||||
input, None, false, show_prompt, show_code, true, cancellation_token.clone()
|
input, None, false, show_prompt, show_code, true, cancellation_token.clone(), None
|
||||||
) => {
|
) => {
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
@@ -1403,7 +1411,7 @@ async fn execute_task_machine(
|
|||||||
// Execute task with cancellation support
|
// Execute task with cancellation support
|
||||||
let execution_result = tokio::select! {
|
let execution_result = tokio::select! {
|
||||||
result = agent.execute_task_with_timing_cancellable(
|
result = agent.execute_task_with_timing_cancellable(
|
||||||
input, None, false, show_prompt, show_code, true, cancellation_token.clone()
|
input, None, false, show_prompt, show_code, true, cancellation_token.clone(), None
|
||||||
) => {
|
) => {
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
@@ -1552,6 +1560,7 @@ async fn run_autonomous(
|
|||||||
show_code: bool,
|
show_code: bool,
|
||||||
max_turns: usize,
|
max_turns: usize,
|
||||||
quiet: bool,
|
quiet: bool,
|
||||||
|
codebase_fast_start: Option<PathBuf>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
let start_time = std::time::Instant::now();
|
let start_time = std::time::Instant::now();
|
||||||
let output = SimpleOutput::new();
|
let output = SimpleOutput::new();
|
||||||
@@ -1684,6 +1693,39 @@ async fn run_autonomous(
|
|||||||
output.print("🎯 Starting with player implementation");
|
output.print("🎯 Starting with player implementation");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Load fast-discovery messages before the loop starts (if enabled)
|
||||||
|
let (discovery_messages, discovery_working_dir): (Vec<g3_providers::Message>, Option<String>) =
|
||||||
|
if let Some(ref codebase_path) = codebase_fast_start {
|
||||||
|
// Canonicalize the path to ensure it's absolute
|
||||||
|
let canonical_path = codebase_path.canonicalize().unwrap_or_else(|_| codebase_path.clone());
|
||||||
|
let path_str = canonical_path.to_string_lossy();
|
||||||
|
output.print(&format!("🔍 Fast-discovery mode: will explore codebase at {}", path_str));
|
||||||
|
// Get the provider from the agent and use async LLM-based discovery
|
||||||
|
match agent.get_provider() {
|
||||||
|
Ok(provider) => {
|
||||||
|
// Create a status callback that prints to output
|
||||||
|
let output_clone = output.clone();
|
||||||
|
let status_callback: g3_planner::StatusCallback = Box::new(move |msg: &str| {
|
||||||
|
output_clone.print(msg);
|
||||||
|
});
|
||||||
|
match g3_planner::get_initial_discovery_messages(&path_str, provider, Some(&status_callback)).await {
|
||||||
|
Ok(messages) => (messages, Some(path_str.to_string())),
|
||||||
|
Err(e) => {
|
||||||
|
output.print(&format!("⚠️ LLM discovery failed: {}, skipping fast-start", e));
|
||||||
|
(Vec::new(), None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
output.print(&format!("⚠️ Could not get provider: {}, skipping fast-start", e));
|
||||||
|
(Vec::new(), None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(Vec::new(), None)
|
||||||
|
};
|
||||||
|
let has_discovery = !discovery_messages.is_empty();
|
||||||
|
|
||||||
let mut turn = 1;
|
let mut turn = 1;
|
||||||
let mut coach_feedback = String::new();
|
let mut coach_feedback = String::new();
|
||||||
let mut implementation_approved = false;
|
let mut implementation_approved = false;
|
||||||
@@ -1749,6 +1791,12 @@ async fn run_autonomous(
|
|||||||
show_prompt,
|
show_prompt,
|
||||||
show_code,
|
show_code,
|
||||||
true,
|
true,
|
||||||
|
if has_discovery {
|
||||||
|
Some(DiscoveryOptions {
|
||||||
|
messages: &discovery_messages,
|
||||||
|
fast_start_path: discovery_working_dir.as_deref(),
|
||||||
|
})
|
||||||
|
} else { None },
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
@@ -1946,7 +1994,13 @@ Remember: Be clear in your review and concise in your feedback. APPROVE iff the
|
|||||||
|
|
||||||
loop {
|
loop {
|
||||||
match coach_agent
|
match coach_agent
|
||||||
.execute_task_with_timing(&coach_prompt, None, false, show_prompt, show_code, true)
|
.execute_task_with_timing(&coach_prompt, None, false, show_prompt, show_code, true,
|
||||||
|
if has_discovery {
|
||||||
|
Some(DiscoveryOptions {
|
||||||
|
messages: &discovery_messages,
|
||||||
|
fast_start_path: discovery_working_dir.as_deref(),
|
||||||
|
})
|
||||||
|
} else { None })
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
Ok(result) => {
|
Ok(result) => {
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
/// Simple output helper for printing messages
|
/// Simple output helper for printing messages
|
||||||
|
#[derive(Clone)]
|
||||||
pub struct SimpleOutput {
|
pub struct SimpleOutput {
|
||||||
machine_mode: bool,
|
machine_mode: bool,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -46,6 +46,13 @@ pub struct ToolCall {
|
|||||||
pub args: serde_json::Value, // Should be a JSON object with tool-specific arguments
|
pub args: serde_json::Value, // Should be a JSON object with tool-specific arguments
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Options for fast-start discovery execution
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct DiscoveryOptions<'a> {
|
||||||
|
pub messages: &'a [Message],
|
||||||
|
pub fast_start_path: Option<&'a str>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum StreamState {
|
pub enum StreamState {
|
||||||
Generating,
|
Generating,
|
||||||
@@ -760,6 +767,8 @@ pub struct Agent<W: UiWriter> {
|
|||||||
std::sync::Arc<tokio::sync::RwLock<Option<g3_computer_control::MacAxController>>>,
|
std::sync::Arc<tokio::sync::RwLock<Option<g3_computer_control::MacAxController>>>,
|
||||||
tool_call_count: usize,
|
tool_call_count: usize,
|
||||||
requirements_sha: Option<String>,
|
requirements_sha: Option<String>,
|
||||||
|
/// Working directory for tool execution (set by --codebase-fast-start)
|
||||||
|
working_dir: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<W: UiWriter> Agent<W> {
|
impl<W: UiWriter> Agent<W> {
|
||||||
@@ -1032,6 +1041,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
},
|
},
|
||||||
tool_call_count: 0,
|
tool_call_count: 0,
|
||||||
requirements_sha: None,
|
requirements_sha: None,
|
||||||
|
working_dir: None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1282,6 +1292,11 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
Ok((provider.name().to_string(), provider.model().to_string()))
|
Ok((provider.name().to_string(), provider.model().to_string()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the default LLM provider
|
||||||
|
pub fn get_provider(&self) -> Result<&dyn g3_providers::LLMProvider> {
|
||||||
|
self.providers.get(None)
|
||||||
|
}
|
||||||
|
|
||||||
/// Get the current session ID for this agent
|
/// Get the current session ID for this agent
|
||||||
pub fn get_session_id(&self) -> Option<&str> {
|
pub fn get_session_id(&self) -> Option<&str> {
|
||||||
self.session_id.as_deref()
|
self.session_id.as_deref()
|
||||||
@@ -1293,7 +1308,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
language: Option<&str>,
|
language: Option<&str>,
|
||||||
_auto_execute: bool,
|
_auto_execute: bool,
|
||||||
) -> Result<TaskResult> {
|
) -> Result<TaskResult> {
|
||||||
self.execute_task_with_options(description, language, false, false, false)
|
self.execute_task_with_options(description, language, false, false, false, None)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1304,6 +1319,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
_auto_execute: bool,
|
_auto_execute: bool,
|
||||||
show_prompt: bool,
|
show_prompt: bool,
|
||||||
show_code: bool,
|
show_code: bool,
|
||||||
|
discovery_options: Option<DiscoveryOptions<'_>>,
|
||||||
) -> Result<TaskResult> {
|
) -> Result<TaskResult> {
|
||||||
self.execute_task_with_timing(
|
self.execute_task_with_timing(
|
||||||
description,
|
description,
|
||||||
@@ -1312,6 +1328,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
show_prompt,
|
show_prompt,
|
||||||
show_code,
|
show_code,
|
||||||
false,
|
false,
|
||||||
|
discovery_options,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
@@ -1324,6 +1341,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
show_prompt: bool,
|
show_prompt: bool,
|
||||||
show_code: bool,
|
show_code: bool,
|
||||||
show_timing: bool,
|
show_timing: bool,
|
||||||
|
discovery_options: Option<DiscoveryOptions<'_>>,
|
||||||
) -> Result<TaskResult> {
|
) -> Result<TaskResult> {
|
||||||
// Create a cancellation token that never cancels for backward compatibility
|
// Create a cancellation token that never cancels for backward compatibility
|
||||||
let cancellation_token = CancellationToken::new();
|
let cancellation_token = CancellationToken::new();
|
||||||
@@ -1335,6 +1353,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
show_code,
|
show_code,
|
||||||
show_timing,
|
show_timing,
|
||||||
cancellation_token,
|
cancellation_token,
|
||||||
|
discovery_options,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
@@ -1349,6 +1368,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
show_code: bool,
|
show_code: bool,
|
||||||
show_timing: bool,
|
show_timing: bool,
|
||||||
cancellation_token: CancellationToken,
|
cancellation_token: CancellationToken,
|
||||||
|
discovery_options: Option<DiscoveryOptions<'_>>,
|
||||||
) -> Result<TaskResult> {
|
) -> Result<TaskResult> {
|
||||||
// Execute the task directly without splitting
|
// Execute the task directly without splitting
|
||||||
self.execute_single_task(
|
self.execute_single_task(
|
||||||
@@ -1357,6 +1377,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
show_code,
|
show_code,
|
||||||
show_timing,
|
show_timing,
|
||||||
cancellation_token,
|
cancellation_token,
|
||||||
|
discovery_options,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
@@ -1368,6 +1389,7 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
_show_code: bool,
|
_show_code: bool,
|
||||||
show_timing: bool,
|
show_timing: bool,
|
||||||
cancellation_token: CancellationToken,
|
cancellation_token: CancellationToken,
|
||||||
|
discovery_options: Option<DiscoveryOptions<'_>>,
|
||||||
) -> Result<TaskResult> {
|
) -> Result<TaskResult> {
|
||||||
// Reset the JSON tool call filter state at the start of each new task
|
// Reset the JSON tool call filter state at the start of each new task
|
||||||
// This prevents the filter from staying in suppression mode between user interactions
|
// This prevents the filter from staying in suppression mode between user interactions
|
||||||
@@ -1385,6 +1407,39 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
let user_message = Message::new(MessageRole::User, format!("Task: {}", description));
|
let user_message = Message::new(MessageRole::User, format!("Task: {}", description));
|
||||||
self.context_window.add_message(user_message);
|
self.context_window.add_message(user_message);
|
||||||
|
|
||||||
|
// Execute fast-discovery tool calls if provided (immediately after user message)
|
||||||
|
if let Some(ref options) = discovery_options {
|
||||||
|
self.ui_writer.println("▶️ Playing back discovery commands...");
|
||||||
|
// Store the working directory for subsequent tool calls in the streaming loop
|
||||||
|
if let Some(path) = options.fast_start_path {
|
||||||
|
self.working_dir = Some(path.to_string());
|
||||||
|
}
|
||||||
|
let provider = self.providers.get(None)?;
|
||||||
|
let supports_cache = provider.supports_cache_control();
|
||||||
|
let message_count = options.messages.len();
|
||||||
|
|
||||||
|
for (idx, discovery_msg) in options.messages.iter().enumerate() {
|
||||||
|
if let Ok(tool_call) = serde_json::from_str::<ToolCall>(&discovery_msg.content) {
|
||||||
|
self.add_message_to_context(discovery_msg.clone());
|
||||||
|
let result = self.execute_tool_call_in_dir(&tool_call, options.fast_start_path).await
|
||||||
|
.unwrap_or_else(|e| format!("Error: {}", e));
|
||||||
|
|
||||||
|
// Add cache_control to the last user message if provider supports it (anthropic)
|
||||||
|
let is_last = idx == message_count - 1;
|
||||||
|
let result_message = if is_last && supports_cache {
|
||||||
|
Message::with_cache_control(
|
||||||
|
MessageRole::User,
|
||||||
|
format!("Tool result: {}", result),
|
||||||
|
CacheControl::ephemeral(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
Message::new(MessageRole::User, format!("Tool result: {}", result))
|
||||||
|
};
|
||||||
|
self.add_message_to_context(result_message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Use the complete conversation history for the request
|
// Use the complete conversation history for the request
|
||||||
let messages = self.context_window.conversation_history.clone();
|
let messages = self.context_window.conversation_history.clone();
|
||||||
|
|
||||||
@@ -1575,6 +1630,24 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
&self.context_window
|
&self.context_window
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Add a message directly to the context window.
|
||||||
|
/// Used for injecting discovery messages before the first LLM turn.
|
||||||
|
pub fn add_message_to_context(&mut self, message: Message) {
|
||||||
|
self.context_window.add_message(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute a tool call and return the result.
|
||||||
|
/// This is a public wrapper around execute_tool for use by external callers
|
||||||
|
/// like the planner's fast-discovery feature.
|
||||||
|
pub async fn execute_tool_call(&mut self, tool_call: &ToolCall) -> Result<String> {
|
||||||
|
self.execute_tool(tool_call).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute a tool call with an optional working directory (for discovery commands)
|
||||||
|
pub async fn execute_tool_call_in_dir(&mut self, tool_call: &ToolCall, working_dir: Option<&str>) -> Result<String> {
|
||||||
|
self.execute_tool_in_dir(tool_call, working_dir).await
|
||||||
|
}
|
||||||
|
|
||||||
/// Log an error message to the session JSON file as the last message
|
/// Log an error message to the session JSON file as the last message
|
||||||
/// This is used in autonomous mode to record context length exceeded errors
|
/// This is used in autonomous mode to record context length exceeded errors
|
||||||
pub fn log_error_to_session(
|
pub fn log_error_to_session(
|
||||||
@@ -3157,11 +3230,14 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
self.ui_writer.print_tool_output_header();
|
self.ui_writer.print_tool_output_header();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Clone working_dir to avoid borrow checker issues
|
||||||
|
let working_dir = self.working_dir.clone();
|
||||||
let exec_start = Instant::now();
|
let exec_start = Instant::now();
|
||||||
// Add 8-minute timeout for tool execution
|
// Add 8-minute timeout for tool execution
|
||||||
let tool_result = match tokio::time::timeout(
|
let tool_result = match tokio::time::timeout(
|
||||||
Duration::from_secs(8 * 60), // 8 minutes
|
Duration::from_secs(8 * 60), // 8 minutes
|
||||||
self.execute_tool(&tool_call),
|
// Use working_dir if set (from --codebase-fast-start)
|
||||||
|
self.execute_tool_in_dir(&tool_call, working_dir.as_deref()),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
@@ -3707,8 +3783,17 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
pub async fn execute_tool(&mut self, tool_call: &ToolCall) -> Result<String> {
|
pub async fn execute_tool(&mut self, tool_call: &ToolCall) -> Result<String> {
|
||||||
// Increment tool call count
|
// Increment tool call count
|
||||||
self.tool_call_count += 1;
|
self.tool_call_count += 1;
|
||||||
|
self.execute_tool_in_dir(tool_call, None).await
|
||||||
|
}
|
||||||
|
|
||||||
let result = self.execute_tool_inner(tool_call).await;
|
/// Execute a tool with an optional working directory (for discovery commands)
|
||||||
|
pub async fn execute_tool_in_dir(&mut self, tool_call: &ToolCall, working_dir: Option<&str>) -> Result<String> {
|
||||||
|
// Only increment tool call count if not already incremented by execute_tool
|
||||||
|
if working_dir.is_some() {
|
||||||
|
self.tool_call_count += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = self.execute_tool_inner_in_dir(tool_call, working_dir).await;
|
||||||
let log_str = match &result {
|
let log_str = match &result {
|
||||||
Ok(s) => s.clone(),
|
Ok(s) => s.clone(),
|
||||||
Err(e) => format!("ERROR: {}", e),
|
Err(e) => format!("ERROR: {}", e),
|
||||||
@@ -3717,9 +3802,12 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute_tool_inner(&mut self, tool_call: &ToolCall) -> Result<String> {
|
async fn execute_tool_inner_in_dir(&mut self, tool_call: &ToolCall, working_dir: Option<&str>) -> Result<String> {
|
||||||
debug!("=== EXECUTING TOOL ===");
|
debug!("=== EXECUTING TOOL ===");
|
||||||
debug!("Tool name: {}", tool_call.tool);
|
debug!("Tool name: {}", tool_call.tool);
|
||||||
|
eprintln!("[DEBUG execute_tool_inner_in_dir] tool='{}' working_dir={:?} args={}",
|
||||||
|
tool_call.tool, working_dir, serde_json::to_string(&tool_call.args).unwrap_or_default());
|
||||||
|
debug!("Working directory passed to execute_tool_inner_in_dir: {:?}", working_dir);
|
||||||
debug!("Tool args (raw): {:?}", tool_call.args);
|
debug!("Tool args (raw): {:?}", tool_call.args);
|
||||||
debug!(
|
debug!(
|
||||||
"Tool args (JSON): {}",
|
"Tool args (JSON): {}",
|
||||||
@@ -3754,9 +3842,11 @@ impl<W: UiWriter> Agent<W> {
|
|||||||
let receiver = ToolOutputReceiver {
|
let receiver = ToolOutputReceiver {
|
||||||
ui_writer: &self.ui_writer,
|
ui_writer: &self.ui_writer,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
debug!("ABOUT TO CALL execute_bash_streaming_in_dir: escaped_command='{}', working_dir={:?}", escaped_command, working_dir);
|
||||||
|
|
||||||
match executor
|
match executor
|
||||||
.execute_bash_streaming(&escaped_command, &receiver)
|
.execute_bash_streaming_in_dir(&escaped_command, &receiver, working_dir)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
Ok(result) => {
|
Ok(result) => {
|
||||||
|
|||||||
@@ -5,6 +5,17 @@ use tempfile::NamedTempFile;
|
|||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use tracing::{info, debug, error};
|
use tracing::{info, debug, error};
|
||||||
|
|
||||||
|
/// Expand tilde (~) in a path to the user's home directory
|
||||||
|
fn expand_tilde(path: &str) -> String {
|
||||||
|
if path.starts_with("~") {
|
||||||
|
if let Some(home) = std::env::var_os("HOME") {
|
||||||
|
let home_str = home.to_string_lossy();
|
||||||
|
return path.replacen("~", &home_str, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
path.to_string()
|
||||||
|
}
|
||||||
|
|
||||||
pub struct CodeExecutor {
|
pub struct CodeExecutor {
|
||||||
// Future: add configuration for execution limits, sandboxing, etc.
|
// Future: add configuration for execution limits, sandboxing, etc.
|
||||||
}
|
}
|
||||||
@@ -241,11 +252,33 @@ impl CodeExecutor {
|
|||||||
&self,
|
&self,
|
||||||
code: &str,
|
code: &str,
|
||||||
receiver: &R
|
receiver: &R
|
||||||
|
) -> Result<ExecutionResult> {
|
||||||
|
self.execute_bash_streaming_in_dir(code, receiver, None).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute bash command with streaming output in a specific directory
|
||||||
|
pub async fn execute_bash_streaming_in_dir<R: OutputReceiver>(
|
||||||
|
&self,
|
||||||
|
code: &str,
|
||||||
|
receiver: &R,
|
||||||
|
working_dir: Option<&str>,
|
||||||
) -> Result<ExecutionResult> {
|
) -> Result<ExecutionResult> {
|
||||||
use std::process::Stdio;
|
use std::process::Stdio;
|
||||||
use tokio::io::{AsyncBufReadExt, BufReader};
|
use tokio::io::{AsyncBufReadExt, BufReader};
|
||||||
use tokio::process::Command as TokioCommand;
|
use tokio::process::Command as TokioCommand;
|
||||||
|
|
||||||
|
// CRITICAL DEBUG: Print to stderr so it's always visible
|
||||||
|
debug!("========== execute_bash_streaming_in_dir START ==========");
|
||||||
|
debug!("Code to execute: {}", code);
|
||||||
|
debug!("Working directory parameter: {:?}", working_dir);
|
||||||
|
debug!("FULL DIAGNOSTIC: code='{}', working_dir={:?}", code, working_dir);
|
||||||
|
|
||||||
|
if let Some(dir) = working_dir {
|
||||||
|
debug!("Working dir exists check: {}", std::path::Path::new(dir).exists());
|
||||||
|
debug!("Working dir is_dir check: {}", std::path::Path::new(dir).is_dir());
|
||||||
|
}
|
||||||
|
debug!("Current process working directory: {:?}", std::env::current_dir());
|
||||||
|
|
||||||
// Check if this is a detached/daemon command that should run independently
|
// Check if this is a detached/daemon command that should run independently
|
||||||
// Look for patterns like: setsid, nohup with &, or explicit backgrounding with disown
|
// Look for patterns like: setsid, nohup with &, or explicit backgrounding with disown
|
||||||
let is_detached = code.trim_start().starts_with("setsid ")
|
let is_detached = code.trim_start().starts_with("setsid ")
|
||||||
@@ -255,10 +288,17 @@ impl CodeExecutor {
|
|||||||
|
|
||||||
if is_detached {
|
if is_detached {
|
||||||
// For detached commands, just spawn and return immediately
|
// For detached commands, just spawn and return immediately
|
||||||
TokioCommand::new("bash")
|
let mut cmd = TokioCommand::new("bash");
|
||||||
.arg("-c")
|
cmd.arg("-c")
|
||||||
.arg(code)
|
.arg(code);
|
||||||
.spawn()?;
|
|
||||||
|
// Set working directory if provided
|
||||||
|
if let Some(dir) = working_dir {
|
||||||
|
let expanded_dir = expand_tilde(dir);
|
||||||
|
cmd.current_dir(&expanded_dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd.spawn()?;
|
||||||
|
|
||||||
// Don't wait for the process - it's meant to run independently
|
// Don't wait for the process - it's meant to run independently
|
||||||
return Ok(ExecutionResult {
|
return Ok(ExecutionResult {
|
||||||
@@ -269,12 +309,33 @@ impl CodeExecutor {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut child = TokioCommand::new("bash")
|
let mut cmd = TokioCommand::new("bash");
|
||||||
.arg("-c")
|
cmd.arg("-c")
|
||||||
.arg(code)
|
.arg(code)
|
||||||
.stdout(Stdio::piped())
|
.stdout(Stdio::piped())
|
||||||
.stderr(Stdio::piped())
|
.stderr(Stdio::piped());
|
||||||
.spawn()?;
|
|
||||||
|
// Set working directory if provided
|
||||||
|
if let Some(dir) = working_dir {
|
||||||
|
debug!("Setting current_dir on command to: {}", dir);
|
||||||
|
let expanded_dir = expand_tilde(dir);
|
||||||
|
debug!("Expanded working dir: {}", expanded_dir);
|
||||||
|
debug!("Expanded dir exists: {}", std::path::Path::new(&expanded_dir).exists());
|
||||||
|
debug!("Expanded dir is_dir: {}", std::path::Path::new(&expanded_dir).is_dir());
|
||||||
|
cmd.current_dir(&expanded_dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("About to spawn command...");
|
||||||
|
let spawn_result = cmd.spawn();
|
||||||
|
debug!("Spawn result: {:?}", spawn_result.is_ok());
|
||||||
|
let mut child = match spawn_result {
|
||||||
|
Ok(c) => c,
|
||||||
|
Err(e) => {
|
||||||
|
debug!("SPAWN ERROR: {:?}", e);
|
||||||
|
return Err(e.into());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
debug!("Command spawned successfully");
|
||||||
|
|
||||||
let stdout = child.stdout.take().unwrap();
|
let stdout = child.stdout.take().unwrap();
|
||||||
let stderr = child.stderr.take().unwrap();
|
let stderr = child.stderr.take().unwrap();
|
||||||
@@ -322,12 +383,23 @@ impl CodeExecutor {
|
|||||||
|
|
||||||
let status = child.wait().await?;
|
let status = child.wait().await?;
|
||||||
|
|
||||||
Ok(ExecutionResult {
|
let result = ExecutionResult {
|
||||||
stdout: stdout_output.join("\n"),
|
stdout: stdout_output.join("\n"),
|
||||||
stderr: stderr_output.join("\n"),
|
stderr: stderr_output.join("\n"),
|
||||||
exit_code: status.code().unwrap_or(-1),
|
exit_code: status.code().unwrap_or(-1),
|
||||||
success: status.success(),
|
success: status.success(),
|
||||||
})
|
};
|
||||||
|
|
||||||
|
debug!("========== execute_bash_streaming_in_dir END ==========");
|
||||||
|
debug!("Exit code: {}", result.exit_code);
|
||||||
|
debug!("Success: {}", result.success);
|
||||||
|
debug!("Stdout length: {}", result.stdout.len());
|
||||||
|
debug!("Stderr length: {}", result.stderr.len());
|
||||||
|
if !result.stderr.is_empty() {
|
||||||
|
debug!("Stderr content: {}", result.stderr);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(result)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
13
crates/g3-planner/Cargo.toml
Normal file
13
crates/g3-planner/Cargo.toml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
[package]
|
||||||
|
name = "g3-planner"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
description = "Fast-discovery planner for G3 AI coding agent"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
g3-providers = { path = "../g3-providers" }
|
||||||
|
serde = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
const_format = "0.2"
|
||||||
|
anyhow = { workspace = true }
|
||||||
|
tokio = { workspace = true }
|
||||||
724
crates/g3-planner/src/code_explore.rs
Normal file
724
crates/g3-planner/src/code_explore.rs
Normal file
@@ -0,0 +1,724 @@
|
|||||||
|
//! Code exploration module for analyzing codebases
|
||||||
|
//!
|
||||||
|
//! This module provides functions to explore and analyze codebases
|
||||||
|
//! for various programming languages, returning structured reports
|
||||||
|
//! about the code structure.
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
/// Main entry point for exploring a codebase at the given path.
|
||||||
|
/// Detects which languages are present and generates a comprehensive report.
|
||||||
|
pub fn explore_codebase(path: &str) -> String {
|
||||||
|
let path = expand_tilde(path);
|
||||||
|
let mut report = String::new();
|
||||||
|
let mut languages_found = Vec::new();
|
||||||
|
|
||||||
|
// Check for each language and add to report if found
|
||||||
|
if has_rust_files(&path) {
|
||||||
|
languages_found.push("Rust".to_string());
|
||||||
|
report.push_str(&explore_rust(&path));
|
||||||
|
}
|
||||||
|
if has_java_files(&path) {
|
||||||
|
languages_found.push("Java".to_string());
|
||||||
|
report.push_str(&explore_java(&path));
|
||||||
|
}
|
||||||
|
if has_kotlin_files(&path) {
|
||||||
|
languages_found.push("Kotlin".to_string());
|
||||||
|
report.push_str(&explore_kotlin(&path));
|
||||||
|
}
|
||||||
|
if has_swift_files(&path) {
|
||||||
|
languages_found.push("Swift".to_string());
|
||||||
|
report.push_str(&explore_swift(&path));
|
||||||
|
}
|
||||||
|
if has_go_files(&path) {
|
||||||
|
languages_found.push("Go".to_string());
|
||||||
|
report.push_str(&explore_go(&path));
|
||||||
|
}
|
||||||
|
if has_python_files(&path) {
|
||||||
|
languages_found.push("Python".to_string());
|
||||||
|
report.push_str(&explore_python(&path));
|
||||||
|
}
|
||||||
|
if has_typescript_files(&path) {
|
||||||
|
languages_found.push("TypeScript".to_string());
|
||||||
|
report.push_str(&explore_typescript(&path));
|
||||||
|
}
|
||||||
|
if has_javascript_files(&path) {
|
||||||
|
languages_found.push("JavaScript".to_string());
|
||||||
|
report.push_str(&explore_javascript(&path));
|
||||||
|
}
|
||||||
|
if has_cpp_files(&path) {
|
||||||
|
languages_found.push("C/C++".to_string());
|
||||||
|
report.push_str(&explore_cpp(&path));
|
||||||
|
}
|
||||||
|
if has_markdown_files(&path) {
|
||||||
|
languages_found.push("Markdown".to_string());
|
||||||
|
report.push_str(&explore_markdown(&path));
|
||||||
|
}
|
||||||
|
if has_yaml_files(&path) {
|
||||||
|
languages_found.push("YAML".to_string());
|
||||||
|
report.push_str(&explore_yaml(&path));
|
||||||
|
}
|
||||||
|
if has_sql_files(&path) {
|
||||||
|
languages_found.push("SQL".to_string());
|
||||||
|
report.push_str(&explore_sql(&path));
|
||||||
|
}
|
||||||
|
if has_ruby_files(&path) {
|
||||||
|
languages_found.push("Ruby".to_string());
|
||||||
|
report.push_str(&explore_ruby(&path));
|
||||||
|
}
|
||||||
|
|
||||||
|
if languages_found.is_empty() {
|
||||||
|
report.push_str("No recognized programming languages found in the codebase.\n");
|
||||||
|
} else {
|
||||||
|
let header = format!(
|
||||||
|
"=== CODEBASE ANALYSIS ===\nLanguages detected: {}\n\n",
|
||||||
|
languages_found.join(", ")
|
||||||
|
);
|
||||||
|
report = header + &report;
|
||||||
|
}
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Expand tilde to home directory
|
||||||
|
/// Expand a leading tilde to the user's home directory.
///
/// Handles both `~/sub/path` and a bare `~` (the original only expanded the
/// `~/` prefix, leaving a lone `~` untouched). Paths like `~otheruser/x` are
/// intentionally NOT expanded. If `HOME` is unset, the input is returned
/// unchanged.
fn expand_tilde(path: &str) -> String {
    if path == "~" || path.starts_with("~/") {
        if let Some(home) = std::env::var_os("HOME") {
            // Replace only the first '~' so any later tildes survive.
            return path.replacen('~', &home.to_string_lossy(), 1);
        }
    }
    path.to_string()
}
|
||||||
|
|
||||||
|
/// Run a shell command and return its output
|
||||||
|
/// Run `cmd` through `sh -c` inside `working_dir` and return its output.
///
/// Stdout wins when non-empty; otherwise stderr is returned with a
/// `(stderr): ` prefix; otherwise an empty string. A failure to spawn the
/// shell itself is reported as an `Error running command: …` string rather
/// than an error type, since callers just embed the text in a report.
fn run_command(cmd: &str, working_dir: &str) -> String {
    let spawned = Command::new("sh")
        .arg("-c")
        .arg(cmd)
        .current_dir(working_dir)
        .output();

    let out = match spawned {
        Ok(o) => o,
        Err(e) => return format!("Error running command: {}", e),
    };

    let stdout = String::from_utf8_lossy(&out.stdout);
    if !stdout.is_empty() {
        return stdout.to_string();
    }

    let stderr = String::from_utf8_lossy(&out.stderr);
    if stderr.is_empty() {
        String::new()
    } else {
        format!("(stderr): {}", stderr)
    }
}
|
||||||
|
|
||||||
|
/// Check if files with given extension exist
|
||||||
|
fn has_files_with_extension(path: &str, extension: &str) -> bool {
|
||||||
|
let cmd = format!(
|
||||||
|
"find . -name '.git' -prune -o -type f -name '*.{}' -print | head -1",
|
||||||
|
extension
|
||||||
|
);
|
||||||
|
!run_command(&cmd, path).trim().is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Language detection functions
|
||||||
|
fn has_rust_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "rs") || Path::new(path).join("Cargo.toml").exists()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_java_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "java")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_kotlin_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "kt") || has_files_with_extension(path, "kts")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_swift_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "swift")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_go_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "go")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_python_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "py")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_typescript_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "ts") || has_files_with_extension(path, "tsx")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_javascript_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "js") || has_files_with_extension(path, "jsx")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_cpp_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "cpp")
|
||||||
|
|| has_files_with_extension(path, "cc")
|
||||||
|
|| has_files_with_extension(path, "c")
|
||||||
|
|| has_files_with_extension(path, "h")
|
||||||
|
|| has_files_with_extension(path, "hpp")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_markdown_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "md")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_yaml_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "yaml") || has_files_with_extension(path, "yml")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_sql_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "sql")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_ruby_files(path: &str) -> bool {
|
||||||
|
has_files_with_extension(path, "rb")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Rust codebase
|
||||||
|
pub fn explore_rust(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== RUST ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.rs' . 2>/dev/null | grep -v '/target/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Dependencies (Cargo.toml)
|
||||||
|
report.push_str("--- Dependencies (Cargo.toml) ---\n");
|
||||||
|
let cargo = run_command("cat Cargo.toml 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&cargo);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Data structures
|
||||||
|
report.push_str("--- Data Structures (Structs, Enums, Types) ---\n");
|
||||||
|
let structs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.rs' '^(pub )?(struct|enum|type|union) ' . 2>/dev/null | grep -v '/target/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&structs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Traits and implementations
|
||||||
|
report.push_str("--- Traits & Implementations ---\n");
|
||||||
|
let traits = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.rs' '^(pub )?trait |^impl ' . 2>/dev/null | grep -v '/target/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&traits);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Public functions
|
||||||
|
report.push_str("--- Public Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.rs' '^pub (async )?fn ' . 2>/dev/null | grep -v '/target/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Java codebase
|
||||||
|
pub fn explore_java(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== JAVA ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.java' . 2>/dev/null | grep -v '/build/' | grep -v '/target/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Build files
|
||||||
|
report.push_str("--- Build Configuration ---\n");
|
||||||
|
let build = run_command(
|
||||||
|
"cat pom.xml 2>/dev/null | head -50 || cat build.gradle 2>/dev/null | head -50",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&build);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes and interfaces
|
||||||
|
report.push_str("--- Classes & Interfaces ---\n");
|
||||||
|
let classes = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.java' '^(public |private |protected )?(abstract )?(class|interface|enum|record) ' . 2>/dev/null | grep -v '/build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&classes);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Public methods
|
||||||
|
report.push_str("--- Public Methods ---\n");
|
||||||
|
let methods = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.java' '^\s+public .+\(' . 2>/dev/null | grep -v '/build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&methods);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Kotlin codebase
|
||||||
|
pub fn explore_kotlin(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== KOTLIN ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.kt' -g '*.kts' . 2>/dev/null | grep -v '/build/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Build files
|
||||||
|
report.push_str("--- Build Configuration ---\n");
|
||||||
|
let build = run_command("cat build.gradle.kts 2>/dev/null | head -50 || cat build.gradle 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&build);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes, objects, interfaces
|
||||||
|
report.push_str("--- Classes, Objects & Interfaces ---\n");
|
||||||
|
let classes = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.kt' '^(data |sealed |open |abstract )?(class|interface|object|enum class) ' . 2>/dev/null | grep -v '/build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&classes);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions
|
||||||
|
report.push_str("--- Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.kt' '^(suspend |private |internal |public )?fun ' . 2>/dev/null | grep -v '/build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Swift codebase
|
||||||
|
pub fn explore_swift(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== SWIFT ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.swift' . 2>/dev/null | grep -v '/.build/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Package.swift
|
||||||
|
report.push_str("--- Package Configuration ---\n");
|
||||||
|
let pkg = run_command("cat Package.swift 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&pkg);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes, structs, protocols
|
||||||
|
report.push_str("--- Types (Classes, Structs, Protocols, Enums) ---\n");
|
||||||
|
let types = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.swift' '^(public |private |internal |open |final )?(class|struct|protocol|enum|actor) ' . 2>/dev/null | grep -v '/.build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&types);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions
|
||||||
|
report.push_str("--- Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.swift' '^\s*(public |private |internal |open )?func ' . 2>/dev/null | grep -v '/.build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Go codebase
|
||||||
|
pub fn explore_go(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== GO ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.go' . 2>/dev/null | grep -v '/vendor/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// go.mod
|
||||||
|
report.push_str("--- Module Configuration ---\n");
|
||||||
|
let gomod = run_command("cat go.mod 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&gomod);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Types (structs, interfaces)
|
||||||
|
report.push_str("--- Types (Structs & Interfaces) ---\n");
|
||||||
|
let types = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.go' '^type .+ (struct|interface)' . 2>/dev/null | grep -v '/vendor/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&types);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions
|
||||||
|
report.push_str("--- Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.go' '^func ' . 2>/dev/null | grep -v '/vendor/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Python codebase
|
||||||
|
pub fn explore_python(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== PYTHON ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.py' . 2>/dev/null | grep -v '/__pycache__/' | grep -v '/venv/' | grep -v '/.venv/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Requirements/setup
|
||||||
|
report.push_str("--- Dependencies ---\n");
|
||||||
|
let deps = run_command(
|
||||||
|
"cat requirements.txt 2>/dev/null | head -30 || cat pyproject.toml 2>/dev/null | head -50 || cat setup.py 2>/dev/null | head -30",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&deps);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes
|
||||||
|
report.push_str("--- Classes ---\n");
|
||||||
|
let classes = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.py' '^class ' . 2>/dev/null | grep -v '/__pycache__/' | grep -v '/venv/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&classes);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions
|
||||||
|
report.push_str("--- Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.py' '^def |^async def ' . 2>/dev/null | grep -v '/__pycache__/' | grep -v '/venv/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore TypeScript codebase
|
||||||
|
pub fn explore_typescript(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== TYPESCRIPT ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.ts' -g '*.tsx' . 2>/dev/null | grep -v '/node_modules/' | grep -v '/dist/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// package.json
|
||||||
|
report.push_str("--- Package Configuration ---\n");
|
||||||
|
let pkg = run_command("cat package.json 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&pkg);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Types, interfaces, classes
|
||||||
|
report.push_str("--- Types, Interfaces & Classes ---\n");
|
||||||
|
let types = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.ts' -g '*.tsx' '^export (type|interface|class|enum|abstract class) ' . 2>/dev/null | grep -v '/node_modules/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&types);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions
|
||||||
|
report.push_str("--- Exported Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.ts' -g '*.tsx' '^export (async )?function |^export const .+ = (async )?\(' . 2>/dev/null | grep -v '/node_modules/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore JavaScript codebase
|
||||||
|
pub fn explore_javascript(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== JAVASCRIPT ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.js' -g '*.jsx' . 2>/dev/null | grep -v '/node_modules/' | grep -v '/dist/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// package.json
|
||||||
|
report.push_str("--- Package Configuration ---\n");
|
||||||
|
let pkg = run_command("cat package.json 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&pkg);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes
|
||||||
|
report.push_str("--- Classes ---\n");
|
||||||
|
let classes = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.js' -g '*.jsx' '^(export )?(default )?(class ) ' . 2>/dev/null | grep -v '/node_modules/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&classes);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions
|
||||||
|
report.push_str("--- Exported Functions ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.js' -g '*.jsx' '^(export )?(async )?function |^module\.exports' . 2>/dev/null | grep -v '/node_modules/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore C/C++ codebase
|
||||||
|
pub fn explore_cpp(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== C/C++ ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.c' -g '*.cpp' -g '*.cc' -g '*.h' -g '*.hpp' . 2>/dev/null | grep -v '/build/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Build files
|
||||||
|
report.push_str("--- Build Configuration ---\n");
|
||||||
|
let build = run_command(
|
||||||
|
"cat CMakeLists.txt 2>/dev/null | head -50 || cat Makefile 2>/dev/null | head -50",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&build);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes and structs
|
||||||
|
report.push_str("--- Classes & Structs ---\n");
|
||||||
|
let classes = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.cpp' -g '*.cc' -g '*.h' -g '*.hpp' '^(class|struct|enum|union|typedef) ' . 2>/dev/null | grep -v '/build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&classes);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Functions (simplified pattern)
|
||||||
|
report.push_str("--- Function Declarations ---\n");
|
||||||
|
let funcs = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.h' -g '*.hpp' '^[a-zA-Z_][a-zA-Z0-9_<>: ]*\s+[a-zA-Z_][a-zA-Z0-9_]*\s*\(' . 2>/dev/null | grep -v '/build/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&funcs);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Markdown documentation
|
||||||
|
pub fn explore_markdown(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== MARKDOWN DOCUMENTATION ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- Documentation Files ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.md' . 2>/dev/null | grep -v '/node_modules/' | grep -v '/vendor/' | sort | head -50",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// README content
|
||||||
|
report.push_str("--- README Overview ---\n");
|
||||||
|
let readme = run_command(
|
||||||
|
"cat README.md 2>/dev/null | head -100 || cat readme.md 2>/dev/null | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&readme);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Headers from all markdown files
|
||||||
|
report.push_str("--- Document Headers ---\n");
|
||||||
|
let headers = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename -g '*.md' '^#{1,3} ' . 2>/dev/null | grep -v '/node_modules/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&headers);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore YAML configuration files
|
||||||
|
pub fn explore_yaml(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== YAML CONFIGURATION ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- YAML Files ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.yaml' -g '*.yml' . 2>/dev/null | grep -v '/node_modules/' | grep -v '/vendor/' | sort | head -50",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Top-level keys from YAML files
|
||||||
|
report.push_str("--- Top-Level Keys ---\n");
|
||||||
|
let keys = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename -g '*.yaml' -g '*.yml' '^[a-zA-Z_][a-zA-Z0-9_-]*:' . 2>/dev/null | grep -v '/node_modules/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&keys);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore SQL files
|
||||||
|
pub fn explore_sql(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== SQL ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- SQL Files ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.sql' . 2>/dev/null | sort | head -50",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Tables
|
||||||
|
report.push_str("--- Table Definitions ---\n");
|
||||||
|
let tables = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename -i -g '*.sql' 'CREATE TABLE' . 2>/dev/null | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&tables);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Views and procedures
|
||||||
|
report.push_str("--- Views & Procedures ---\n");
|
||||||
|
let views = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename -i -g '*.sql' 'CREATE (VIEW|PROCEDURE|FUNCTION)' . 2>/dev/null | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&views);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explore Ruby codebase
|
||||||
|
pub fn explore_ruby(path: &str) -> String {
|
||||||
|
let mut report = String::new();
|
||||||
|
report.push_str("\n=== RUBY ===\n\n");
|
||||||
|
|
||||||
|
// File structure
|
||||||
|
report.push_str("--- File Structure ---\n");
|
||||||
|
let files = run_command(
|
||||||
|
"rg --files -g '*.rb' . 2>/dev/null | grep -v '/vendor/' | sort | head -100",
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&files);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Gemfile
|
||||||
|
report.push_str("--- Dependencies (Gemfile) ---\n");
|
||||||
|
let gemfile = run_command("cat Gemfile 2>/dev/null | head -50", path);
|
||||||
|
report.push_str(&gemfile);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Classes and modules
|
||||||
|
report.push_str("--- Classes & Modules ---\n");
|
||||||
|
let classes = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.rb' '^(class|module) ' . 2>/dev/null | grep -v '/vendor/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&classes);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
// Methods
|
||||||
|
report.push_str("--- Methods ---\n");
|
||||||
|
let methods = run_command(
|
||||||
|
r#"rg --no-heading --line-number --with-filename --max-filesize 500K -g '*.rb' '^\s*def ' . 2>/dev/null | grep -v '/vendor/' | head -100"#,
|
||||||
|
path,
|
||||||
|
);
|
||||||
|
report.push_str(&methods);
|
||||||
|
report.push('\n');
|
||||||
|
|
||||||
|
report
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_expand_tilde() {
        // After expansion, the leading tilde must no longer be present.
        let expanded = expand_tilde("~/test");
        assert!(!expanded.starts_with('~'));
    }

    #[test]
    fn test_explore_codebase_returns_string() {
        // Exploring the crate's own directory must yield a non-empty report.
        assert!(!explore_codebase(".").is_empty());
    }
}
|
||||||
253
crates/g3-planner/src/lib.rs
Normal file
253
crates/g3-planner/src/lib.rs
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
//! g3-planner: Fast-discovery planner for G3 AI coding agent
|
||||||
|
//!
|
||||||
|
//! This crate provides functionality to generate initial discovery tool calls
|
||||||
|
//! that are injected into the conversation before the first LLM turn.
|
||||||
|
|
||||||
|
mod code_explore;
|
||||||
|
pub mod prompts;
|
||||||
|
|
||||||
|
pub use code_explore::explore_codebase;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use g3_providers::{CompletionRequest, LLMProvider, Message, MessageRole};
|
||||||
|
use prompts::{DISCOVERY_REQUIREMENTS_PROMPT, DISCOVERY_SYSTEM_PROMPT};
|
||||||
|
|
||||||
|
/// Type alias for a status callback function
|
||||||
|
pub type StatusCallback = Box<dyn Fn(&str) + Send + Sync>;
|
||||||
|
|
||||||
|
/// Generates initial discovery messages for fast codebase exploration.
|
||||||
|
///
|
||||||
|
/// This function:
|
||||||
|
/// 1. Runs explore_codebase to get a codebase report
|
||||||
|
/// 2. Sends the report to the LLM with DISCOVERY_SYSTEM_PROMPT
|
||||||
|
/// 3. Extracts shell commands from the LLM response
|
||||||
|
/// 4. Returns Assistant messages with tool calls for each command
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `codebase_path` - The path to the codebase to explore
|
||||||
|
/// * `provider` - An LLM provider to query for exploration commands
|
||||||
|
/// * `status_callback` - Optional callback for status updates
|
||||||
|
///
|
||||||
|
/// # Returns
|
||||||
|
///
|
||||||
|
/// A `Result<Vec<Message>>` containing Assistant messages with JSON tool call strings.
|
||||||
|
pub async fn get_initial_discovery_messages(
|
||||||
|
codebase_path: &str,
|
||||||
|
provider: &dyn LLMProvider,
|
||||||
|
status_callback: Option<&StatusCallback>,
|
||||||
|
) -> Result<Vec<Message>> {
|
||||||
|
// Helper to call status callback if provided
|
||||||
|
let status = |msg: &str| {
|
||||||
|
if let Some(cb) = status_callback {
|
||||||
|
cb(msg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
status("🔍 Starting code discovery...");
|
||||||
|
|
||||||
|
// Step 1: Run explore_codebase to get the codebase report
|
||||||
|
let codebase_report = explore_codebase(codebase_path);
|
||||||
|
|
||||||
|
// Step 2: Build the prompt with the codebase report appended
|
||||||
|
let user_prompt = format!(
|
||||||
|
"{}\n\n=== CODEBASE REPORT ===\n\n{}",
|
||||||
|
DISCOVERY_REQUIREMENTS_PROMPT, codebase_report
|
||||||
|
);
|
||||||
|
|
||||||
|
// Step 3: Create messages for the LLM
|
||||||
|
let messages = vec![
|
||||||
|
Message::new(MessageRole::System, DISCOVERY_SYSTEM_PROMPT.to_string()),
|
||||||
|
Message::new(MessageRole::User, user_prompt),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Step 4: Send to LLM
|
||||||
|
let request = CompletionRequest {
|
||||||
|
messages,
|
||||||
|
max_tokens: Some(provider.max_tokens()),
|
||||||
|
temperature: Some(provider.temperature()),
|
||||||
|
stream: false,
|
||||||
|
tools: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
status("🤖 Calling LLM for discovery commands...");
|
||||||
|
|
||||||
|
let response = provider.complete(request).await?;
|
||||||
|
|
||||||
|
// Step 5: Extract shell commands from the response
|
||||||
|
let shell_commands = extract_shell_commands(&response.content);
|
||||||
|
|
||||||
|
status(&format!("📋 Extracted {} discovery commands", shell_commands.len()));
|
||||||
|
|
||||||
|
// Step 6: Format as tool messages
|
||||||
|
let tool_messages = shell_commands
|
||||||
|
.into_iter()
|
||||||
|
.map(|cmd| create_tool_message("shell", &cmd))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(tool_messages)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates an Assistant message with a tool call in g3's JSON format.
|
||||||
|
pub fn create_tool_message(tool: &str, command: &str) -> Message {
|
||||||
|
let tool_call = serde_json::json!({
|
||||||
|
"tool": tool,
|
||||||
|
"args": {
|
||||||
|
"command": command
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Message::new(MessageRole::Assistant, tool_call.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract shell commands from the LLM response.
///
/// Looks for the `{{CODE EXPLORATION COMMANDS}}` section and collects the
/// non-empty, non-comment (`#`) lines of every fenced code block (```` ``` ````)
/// that follows it. Returns an empty vector when the section marker is absent.
///
/// A trailing code block left unterminated (missing closing fence) is still
/// flushed — LLM output sometimes drops the final fence, and the original
/// implementation silently discarded those commands.
pub fn extract_shell_commands(response: &str) -> Vec<String> {
    let section_marker = "{{CODE EXPLORATION COMMANDS}}";
    let section_start = match response.find(section_marker) {
        Some(pos) => pos + section_marker.len(),
        None => return Vec::new(),
    };

    let mut commands = Vec::new();
    let mut in_code_block = false;
    let mut current_block = String::new();

    // Push every non-empty, non-comment line of a finished block, then reset it.
    let flush = |block: &mut String, out: &mut Vec<String>| {
        for cmd_line in block.lines() {
            let cmd = cmd_line.trim();
            if !cmd.is_empty() && !cmd.starts_with('#') {
                out.push(cmd.to_string());
            }
        }
        block.clear();
    };

    for line in response[section_start..].lines() {
        if line.trim().starts_with("```") {
            if in_code_block {
                // End of code block - extract its commands.
                flush(&mut current_block, &mut commands);
            }
            in_code_block = !in_code_block;
        } else if in_code_block {
            current_block.push_str(line);
            current_block.push('\n');
        }
    }

    // Bug fix: an unterminated final fence previously dropped its commands.
    if in_code_block {
        flush(&mut current_block, &mut commands);
    }

    commands
}
|
||||||
|
|
||||||
|
/// Extract the summary section from the LLM response.
///
/// Returns the trimmed text between the `{{SUMMARY BASED ON INITIAL INFO}}`
/// marker and the next `{{` heading (or end of input). Yields `None` when the
/// marker is missing or the section is empty after trimming.
pub fn extract_summary(response: &str) -> Option<String> {
    const MARKER: &str = "{{SUMMARY BASED ON INITIAL INFO}}";

    // Slice everything after the marker; `?` bails out when it is absent.
    let after = &response[response.find(MARKER)? + MARKER.len()..];

    // The section ends at the next "{{" heading, or at end of input.
    let end = after.find("{{").unwrap_or(after.len());
    let summary = after[..end].trim();

    if summary.is_empty() {
        None
    } else {
        Some(summary.to_string())
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): these unit tests overlap with the integration tests in
    // crates/g3-planner/tests/planner_test.rs — consider deduplicating.

    // The tool message must be an Assistant message whose content is the JSON
    // payload {"tool": ..., "args": {"command": ...}}.
    #[test]
    fn test_create_tool_message_format() {
        let msg = create_tool_message("shell", "ls -la");

        assert!(matches!(msg.role, MessageRole::Assistant));

        let parsed: serde_json::Value = serde_json::from_str(&msg.content).unwrap();
        assert_eq!(parsed["tool"], "shell");
        assert_eq!(parsed["args"]["command"], "ls -la");
    }

    // Commands inside a fenced block after the section marker are extracted
    // in order; surrounding prose is ignored.
    #[test]
    fn test_extract_shell_commands_basic() {
        let response = r#"
Some text here.

{{CODE EXPLORATION COMMANDS}}

```bash
ls -la
cat README.md
rg --files -g '*.rs'
```

More text.
"#;

        let commands = extract_shell_commands(response);
        assert_eq!(commands.len(), 3);
        assert_eq!(commands[0], "ls -la");
        assert_eq!(commands[1], "cat README.md");
        assert_eq!(commands[2], "rg --files -g '*.rs'");
    }

    // Lines starting with '#' inside the fenced block are treated as comments
    // and skipped.
    #[test]
    fn test_extract_shell_commands_with_comments() {
        let response = r#"
{{CODE EXPLORATION COMMANDS}}

```
# This is a comment
ls -la
# Another comment
cat file.txt
```
"#;

        let commands = extract_shell_commands(response);
        assert_eq!(commands.len(), 2);
        assert_eq!(commands[0], "ls -la");
        assert_eq!(commands[1], "cat file.txt");
    }

    // Without the {{CODE EXPLORATION COMMANDS}} marker nothing is extracted.
    #[test]
    fn test_extract_shell_commands_no_section() {
        let response = "Some response without the expected section.";
        let commands = extract_shell_commands(response);
        assert!(commands.is_empty());
    }

    // The summary is the text between the summary marker and the next
    // "{{" heading, trimmed.
    #[test]
    fn test_extract_summary() {
        let response = r#"
{{SUMMARY BASED ON INITIAL INFO}}

This is a summary of the codebase.
It has multiple lines.

{{CODE EXPLORATION COMMANDS}}

```
ls -la
```
"#;

        let summary = extract_summary(response);
        assert!(summary.is_some());
        let summary_text = summary.unwrap();
        assert!(summary_text.contains("This is a summary"));
        assert!(summary_text.contains("multiple lines"));
    }

    // A missing summary marker yields None rather than an empty string.
    #[test]
    fn test_extract_summary_no_section() {
        let response = "Response without summary section.";
        let summary = extract_summary(response);
        assert!(summary.is_none());
    }
}
|
||||||
31
crates/g3-planner/src/prompts.rs
Normal file
31
crates/g3-planner/src/prompts.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
//! Prompts used for discovery phase

/// System prompt for discovery mode - instructs the LLM to analyze codebase and generate exploration commands.
///
/// NOTE(review): this string is runtime behavior (it shapes the LLM's output
/// format); keep its wording stable unless the parsing side changes too.
pub const DISCOVERY_SYSTEM_PROMPT: &str = r#"You are an expert code analyst. Your task is to analyze a codebase structure and generate shell commands to explore it further.

You will receive:
1. User requirements describing what needs to be implemented
2. A codebase report showing the structure and key elements of the codebase

Your job is to:
1. Understand the requirements and identify what parts of the codebase are relevant
2. Generate shell commands to explore those parts in more detail

IMPORTANT: Do NOT attempt to implement anything. Only generate exploration commands."#;
|
||||||
|
|
||||||
|
/// Discovery prompt template - used when we have a codebase report.
/// The codebase report should be appended after this prompt.
///
/// The `{{SUMMARY BASED ON INITIAL INFO}}` and `{{CODE EXPLORATION COMMANDS}}`
/// headings this prompt mandates are the exact markers that `extract_summary`
/// and `extract_shell_commands` search for — the three must stay in sync.
pub const DISCOVERY_REQUIREMENTS_PROMPT: &str = r#"**CRITICAL**: DO ABSOLUTELY NOT ATTEMPT TO IMPLEMENT THESE REQUIREMENTS AT THIS POINT. ONLY USE THEM TO
UNDERSTAND WHICH PARTS OF THE CODE YOU MIGHT BE INTERESTED IN, AND WHAT SEARCH/GREP EXPRESSIONS YOU MIGHT WANT TO USE
TO GET A BETTER UNDERSTANDING OF THE CODEBASE.

Your task is to analyze the codebase structure provided below and generate shell commands to explore it further.

Your output MUST include:
1. A section with heading {{SUMMARY BASED ON INITIAL INFO}} containing a brief summary of what you understand about the codebase structure (max 10000 tokens).
2. A section with heading {{CODE EXPLORATION COMMANDS}} containing shell commands to explore the codebase further.
   - Use tools like `ls`, `rg` (ripgrep), `grep`, `sed`, `cat`, `head`, `tail` etc.
   - Focus on commands that will help understand the code structure without dumping entire files.
   - Mark the beginning and end of the commands with "```".

DO NOT ADD ANY COMMENTS OR OTHER EXPLANATION IN THE COMMANDS SECTION, JUST INCLUDE THE SHELL COMMANDS."#;
|
||||||
103
crates/g3-planner/tests/planner_test.rs
Normal file
103
crates/g3-planner/tests/planner_test.rs
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
//! Integration tests for g3-planner

use g3_planner::{create_tool_message, explore_codebase, extract_shell_commands};
use g3_providers::MessageRole;

// The tool message must be an Assistant message whose content parses as the
// JSON payload {"tool": ..., "args": {"command": ...}}.
#[test]
fn test_create_tool_message_format() {
    let msg = create_tool_message("shell", "ls -la");

    assert!(matches!(msg.role, MessageRole::Assistant));

    let parsed: serde_json::Value = serde_json::from_str(&msg.content).unwrap();
    assert_eq!(parsed["tool"], "shell");
    assert_eq!(parsed["args"]["command"], "ls -la");
}

// Smoke test: exploring the current directory produces a non-empty report.
// NOTE(review): this depends on the working directory the test runs from.
#[test]
fn test_explore_codebase_returns_report() {
    // Test with current directory (should find Rust files in g3 project)
    let report = explore_codebase(".");

    // Should return a non-empty report
    assert!(!report.is_empty(), "Report should not be empty");

    // Should contain the codebase analysis header
    assert!(
        report.contains("CODEBASE ANALYSIS") || report.contains("No recognized"),
        "Report should have analysis header or indicate no languages found"
    );
}

// Commands in a fenced block after the section marker are extracted in order.
#[test]
fn test_extract_shell_commands_basic() {
    let response = r#"
Some text here.

{{CODE EXPLORATION COMMANDS}}

```bash
ls -la
cat README.md
rg --files -g '*.rs'
```

More text.
"#;

    let commands = extract_shell_commands(response);
    assert_eq!(commands.len(), 3);
    assert_eq!(commands[0], "ls -la");
    assert_eq!(commands[1], "cat README.md");
    assert_eq!(commands[2], "rg --files -g '*.rs'");
}

// Lines starting with '#' inside the fenced block are skipped as comments.
#[test]
fn test_extract_shell_commands_with_comments() {
    let response = r#"
{{CODE EXPLORATION COMMANDS}}

```
# This is a comment
ls -la
# Another comment
cat file.txt
```
"#;

    let commands = extract_shell_commands(response);
    assert_eq!(commands.len(), 2);
    assert_eq!(commands[0], "ls -la");
    assert_eq!(commands[1], "cat file.txt");
}

// Without the section marker nothing is extracted.
#[test]
fn test_extract_shell_commands_no_section() {
    let response = "Some response without the expected section.";
    let commands = extract_shell_commands(response);
    assert!(commands.is_empty());
}

// Every fenced block after the marker contributes commands, even with prose
// between the blocks.
#[test]
fn test_extract_shell_commands_multiple_code_blocks() {
    let response = r#"
{{CODE EXPLORATION COMMANDS}}

```bash
ls -la
```

Some explanation text.

```
cat README.md
head -50 src/main.rs
```
"#;

    let commands = extract_shell_commands(response);
    assert_eq!(commands.len(), 3);
    assert_eq!(commands[0], "ls -la");
    assert_eq!(commands[1], "cat README.md");
    assert_eq!(commands[2], "head -50 src/main.rs");
}
|
||||||
@@ -678,6 +678,14 @@ impl LLMProvider for AnthropicProvider {
|
|||||||
// Anthropic supports cache control
|
// Anthropic supports cache control
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn max_tokens(&self) -> u32 {
|
||||||
|
self.max_tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
fn temperature(&self) -> f32 {
|
||||||
|
self.temperature
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Anthropic API request/response structures
|
// Anthropic API request/response structures
|
||||||
|
|||||||
@@ -1055,6 +1055,14 @@ impl LLMProvider for DatabricksProvider {
|
|||||||
fn supports_cache_control(&self) -> bool {
|
fn supports_cache_control(&self) -> bool {
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn max_tokens(&self) -> u32 {
|
||||||
|
self.max_tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
fn temperature(&self) -> f32 {
|
||||||
|
self.temperature
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Databricks API request/response structures
|
// Databricks API request/response structures
|
||||||
|
|||||||
@@ -771,4 +771,12 @@ impl LLMProvider for EmbeddedProvider {
|
|||||||
fn model(&self) -> &str {
|
fn model(&self) -> &str {
|
||||||
&self.model_name
|
&self.model_name
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn max_tokens(&self) -> u32 {
|
||||||
|
self.max_tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
fn temperature(&self) -> f32 {
|
||||||
|
self.temperature
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,6 +26,12 @@ pub trait LLMProvider: Send + Sync {
|
|||||||
fn supports_cache_control(&self) -> bool {
|
fn supports_cache_control(&self) -> bool {
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the configured max_tokens for this provider
|
||||||
|
fn max_tokens(&self) -> u32;
|
||||||
|
|
||||||
|
/// Get the configured temperature for this provider
|
||||||
|
fn temperature(&self) -> f32;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
|||||||
@@ -384,6 +384,14 @@ impl LLMProvider for OpenAIProvider {
|
|||||||
// OpenAI models support native tool calling
|
// OpenAI models support native tool calling
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn max_tokens(&self) -> u32 {
|
||||||
|
self.max_tokens.unwrap_or(16000)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn temperature(&self) -> f32 {
|
||||||
|
self._temperature.unwrap_or(0.1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn convert_messages(messages: &[Message]) -> Vec<serde_json::Value> {
|
fn convert_messages(messages: &[Message]) -> Vec<serde_json::Value> {
|
||||||
|
|||||||
Reference in New Issue
Block a user