Remove automatic README loading from context window

README.md is no longer auto-loaded into the LLM context at startup.
This saves ~4,600 tokens per session while AGENTS.md and memory.md
still provide all critical information for code tasks.

Changes:
- Delete read_project_readme() function
- Remove readme_content parameter from combine_project_content()
- Rename extract_readme_heading() -> extract_project_heading()
- Rename Agent constructors: *_with_readme_* -> *_with_project_context_*
- Update context preservation to only check for Agent Configuration
- Remove has_readme field from LoadedContent
- Update all tests to use new markers and function names

The LLM can still read README.md on-demand via read_file when needed.
Author: Dhanji R. Prasanna
Date:   2026-01-29 11:07:41 +11:00
Parent: 05d253ee2a
Commit: 7bfb9efa19
16 changed files with 113 additions and 174 deletions

View File

@@ -131,7 +131,7 @@ g3 supports local models via llama.cpp with Metal acceleration on macOS. Here's
#### Local Models
| Model | Size | Speed | Agentic Score | Notes |
|-------|------|-------|---------------|-------|
| **Qwen3-32B** (Dense) | 18 GB | Slow | ⭐⭐⭐ | Good reasoning, spotty execution |
| ~~Qwen3-32B~~ (Dense) | 18 GB | Slow | | Good reasoning, but flails on execution and crashes |
| Qwen3-14B | 8.4 GB | Medium | ⭐⭐ | Understands tasks but makes implementation errors |
| GLM-4 9B | 5.7 GB | Fast | ⭐⭐ | Quick responses, may struggle with complex tasks |
| Qwen3-4B | 2.3 GB | Very Fast | ❌ | Generates malformed tool calls - not for agentic use |

View File

@@ -157,7 +157,7 @@ pub async fn run_accumulative_mode(
// Create agent for this autonomous run
let ui_writer = ConsoleUiWriter::new();
ui_writer.set_workspace_path(workspace_dir.clone());
let agent = Agent::new_autonomous_with_readme_and_quiet(
let agent = Agent::new_autonomous_with_project_context_and_quiet(
config.clone(),
ui_writer,
combined_content.clone(),
@@ -291,7 +291,7 @@ async fn handle_command(
// Create agent for interactive mode with requirements context
let ui_writer = ConsoleUiWriter::new();
ui_writer.set_workspace_path(workspace_dir.clone());
let agent = Agent::new_with_readme_and_quiet(
let agent = Agent::new_with_project_context_and_quiet(
config,
ui_writer,
chat_combined_content.clone(),

View File

@@ -7,7 +7,7 @@ use tracing::debug;
use g3_core::ui_writer::UiWriter;
use g3_core::Agent;
use crate::project_files::{combine_project_content, read_agents_config, read_include_prompt, read_workspace_memory, read_project_readme};
use crate::project_files::{combine_project_content, read_agents_config, read_include_prompt, read_workspace_memory};
use crate::display::{LoadedContent, print_loaded_status, print_workspace_path};
use crate::language_prompts::{get_language_prompts_for_workspace, get_agent_language_prompts_for_workspace_with_langs};
use crate::simple_output::SimpleOutput;
@@ -115,9 +115,8 @@ pub async fn run_agent_mode(
// Note: allow_multiple_tool_calls parameter is deprecated but kept for API compatibility
let system_prompt = get_agent_system_prompt(&agent_prompt, true);
// Load AGENTS.md, README, and memory - same as normal mode
// Load AGENTS.md and memory - same as normal mode
let agents_content_opt = read_agents_config(&workspace_dir);
let readme_content_opt = read_project_readme(&workspace_dir);
let memory_content_opt = read_workspace_memory(&workspace_dir);
// Read include prompt early so we can show it in the status line
@@ -129,7 +128,6 @@ pub async fn run_agent_mode(
.and_then(|p| p.file_name())
.map(|s| s.to_string_lossy().to_string());
let loaded = LoadedContent::new(
readme_content_opt.is_some(),
agents_content_opt.is_some(),
memory_content_opt.is_some(),
include_filename,
@@ -164,7 +162,6 @@ pub async fn run_agent_mode(
// Combine all content for the agent's context
let combined_content = combine_project_content(
agents_content_opt,
readme_content_opt,
memory_content_opt,
language_content,
include_prompt,

View File

@@ -501,7 +501,7 @@ async fn execute_coach_turn(
let ui_writer = ConsoleUiWriter::new();
ui_writer.set_workspace_path(project.workspace().to_path_buf());
let mut coach_agent =
match Agent::new_autonomous_with_readme_and_quiet(coach_config, ui_writer, None, quiet)
match Agent::new_autonomous_with_project_context_and_quiet(coach_config, ui_writer, None, quiet)
.await
{
Ok(a) => a,

View File

@@ -109,7 +109,6 @@ pub fn print_workspace_path(workspace_path: &Path) {
/// Information about what project files were loaded.
#[derive(Default)]
pub struct LoadedContent {
pub has_readme: bool,
pub has_agents: bool,
pub has_memory: bool,
pub include_prompt_filename: Option<String>,
@@ -117,9 +116,8 @@ pub struct LoadedContent {
impl LoadedContent {
/// Create from explicit boolean flags.
pub fn new(has_readme: bool, has_agents: bool, has_memory: bool, include_prompt_filename: Option<String>) -> Self {
pub fn new(has_agents: bool, has_memory: bool, include_prompt_filename: Option<String>) -> Self {
Self {
has_readme,
has_agents,
has_memory,
include_prompt_filename,
@@ -129,7 +127,6 @@ impl LoadedContent {
/// Create from combined content string by detecting markers.
pub fn from_combined_content(content: &str) -> Self {
Self {
has_readme: content.contains("Project README"),
has_agents: content.contains("Agent Configuration"),
has_memory: content.contains("=== Workspace Memory"),
include_prompt_filename: if content.contains("Included Prompt") {
@@ -151,15 +148,12 @@ impl LoadedContent {
/// Check if any content was loaded.
pub fn has_any(&self) -> bool {
self.has_readme || self.has_agents || self.has_memory || self.include_prompt_filename.is_some()
self.has_agents || self.has_memory || self.include_prompt_filename.is_some()
}
/// Build a list of loaded item names in load order.
pub fn to_loaded_items(&self) -> Vec<String> {
let mut items = Vec::new();
if self.has_readme {
items.push("README".to_string());
}
if self.has_agents {
items.push("AGENTS.md".to_string());
}
@@ -232,9 +226,8 @@ mod tests {
#[test]
fn test_loaded_content_from_combined() {
let content = "Project README\nAgent Configuration\n=== Workspace Memory";
let content = "Agent Configuration\n=== Workspace Memory";
let loaded = LoadedContent::from_combined_content(content);
assert!(loaded.has_readme);
assert!(loaded.has_agents);
assert!(loaded.has_memory);
assert!(loaded.include_prompt_filename.is_none());
@@ -242,23 +235,22 @@ mod tests {
#[test]
fn test_loaded_content_with_include_prompt() {
let content = "Project README\nIncluded Prompt";
let content = "Agent Configuration\nIncluded Prompt";
let loaded = LoadedContent::from_combined_content(content)
.with_include_prompt_filename(Some("custom.md".to_string()));
assert!(loaded.has_readme);
assert!(loaded.has_agents);
assert_eq!(loaded.include_prompt_filename, Some("custom.md".to_string()));
}
#[test]
fn test_loaded_content_to_items_order() {
let loaded = LoadedContent {
has_readme: true,
has_agents: true,
has_memory: true,
include_prompt_filename: Some("prompt.md".to_string()),
};
let items = loaded.to_loaded_items();
assert_eq!(items, vec!["README", "AGENTS.md", "prompt.md", "Memory"]);
assert_eq!(items, vec!["AGENTS.md", "prompt.md", "Memory"]);
}
#[test]
@@ -266,11 +258,11 @@ mod tests {
let empty = LoadedContent::default();
assert!(!empty.has_any());
let with_readme = LoadedContent {
has_readme: true,
let with_agents = LoadedContent {
has_agents: true,
..Default::default()
};
assert!(with_readme.has_any());
assert!(with_agents.has_any());
}
#[test]

View File

@@ -15,7 +15,7 @@ use crate::commands::handle_command;
use crate::display::{LoadedContent, print_loaded_status, print_project_heading, print_workspace_path};
use crate::g3_status::{G3Status, Status};
use crate::project::Project;
use crate::project_files::extract_readme_heading;
use crate::project_files::extract_project_heading;
use crate::simple_output::SimpleOutput;
use crate::template::process_template;
use crate::task_execution::execute_task_with_retry;
@@ -153,11 +153,9 @@ pub async fn run_interactive<W: UiWriter>(
if let Some(ref content) = combined_content {
let loaded = LoadedContent::from_combined_content(content);
// Extract project name if README is loaded
if loaded.has_readme {
if let Some(name) = extract_readme_heading(content) {
print_project_heading(&name);
}
// Extract project name from AGENTS.md or memory
if let Some(name) = extract_project_heading(content) {
print_project_heading(&name);
}
print_loaded_status(&loaded);

View File

@@ -39,7 +39,7 @@ use accumulative::run_accumulative_mode;
use agent_mode::run_agent_mode;
use autonomous::run_autonomous;
use interactive::run_interactive;
use project_files::{combine_project_content, read_agents_config, read_include_prompt, read_workspace_memory, read_project_readme};
use project_files::{combine_project_content, read_agents_config, read_include_prompt, read_workspace_memory};
use simple_output::SimpleOutput;
use ui_writer_impl::ConsoleUiWriter;
use g3_core::ui_writer::UiWriter;
@@ -109,7 +109,6 @@ pub async fn run() -> Result<()> {
// Load project context files
let agents_content = read_agents_config(&workspace_dir);
let readme_content = read_project_readme(&workspace_dir);
let memory_content = read_workspace_memory(&workspace_dir);
let language_content = language_prompts::get_language_prompts_for_workspace(&workspace_dir);
let include_prompt = read_include_prompt(cli.include_prompt.as_deref());
@@ -124,8 +123,8 @@ pub async fn run() -> Result<()> {
// Load configuration with CLI overrides
let config = load_config_with_cli_overrides(&cli)?;
// Combine AGENTS.md, README, and memory content
let combined_content = combine_project_content(agents_content, readme_content, memory_content, language_content, include_prompt, &workspace_dir);
// Combine AGENTS.md and memory content
let combined_content = combine_project_content(agents_content, memory_content, language_content, include_prompt, &workspace_dir);
run_console_mode(cli, config, project, combined_content, workspace_dir).await
}
@@ -171,7 +170,7 @@ async fn run_console_mode(
ui_writer.set_workspace_path(workspace_dir.clone());
let mut agent = if cli.autonomous {
Agent::new_autonomous_with_readme_and_quiet(
Agent::new_autonomous_with_project_context_and_quiet(
config.clone(),
ui_writer,
combined_content.clone(),
@@ -179,7 +178,7 @@ async fn run_console_mode(
)
.await?
} else {
Agent::new_with_readme_and_quiet(
Agent::new_with_project_context_and_quiet(
config.clone(),
ui_writer,
combined_content.clone(),

View File

@@ -1,6 +1,6 @@
//! Project file reading utilities.
//!
//! Reads AGENTS.md, README.md, and workspace memory files from the workspace.
//! Reads AGENTS.md and workspace memory files from the workspace.
use std::path::Path;
use tracing::error;
@@ -31,41 +31,6 @@ pub fn read_agents_config(workspace_dir: &Path) -> Option<String> {
None
}
/// Read README from the workspace directory if it's a project directory.
/// Returns formatted content with emoji prefix, or None if not found.
pub fn read_project_readme(workspace_dir: &Path) -> Option<String> {
// Only read README if we're in a project directory
let is_project_dir = workspace_dir.join(".g3").exists() || workspace_dir.join(".git").exists();
if !is_project_dir {
return None;
}
const README_NAMES: &[&str] = &[
"README.md",
"README.MD",
"readme.md",
"Readme.md",
"README",
"README.txt",
"README.rst",
];
for name in README_NAMES {
let path = workspace_dir.join(name);
if path.exists() {
match std::fs::read_to_string(&path) {
Ok(content) => {
return Some(format!("📚 Project README (from {}):{}\n{}", name, "\n", content));
}
Err(e) => {
error!("Failed to read {}: {}", path.display(), e);
}
}
}
}
None
}
/// Read workspace memory from analysis/memory.md in the workspace directory.
/// Returns formatted content with emoji prefix and size info, or None if not found.
pub fn read_workspace_memory(workspace_dir: &Path) -> Option<String> {
@@ -110,15 +75,14 @@ pub fn read_include_prompt(path: Option<&std::path::Path>) -> Option<String> {
}
}
/// Combine AGENTS.md, README, and memory content into a single string.
/// Combine AGENTS.md and memory content into a single string for project context.
///
/// Returns None if all inputs are None, otherwise joins non-None parts with double newlines.
/// Prepends the current working directory to help the LLM avoid path hallucinations.
///
/// Order: Working Directory → AGENTS.md → README → Language prompts → Include prompt → Memory
/// Order: Working Directory → AGENTS.md → Language prompts → Include prompt → Memory
pub fn combine_project_content(
agents_content: Option<String>,
readme_content: Option<String>,
memory_content: Option<String>,
language_content: Option<String>,
include_prompt: Option<String>,
@@ -127,10 +91,10 @@ pub fn combine_project_content(
// Always include working directory to prevent LLM from hallucinating paths
let cwd_info = format!("📂 Working Directory: {}", workspace_dir.display());
// Order: cwd → agents → readme → language → include_prompt → memory
// Order: cwd → agents → language → include_prompt → memory
// Include prompt comes BEFORE memory so memory is always last (most recent context)
let parts: Vec<String> = [
Some(cwd_info), agents_content, readme_content, language_content, include_prompt, memory_content
Some(cwd_info), agents_content, language_content, include_prompt, memory_content
]
.into_iter()
.flatten()
@@ -152,26 +116,19 @@ fn format_size(len: usize) -> String {
}
}
/// Extract the first H1 heading from README content for display.
pub fn extract_readme_heading(readme_content: &str) -> Option<String> {
// Find where the actual README content starts (after any prefix markers)
let readme_start = readme_content.find("📚 Project README (from");
let content_to_search = match readme_start {
Some(pos) => &readme_content[pos..],
None => readme_content,
};
// Skip the prefix line and collect content
let content: String = content_to_search
.lines()
.filter(|line| !line.starts_with("📚 Project README"))
.collect::<Vec<_>>()
.join("\n");
// Look for H1 heading
for line in content.lines() {
/// Extract the first H1 heading from project context content for display.
/// Looks for H1 headings in AGENTS.md or memory content.
pub fn extract_project_heading(project_context: &str) -> Option<String> {
// Look for H1 heading in the content
// Skip prefix lines (emoji markers)
for line in project_context.lines() {
let trimmed = line.trim();
// Skip emoji prefix lines
if trimmed.starts_with("📂") || trimmed.starts_with("🤖") || trimmed.starts_with("🔧") || trimmed.starts_with("📎") || trimmed.starts_with("===") {
continue;
}
if let Some(stripped) = trimmed.strip_prefix("# ") {
let title = stripped.trim();
if !title.is_empty() {
@@ -181,7 +138,7 @@ pub fn extract_readme_heading(readme_content: &str) -> Option<String> {
}
// Fallback: first non-empty, non-metadata line
find_fallback_title(&content)
find_fallback_title(project_context)
}
/// Find a fallback title from the first few lines of content.
@@ -190,6 +147,9 @@ fn find_fallback_title(content: &str) -> Option<String> {
let trimmed = line.trim();
if !trimmed.is_empty()
&& !trimmed.starts_with("📚")
&& !trimmed.starts_with("📂")
&& !trimmed.starts_with("🤖")
&& !trimmed.starts_with("🔧")
&& !trimmed.starts_with('#')
&& !trimmed.starts_with("==")
&& !trimmed.starts_with("--")
@@ -216,15 +176,15 @@ mod tests {
use super::*;
#[test]
fn test_extract_readme_heading() {
fn test_extract_project_heading() {
let content = "# My Project\n\nSome description";
assert_eq!(extract_readme_heading(content), Some("My Project".to_string()));
assert_eq!(extract_project_heading(content), Some("My Project".to_string()));
}
#[test]
fn test_extract_readme_heading_with_prefix() {
let content = "📚 Project README (from README.md):\n# Cool App\n\nDescription";
assert_eq!(extract_readme_heading(content), Some("Cool App".to_string()));
fn test_extract_project_heading_with_agents_prefix() {
let content = "🤖 Agent Configuration (from AGENTS.md):\n# Cool App\n\nDescription";
assert_eq!(extract_project_heading(content), Some("Cool App".to_string()));
}
#[test]
@@ -256,7 +216,6 @@ mod tests {
let workspace = std::path::PathBuf::from("/test/workspace");
let result = combine_project_content(
Some("agents".to_string()),
Some("readme".to_string()),
Some("memory".to_string()),
Some("language".to_string()),
None, // include_prompt
@@ -266,7 +225,6 @@ mod tests {
let content = result.unwrap();
assert!(content.contains("📂 Working Directory: /test/workspace"));
assert!(content.contains("agents"));
assert!(content.contains("readme"));
assert!(content.contains("memory"));
assert!(content.contains("language"));
}
@@ -274,17 +232,17 @@ mod tests {
#[test]
fn test_combine_project_content_partial() {
let workspace = std::path::PathBuf::from("/test/workspace");
let result = combine_project_content(None, Some("readme".to_string()), None, None, None, &workspace);
let result = combine_project_content(None, Some("memory".to_string()), None, None, &workspace);
assert!(result.is_some());
let content = result.unwrap();
assert!(content.contains("📂 Working Directory: /test/workspace"));
assert!(content.contains("readme"));
assert!(content.contains("memory"));
}
#[test]
fn test_combine_project_content_all_none() {
let workspace = std::path::PathBuf::from("/test/workspace");
let result = combine_project_content(None, None, None, None, None, &workspace);
let result = combine_project_content(None, None, None, None, &workspace);
// Now always returns Some because we always include the working directory
assert!(result.is_some());
assert!(result.unwrap().contains("📂 Working Directory: /test/workspace"));
@@ -295,7 +253,6 @@ mod tests {
let workspace = std::path::PathBuf::from("/test/workspace");
let result = combine_project_content(
Some("agents".to_string()),
Some("readme".to_string()),
Some("memory".to_string()),
Some("language".to_string()),
Some("include_prompt".to_string()),
@@ -307,12 +264,11 @@ mod tests {
}
#[test]
fn test_combine_project_content_order_include_before_memory() {
// Verify that include_prompt appears BEFORE memory in the combined content
fn test_combine_project_content_order() {
// Verify correct ordering: agents < language < include_prompt < memory
let workspace = std::path::PathBuf::from("/test/workspace");
let result = combine_project_content(
Some("AGENTS_CONTENT".to_string()),
Some("README_CONTENT".to_string()),
Some("MEMORY_CONTENT".to_string()),
Some("LANGUAGE_CONTENT".to_string()),
Some("INCLUDE_PROMPT_CONTENT".to_string()),
@@ -322,14 +278,12 @@ mod tests {
// Find positions of each section
let agents_pos = content.find("AGENTS_CONTENT").expect("agents not found");
let readme_pos = content.find("README_CONTENT").expect("readme not found");
let language_pos = content.find("LANGUAGE_CONTENT").expect("language not found");
let include_pos = content.find("INCLUDE_PROMPT_CONTENT").expect("include_prompt not found");
let memory_pos = content.find("MEMORY_CONTENT").expect("memory not found");
// Verify order: agents < readme < language < include_prompt < memory
assert!(agents_pos < readme_pos, "agents should come before readme");
assert!(readme_pos < language_pos, "readme should come before language");
// Verify order: agents < language < include_prompt < memory
assert!(agents_pos < language_pos, "agents should come before language");
assert!(language_pos < include_pos, "language should come before include_prompt");
assert!(include_pos < memory_pos, "include_prompt should come before memory");
}
@@ -340,7 +294,6 @@ mod tests {
let workspace = std::path::PathBuf::from("/test/workspace");
let result = combine_project_content(
Some("AGENTS".to_string()),
Some("README".to_string()),
Some("MEMORY".to_string()),
Some("LANGUAGE".to_string()),
None, // no include_prompt

View File

@@ -283,8 +283,8 @@ Format this as a detailed but concise summary that can be used to resume the con
if let Some(system_prompt) = preserved.system_prompt {
self.add_message(system_prompt);
}
if let Some(readme) = preserved.readme {
self.add_message(readme);
if let Some(project_context) = preserved.project_context {
self.add_message(project_context);
}
// Add ACD stub if provided (before summary so LLM knows about dehydrated context)
@@ -322,10 +322,10 @@ Format this as a detailed but concise summary that can be used to resume the con
fn extract_preserved_messages(&self) -> PreservedMessages {
let system_prompt = self.conversation_history.first().cloned();
let readme = self.conversation_history.get(1).and_then(|msg| {
// Look for project context (AGENTS.md, memory, etc.) in the second message
let project_context = self.conversation_history.get(1).and_then(|msg| {
if matches!(msg.role, MessageRole::System)
&& (msg.content.contains("Project README")
|| msg.content.contains("Agent Configuration"))
&& msg.content.contains("Agent Configuration")
{
Some(msg.clone())
} else {
@@ -343,7 +343,7 @@ Format this as a detailed but concise summary that can be used to resume the con
PreservedMessages {
system_prompt,
readme,
project_context,
last_assistant_message,
}
}
@@ -740,7 +740,7 @@ Format this as a detailed but concise summary that can be used to resume the con
/// Messages preserved across compaction.
struct PreservedMessages {
system_prompt: Option<Message>,
readme: Option<Message>,
project_context: Option<Message>,
last_assistant_message: Option<Message>,
}

View File

@@ -214,22 +214,22 @@ impl<W: UiWriter> Agent<W> {
Self::new_with_mode(config, ui_writer, true, false).await
}
pub async fn new_with_readme_and_quiet(
pub async fn new_with_project_context_and_quiet(
config: Config,
ui_writer: W,
readme_content: Option<String>,
project_context: Option<String>,
quiet: bool,
) -> Result<Self> {
Self::new_with_mode_and_readme(config, ui_writer, false, readme_content, quiet, None).await
Self::new_with_mode_and_project_context(config, ui_writer, false, project_context, quiet, None).await
}
pub async fn new_autonomous_with_readme_and_quiet(
pub async fn new_autonomous_with_project_context_and_quiet(
config: Config,
ui_writer: W,
readme_content: Option<String>,
project_context: Option<String>,
quiet: bool,
) -> Result<Self> {
Self::new_with_mode_and_readme(config, ui_writer, true, readme_content, quiet, None).await
Self::new_with_mode_and_project_context(config, ui_writer, true, project_context, quiet, None).await
}
/// Create a new agent with a custom system prompt (for agent mode)
@@ -238,13 +238,13 @@ impl<W: UiWriter> Agent<W> {
config: Config,
ui_writer: W,
custom_system_prompt: String,
readme_content: Option<String>,
project_context: Option<String>,
) -> Result<Self> {
Self::new_with_mode_and_readme(
Self::new_with_mode_and_project_context(
config,
ui_writer,
false,
readme_content,
project_context,
false,
Some(custom_system_prompt),
)
@@ -261,17 +261,17 @@ impl<W: UiWriter> Agent<W> {
ui_writer: W,
providers: ProviderRegistry,
) -> Result<Self> {
Self::new_for_test_with_readme(config, ui_writer, providers, None).await
Self::new_for_test_with_project_context(config, ui_writer, providers, None).await
}
/// Create a new agent for testing with README content.
/// Create a new agent for testing with project context.
/// This allows tests to verify context window structure with combined content.
#[doc(hidden)]
pub async fn new_for_test_with_readme(
pub async fn new_for_test_with_project_context(
config: Config,
ui_writer: W,
providers: ProviderRegistry,
readme_content: Option<String>,
project_context: Option<String>,
) -> Result<Self> {
use crate::context_window::ContextWindow;
use crate::prompts::get_system_prompt_for_native;
@@ -285,10 +285,10 @@ impl<W: UiWriter> Agent<W> {
let system_message = Message::new(MessageRole::System, system_prompt);
context_window.add_message(system_message);
// Add README content if provided
if let Some(readme) = readme_content {
let readme_message = Message::new(MessageRole::System, readme);
context_window.add_message(readme_message);
// Add project context if provided
if let Some(context) = project_context {
let context_message = Message::new(MessageRole::System, context);
context_window.add_message(context_message);
}
// For tests: auto_compact=false, is_autonomous=false, quiet=true, no computer_controller
@@ -310,14 +310,14 @@ impl<W: UiWriter> Agent<W> {
is_autonomous: bool,
quiet: bool,
) -> Result<Self> {
Self::new_with_mode_and_readme(config, ui_writer, is_autonomous, None, quiet, None).await
Self::new_with_mode_and_project_context(config, ui_writer, is_autonomous, None, quiet, None).await
}
async fn new_with_mode_and_readme(
async fn new_with_mode_and_project_context(
config: Config,
ui_writer: W,
is_autonomous: bool,
readme_content: Option<String>,
project_context: Option<String>,
quiet: bool,
custom_system_prompt: Option<String>,
) -> Result<Self> {
@@ -361,10 +361,10 @@ impl<W: UiWriter> Agent<W> {
let system_message = Message::new(MessageRole::System, system_prompt);
context_window.add_message(system_message);
// If README content is provided, add it as a second system message (after the main system prompt)
if let Some(readme) = readme_content {
let readme_message = Message::new(MessageRole::System, readme);
context_window.add_message(readme_message);
// If project context is provided, add it as a second system message (after the main system prompt)
if let Some(context) = project_context {
let context_message = Message::new(MessageRole::System, context);
context_window.add_message(context_message);
}
// NOTE: TODO lists are now session-scoped and stored in .g3/sessions/<session_id>/todo.g3.md

View File

@@ -1,6 +1,6 @@
//! Integration tests for project context loading and ordering.
//!
//! Tests that the context window has the correct structure when projects are loaded.
//! Tests that the context window has the correct structure when project context is loaded.
//! Also tests that project content survives compaction.
use g3_core::{
@@ -11,7 +11,7 @@ use g3_config::Config;
use g3_providers::{mock::MockProvider, ProviderRegistry, MockResponse, MessageRole};
/// Helper to create a test agent with mock provider
async fn create_test_agent(readme_content: Option<String>) -> Agent<NullUiWriter> {
async fn create_test_agent(project_context: Option<String>) -> Agent<NullUiWriter> {
let config = Config::default();
let provider = MockProvider::new()
.with_response(MockResponse::text("Test response"));
@@ -19,7 +19,7 @@ async fn create_test_agent(readme_content: Option<String>) -> Agent<NullUiWriter
let mut registry = ProviderRegistry::new();
registry.register(provider);
Agent::new_for_test_with_readme(config, NullUiWriter, registry, readme_content)
Agent::new_for_test_with_project_context(config, NullUiWriter, registry, project_context)
.await
.expect("Failed to create test agent")
}
@@ -337,7 +337,7 @@ async fn create_agent_with_mock_and_readme(
let mut registry = ProviderRegistry::new();
registry.register(provider);
Agent::new_for_test_with_readme(config, NullUiWriter, registry, readme_content)
Agent::new_for_test_with_project_context(config, NullUiWriter, registry, readme_content)
.await
.expect("Failed to create test agent")
}

View File

@@ -104,9 +104,9 @@ fn test_reset_with_summary_and_stub_no_stub() {
assert!(has_summary, "Should have summary");
}
/// Test that README message is preserved during reset
/// Test that project context message is preserved during reset
#[test]
fn test_reset_preserves_readme() {
fn test_reset_preserves_project_context() {
let mut context = ContextWindow::new(100000);
// Add system prompt
@@ -115,10 +115,10 @@ fn test_reset_preserves_readme() {
"You are a helpful assistant.".to_string(),
));
// Add README message (second system message with specific content)
// Add project context message (second system message with Agent Configuration)
context.add_message(Message::new(
MessageRole::System,
"Project README: This is a test project.".to_string(),
"🤖 Agent Configuration (from AGENTS.md):\nTest agent config.".to_string(),
));
// Add conversation
@@ -133,11 +133,11 @@ fn test_reset_preserves_readme() {
Some(stub),
);
// README should be preserved
let has_readme = context.conversation_history.iter().any(|m|
m.content.contains("Project README")
// Project context should be preserved
let has_project_context = context.conversation_history.iter().any(|m|
m.content.contains("Agent Configuration")
);
assert!(has_readme, "README message should be preserved");
assert!(has_project_context, "Project context message should be preserved");
}
/// Test fragment chain integrity

View File

@@ -15,7 +15,7 @@ use tempfile::TempDir;
async fn create_test_agent(temp_dir: &TempDir) -> Agent<NullUiWriter> {
std::env::set_current_dir(temp_dir.path()).unwrap();
let config = Config::default();
Agent::new_with_readme_and_quiet(config, NullUiWriter, None, true)
Agent::new_with_project_context_and_quiet(config, NullUiWriter, None, true)
.await
.unwrap()
}
@@ -24,7 +24,7 @@ async fn create_test_agent(temp_dir: &TempDir) -> Agent<NullUiWriter> {
async fn create_agent_mode_agent(temp_dir: &TempDir, agent_name: &str) -> Agent<NullUiWriter> {
std::env::set_current_dir(temp_dir.path()).unwrap();
let config = Config::default();
let mut agent = Agent::new_with_readme_and_quiet(config, NullUiWriter, None, true)
let mut agent = Agent::new_with_project_context_and_quiet(config, NullUiWriter, None, true)
.await
.unwrap();
agent.set_agent_mode(agent_name);

View File

@@ -54,18 +54,18 @@ fn test_reset_with_summary_preserves_system_prompt() {
assert!(has_user_msg, "Should have the latest user message");
}
/// Test that reset_with_summary preserves README message if present
/// Test that reset_with_summary preserves project context message if present
#[test]
fn test_reset_with_summary_preserves_readme() {
fn test_reset_with_summary_preserves_project_context() {
let mut context = ContextWindow::new(10000);
// Add the system prompt as the first message
let system_prompt = "You are G3, an AI programming agent...";
context.add_message(Message::new(MessageRole::System, system_prompt.to_string()));
// Add README as second system message
let readme_content = "# Project README\n\nThis is a test project.";
context.add_message(Message::new(MessageRole::System, readme_content.to_string()));
// Add project context as second system message (with Agent Configuration marker)
let project_context = "🤖 Agent Configuration (from AGENTS.md):\n\nTest agent config.";
context.add_message(Message::new(MessageRole::System, project_context.to_string()));
// Add some conversation history
context.add_message(Message::new(MessageRole::User, "Task: Write a function".to_string()));
@@ -85,15 +85,15 @@ fn test_reset_with_summary_preserves_readme() {
"First message should be the system prompt"
);
// Verify the README was preserved as the second message
// Verify the project context was preserved as the second message
let second_message = &context.conversation_history[1];
assert!(
matches!(second_message.role, MessageRole::System),
"Second message should be a System message"
);
assert!(
second_message.content.contains("Project README"),
"Second message should be the README"
second_message.content.contains("Agent Configuration"),
"Second message should be the project context"
);
}

View File

@@ -357,7 +357,7 @@ pub async fn call_refinement_llm_with_tools(
project.enter_workspace()?;
// Create agent - not autonomous mode, just regular agent with tools
let mut agent = Agent::new_with_readme_and_quiet(
let mut agent = Agent::new_with_project_context_and_quiet(
planner_config,
ui_writer,
Some(system_prompt),

View File

@@ -608,7 +608,7 @@ pub async fn run_coach_player_loop(
let player_config = g3_config.for_player()?;
let ui_writer = llm::PlannerUiWriter::new();
let mut player_agent = Agent::new_autonomous_with_readme_and_quiet(
let mut player_agent = Agent::new_autonomous_with_project_context_and_quiet(
player_config,
ui_writer,
None,
@@ -666,7 +666,7 @@ pub async fn run_coach_player_loop(
let coach_config = g3_config.for_coach()?;
let coach_ui_writer = llm::PlannerUiWriter::new();
let mut coach_agent = Agent::new_autonomous_with_readme_and_quiet(
let mut coach_agent = Agent::new_autonomous_with_project_context_and_quiet(
coach_config,
coach_ui_writer,
None,