Add studio sdlc command for SDLC maintenance pipeline

Implements a pipeline that orchestrates 7 g3 agents in sequence:
1. euler - dependency graph and hotspots analysis
2. breaker - whitebox exploration and edge-case discovery
3. hopper - deep testing and regression integrity
4. fowler - refactoring to deduplicate and reduce complexity
5. carmack - in-place rewriting for readability and concision
6. lamport - human-readable documentation and validation
7. huffman - semantic compression of memory

Features:
- Commit cursor tracking (--from flag to set starting point)
- Crash recovery (resumes from last incomplete stage)
- Git worktree isolation for all pipeline work
- Visual pipeline display with status icons
- Summary generation saved to .g3/sessions/sdlc/
- Pipeline state persisted to analysis/sdlc/pipeline.json

CLI:
- studio sdlc run [-c N] [--from COMMIT]
- studio sdlc status
- studio sdlc reset

Also adds huffman agent to embedded agents list.
This commit is contained in:
Dhanji R. Prasanna
2026-02-05 10:46:10 +11:00
parent fdb1255f02
commit add8060526
4 changed files with 994 additions and 4 deletions

View File

@@ -1,5 +1,5 @@
# Workspace Memory
> Updated: 2026-02-04T03:13:35Z | Size: 18.4k chars
> Updated: 2026-02-04T23:42:21Z | Size: 19.9k chars
### Remember Tool Wiring
- `crates/g3-core/src/tools/memory.rs` [0..5000] - `execute_remember()`, `get_memory_path()`, `merge_memory()`
@@ -342,3 +342,28 @@ Machine-readable invariants for Plan Mode verification.
**Selector syntax**: `foo.bar` (nested), `foo[0]` (index), `foo[*]` (wildcard)
**Predicate rules**: contains, equals, exists, not_exists, min_length, max_length, greater_than, less_than, matches
### Studio SDLC Pipeline Command
Orchestrates 7 g3 agents in sequence for codebase maintenance.
- `crates/studio/src/sdlc.rs`
- `PIPELINE_STAGES` [28..62] - static array of 7 agents: euler, breaker, hopper, fowler, carmack, lamport, huffman
- `Stage` [18..26] - name, description, focus fields
- `StageStatus` [65..80] - enum: Pending, Running, Complete, Failed, Skipped
- `PipelineState` [108..140] - run_id, stages[], commit_cursor, session_id
- `PipelineState::load()` [165..185] - loads from analysis/sdlc/pipeline.json, handles corruption
- `PipelineState::save()` [188..200] - persists state for crash recovery
- `PipelineState::resume()` [330..340] - finds first incomplete stage, resets Running→Pending
- `display_pipeline()` [354..390] - box display with status icons (○/◉/✓/✗/⊘)
- `generate_summary()` [410..475] - markdown table of results
- `crates/studio/src/main.rs`
- `SdlcAction` [88..104] - enum: Run{commits}, Status, Reset
- `cmd_sdlc_run()` [540..655] - orchestrates pipeline in worktree
- `cmd_sdlc_status()` [658..695] - displays current state
- `cmd_sdlc_reset()` [698..710] - clears pipeline state
- `run_agent_in_worktree()` [770..800] - executes g3 --agent in worktree
**Pipeline Order**: euler → breaker → hopper → fowler → carmack → lamport → huffman
**State Storage**: `analysis/sdlc/pipeline.json` (git-tracked)
**CLI**: `studio sdlc run [-c N]`, `studio sdlc status`, `studio sdlc reset`

View File

@@ -19,6 +19,7 @@ static EMBEDDED_AGENTS: &[(&str, &str)] = &[
("euler", include_str!("../../../agents/euler.md")),
("fowler", include_str!("../../../agents/fowler.md")),
("hopper", include_str!("../../../agents/hopper.md")),
("huffman", include_str!("../../../agents/huffman.md")),
("lamport", include_str!("../../../agents/lamport.md")),
("scout", include_str!("../../../agents/scout.md")),
];
@@ -88,7 +89,7 @@ mod tests {
#[test]
fn test_embedded_agents_exist() {
// Verify all expected agents are embedded
let expected = ["breaker", "carmack", "euler", "fowler", "hopper", "lamport", "scout"];
let expected = ["breaker", "carmack", "euler", "fowler", "hopper", "huffman", "lamport", "scout"];
for name in expected {
assert!(
get_embedded_agent(name).is_some(),
@@ -101,7 +102,7 @@ mod tests {
#[test]
fn test_list_embedded_agents() {
let agents = list_embedded_agents();
assert!(agents.len() >= 7, "Should have at least 7 embedded agents");
assert!(agents.len() >= 8, "Should have at least 8 embedded agents");
assert!(agents.contains(&"carmack"));
assert!(agents.contains(&"hopper"));
}

View File

@@ -9,6 +9,7 @@ use std::process::{Command, Stdio};
use termimad::MadSkin;
mod git;
mod sdlc;
mod session;
use git::GitWorktree;
@@ -82,6 +83,30 @@ enum Commands {
/// Session ID
session_id: String,
},
/// Run the SDLC maintenance pipeline
Sdlc {
#[command(subcommand)]
action: SdlcAction,
},
}
// Subcommands under `studio sdlc`.
// NOTE: the `///` doc comments on variants/fields below are surfaced
// verbatim by clap as CLI help text — they are user-facing strings, so
// edit them only when the help output should change.
#[derive(Subcommand)]
enum SdlcAction {
    /// Run the SDLC pipeline (or resume if interrupted)
    Run {
        /// Number of commits to process per stage (default: 10)
        #[arg(long, short, default_value = "10")]
        commits: u32,
        /// Set the commit cursor to start from (skips commits before this)
        #[arg(long)]
        from: Option<String>,
    },
    /// Show current pipeline status
    Status,
    /// Reset pipeline state (start fresh)
    Reset,
}
fn main() -> Result<()> {
@@ -96,6 +121,11 @@ fn main() -> Result<()> {
Commands::Status { session_id } => cmd_status(&session_id),
Commands::Accept { session_id } => cmd_accept(&session_id),
Commands::Discard { session_id } => cmd_discard(&session_id),
Commands::Sdlc { action } => match action {
SdlcAction::Run { commits, from } => cmd_sdlc_run(commits, from),
SdlcAction::Status => cmd_sdlc_status(),
SdlcAction::Reset => cmd_sdlc_reset(),
},
}
}
@@ -510,6 +540,292 @@ fn cmd_discard(session_id: &str) -> Result<()> {
Ok(())
}
/// Run the SDLC pipeline
///
/// Loads persisted state (resuming an interrupted run) or creates a fresh
/// run, then executes each stage as a g3 agent inside a dedicated git
/// worktree. State is saved to disk after every transition so a crash can
/// be recovered by simply re-running the command.
fn cmd_sdlc_run(commits_per_run: u32, from_commit: Option<String>) -> Result<()> {
    let repo_root = get_repo_root()?;
    // Load or create pipeline state
    let mut state = match sdlc::PipelineState::load(&repo_root)? {
        Some(mut existing) => {
            // Resume from where we left off
            existing.resume();
            println!("\x1b[1;32msdlc:\x1b[0m resuming pipeline run \x1b[38;2;216;177;114m{}\x1b[0m", existing.run_id);
            existing
        }
        None => {
            let mut state = sdlc::PipelineState::new(commits_per_run);
            // If --from is specified, set the cursor
            if let Some(ref from) = from_commit {
                // Resolve the commit hash
                let resolved = resolve_commit(&repo_root, from)?;
                state.commit_cursor = Some(resolved.clone());
                // Show only the first 8 chars of the hash (guarded against
                // short refs via the .min()).
                println!("\x1b[1;32msdlc:\x1b[0m starting new pipeline run \x1b[38;2;216;177;114m{}\x1b[0m (from {})",
                    state.run_id, &resolved[..8.min(resolved.len())]);
            } else {
                println!("\x1b[1;32msdlc:\x1b[0m starting new pipeline run \x1b[38;2;216;177;114m{}\x1b[0m", state.run_id);
            }
            state
        }
    };
    // Get current HEAD commit
    let head_commit = get_head_commit(&repo_root)?;
    // Check if there are commits to process
    let commits_to_process = if let Some(cursor) = &state.commit_cursor {
        count_commits_between(&repo_root, cursor, &head_commit)?
    } else {
        // First run - use commits_per_run as the count
        commits_per_run
    };
    if commits_to_process == 0 {
        // Nothing new since the cursor — exit before any worktree is created.
        println!("\x1b[1;32msdlc:\x1b[0m no new commits since last run");
        return Ok(());
    }
    println!("\x1b[1;32msdlc:\x1b[0m {} commits to process", commits_to_process.min(commits_per_run));
    // Display the pipeline
    sdlc::display_pipeline(&state);
    // Create a dedicated worktree for SDLC
    let g3_binary = get_g3_binary_path()?;
    let sdlc_session = Session::new("sdlc");
    let worktree = GitWorktree::new(&repo_root);
    let worktree_path = worktree.create(&sdlc_session)?;
    // Save session info for crash recovery
    state.session_id = Some(sdlc_session.id.clone());
    sdlc_session.save(&repo_root, &worktree_path)?;
    state.save(&repo_root)?;
    // Run each stage
    while !state.is_complete() && state.current_stage < sdlc::PIPELINE_STAGES.len() {
        let stage = &sdlc::PIPELINE_STAGES[state.current_stage];
        // Display current stage
        sdlc::display_current_stage(&state);
        println!();
        // Mark as running and save — if we crash mid-stage, resume() will
        // reset this stage from Running back to Pending.
        state.mark_running();
        state.save(&repo_root)?;
        let start_time = std::time::Instant::now();
        // Build the task prompt for this agent
        let task = format!(
            "Focus on changes in the past {} commits (up to {}). {}",
            commits_per_run.min(commits_to_process),
            &head_commit[..8.min(head_commit.len())],
            stage.focus
        );
        // Run the agent
        let result = run_agent_in_worktree(
            &g3_binary,
            &worktree_path,
            stage.name,
            &task,
        );
        let duration = start_time.elapsed().as_secs();
        match result {
            Ok(true) => {
                // Success — record duration/commits and advance the stage
                // index and commit cursor.
                state.mark_complete(duration, commits_to_process.min(commits_per_run), &head_commit);
                println!();
                println!("\x1b[1;32msdlc:\x1b[0m stage \x1b[1m{}\x1b[0m complete in {}",
                    stage.name, format_duration_short(duration));
            }
            Ok(false) => {
                // Agent completed but with non-zero exit
                state.mark_failed("Agent exited with non-zero status");
                println!();
                println!("\x1b[1;31msdlc:\x1b[0m stage \x1b[1m{}\x1b[0m failed", stage.name);
                state.save(&repo_root)?;
                break;
            }
            Err(e) => {
                // Error running agent
                state.mark_failed(&e.to_string());
                println!();
                println!("\x1b[1;31msdlc:\x1b[0m stage \x1b[1m{}\x1b[0m error: {}", stage.name, e);
                state.save(&repo_root)?;
                break;
            }
        }
        state.save(&repo_root)?;
        // Display updated pipeline
        sdlc::display_pipeline(&state);
    }
    // Cleanup worktree
    // NOTE(review): the worktree is removed even when a stage failed and the
    // loop broke early — confirm any uncommitted agent work in the worktree
    // is intentionally discarded here.
    worktree.remove(&sdlc_session)?;
    sdlc_session.delete(&repo_root)?;
    // Generate and display summary
    if state.is_complete() {
        let summary = sdlc::generate_summary(&state);
        println!("{}", summary);
        // Save summary to .g3/sessions/sdlc/ (best-effort: errors ignored)
        let summary_dir = repo_root.join(".g3").join("sessions").join("sdlc");
        fs::create_dir_all(&summary_dir).ok();
        let summary_path = summary_dir.join(format!("run-{}.md", state.run_id));
        fs::write(&summary_path, &summary).ok();
        println!("\x1b[1;32msdlc:\x1b[0m pipeline complete!");
    } else if state.has_failures() {
        println!();
        println!("\x1b[1;33msdlc:\x1b[0m pipeline paused due to failures");
        println!("  Run 'studio sdlc run' to retry failed stages");
    }
    Ok(())
}
/// Show SDLC pipeline status
///
/// Prints the pipeline box plus an overall status line, or a hint to start
/// a run when no state is persisted.
fn cmd_sdlc_status() -> Result<()> {
    let repo_root = get_repo_root()?;
    // Bail out early with a hint when nothing has been persisted yet.
    let state = match sdlc::PipelineState::load(&repo_root)? {
        Some(s) => s,
        None => {
            println!("\x1b[1;32msdlc:\x1b[0m no active pipeline");
            println!();
            println!("Run 'studio sdlc run' to start a new pipeline");
            return Ok(());
        }
    };
    println!("\x1b[1;32msdlc:\x1b[0m pipeline run \x1b[38;2;216;177;114m{}\x1b[0m", state.run_id);
    sdlc::display_pipeline(&state);
    // Overall verdict: finished, failed, or still mid-pipeline.
    if state.is_complete() {
        println!("Status: \x1b[1;32mComplete\x1b[0m");
    } else if state.has_failures() {
        println!("Status: \x1b[1;31mFailed\x1b[0m (run 'studio sdlc run' to retry)");
    } else {
        let position = state.current_stage + 1;
        let total = sdlc::PIPELINE_STAGES.len();
        println!("Status: \x1b[1;33mIn Progress\x1b[0m (stage {}/{})", position, total);
    }
    if let Some(cursor) = &state.commit_cursor {
        println!("Commit cursor: {}", cursor);
    }
    Ok(())
}
/// Reset SDLC pipeline state
///
/// Deletes the persisted pipeline.json when present; the message reflects
/// whether anything was actually removed.
fn cmd_sdlc_reset() -> Result<()> {
    let repo_root = get_repo_root()?;
    let had_state = sdlc::PipelineState::load(&repo_root)?.is_some();
    if had_state {
        sdlc::PipelineState::delete(&repo_root)?;
        println!("\x1b[1;32msdlc:\x1b[0m pipeline state reset");
    } else {
        println!("\x1b[1;32msdlc:\x1b[0m no pipeline state to reset");
    }
    Ok(())
}
/// Format a duration (whole seconds) in short form: "42s", "3m 5s", "2h 17m".
fn format_duration_short(secs: u64) -> String {
    let hours = secs / 3600;
    if hours > 0 {
        // Minutes component is the remainder after removing whole hours.
        format!("{}h {}m", hours, (secs % 3600) / 60)
    } else if secs >= 60 {
        format!("{}m {}s", secs / 60, secs % 60)
    } else {
        format!("{}s", secs)
    }
}
/// Resolve a commit reference (hash, branch, tag, etc.) to a full hash
/// by shelling out to `git rev-parse` in the repository root.
fn resolve_commit(repo_root: &Path, commit_ref: &str) -> Result<String> {
    let output = Command::new("git")
        .args(["rev-parse", commit_ref])
        .current_dir(repo_root)
        .output()
        .context("Failed to resolve commit")?;
    // A non-zero exit means git could not resolve the ref.
    if output.status.success() {
        Ok(String::from_utf8_lossy(&output.stdout).trim().to_owned())
    } else {
        bail!("Failed to resolve commit '{}'", commit_ref)
    }
}
/// Get the current HEAD commit hash via `git rev-parse HEAD`.
fn get_head_commit(repo_root: &Path) -> Result<String> {
    let output = Command::new("git")
        .args(["rev-parse", "HEAD"])
        .current_dir(repo_root)
        .output()
        .context("Failed to get HEAD commit")?;
    // Fails e.g. in a repository with no commits yet.
    if output.status.success() {
        Ok(String::from_utf8_lossy(&output.stdout).trim().to_owned())
    } else {
        bail!("Failed to get HEAD commit")
    }
}
/// Count commits in the range `from..to` via `git rev-list --count`.
///
/// If git cannot resolve the range (e.g. the `from` commit no longer
/// exists), `u32::MAX` is returned as a sentinel so the caller treats
/// everything as new work.
fn count_commits_between(repo_root: &Path, from: &str, to: &str) -> Result<u32> {
    let range = format!("{}..{}", from, to);
    let output = Command::new("git")
        .args(["rev-list", "--count", &range])
        .current_dir(repo_root)
        .output()
        .context("Failed to count commits")?;
    if !output.status.success() {
        // If the from commit doesn't exist (first run), return a large number
        return Ok(u32::MAX);
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    // An unparseable count (unexpected) degrades to 0 rather than erroring.
    Ok(stdout.trim().parse().unwrap_or(0))
}
/// Run a g3 agent in a worktree
///
/// Spawns the g3 binary with `--workspace`/`--agent` and the task prompt,
/// inheriting stdio so the agent's output streams directly to the user's
/// terminal. Returns Ok(true) when the agent process exits successfully,
/// Ok(false) for a non-zero exit; Err only if the process couldn't start.
fn run_agent_in_worktree(
    g3_binary: &Path,
    worktree_path: &Path,
    agent: &str,
    task: &str,
) -> Result<bool> {
    let mut cmd = Command::new(g3_binary);
    cmd.arg("--workspace").arg(worktree_path);
    cmd.arg("--agent").arg(agent);
    cmd.arg(task);
    cmd.current_dir(worktree_path);
    // Inherit stdio so the agent's progress is visible live.
    cmd.stdout(Stdio::inherit());
    cmd.stderr(Stdio::inherit());
    let status = cmd.status().context("Failed to run g3 agent")?;
    // If the agent made commits, commit them
    if status.success() {
        // Stage and commit any changes made by the agent
        // NOTE(review): only `git add -A` runs here (result ignored) — no
        // `git commit` is issued despite the comments above. Confirm whether
        // a commit step was intended or happens elsewhere.
        let _ = Command::new("git")
            .current_dir(worktree_path)
            .args(["add", "-A"])
            .output();
    }
    Ok(status.success())
}
/// Check if a process is running by PID
fn is_process_running(pid: u32) -> bool {
// Use kill -0 to check if process exists

648
crates/studio/src/sdlc.rs Normal file
View File

@@ -0,0 +1,648 @@
//! SDLC Pipeline - Software Development Life Cycle maintenance pipeline
//!
//! Orchestrates a sequence of g3 agents to maintain and improve the codebase:
//! 1. euler - Dependency graph and hotspots analysis
//! 2. breaker - Whitebox exploration and edge-case discovery
//! 3. hopper - Deep testing and regression integrity
//! 4. fowler - Refactoring to deduplicate and reduce complexity
//! 5. carmack - In-place rewriting for readability and concision
//! 6. lamport - Human-readable documentation and validation
//! 7. huffman - Semantic compression of memory
use anyhow::{Context, Result, bail};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::{Path, PathBuf};
/// Pipeline stage definition
///
/// A static description of one pipeline step; runtime progress is tracked
/// separately in `StageState`. All fields are `&'static str` because stages
/// are only ever defined in the `PIPELINE_STAGES` constant.
#[derive(Debug, Clone)]
pub struct Stage {
    /// Agent name (e.g., "euler")
    pub name: &'static str,
    /// Human-readable description
    pub description: &'static str,
    /// What this agent focuses on
    pub focus: &'static str,
}
/// The ordered pipeline stages
///
/// Order matters: stages run sequentially from first to last
/// (euler → breaker → hopper → fowler → carmack → lamport → huffman),
/// and `PipelineState.current_stage` indexes into this array.
pub static PIPELINE_STAGES: &[Stage] = &[
    Stage {
        name: "euler",
        description: "Dependency Analysis",
        focus: "dependency graph and hotspots",
    },
    Stage {
        name: "breaker",
        description: "Edge Case Discovery",
        focus: "whitebox exploration and failure cases",
    },
    Stage {
        name: "hopper",
        description: "Testing & Verification",
        focus: "deep testing and regression integrity",
    },
    Stage {
        name: "fowler",
        description: "Refactoring",
        focus: "deduplication and complexity reduction",
    },
    Stage {
        name: "carmack",
        description: "Code Polish",
        focus: "readability, modularity and concision",
    },
    Stage {
        name: "lamport",
        description: "Documentation",
        focus: "human-readable docs and validation",
    },
    Stage {
        name: "huffman",
        description: "Memory Compression",
        focus: "semantic compression to preserve signal",
    },
];
/// Status of a single stage execution
///
/// Persisted to pipeline.json via serde's default (externally tagged)
/// enum representation.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum StageStatus {
    /// Not yet started
    Pending,
    /// Currently running — a stage found in this state on resume is treated
    /// as crashed and reset to `Pending` (see `PipelineState::resume`).
    Running,
    /// Completed successfully
    Complete {
        // Wall-clock seconds the stage's agent run took.
        duration_secs: u64,
        // Number of commits the stage covered.
        commits_processed: u32,
    },
    /// Failed with error
    Failed {
        // Human-readable error message from the failed run.
        error: String,
        // How many times this stage has failed; incremented by mark_failed.
        attempts: u32,
    },
    /// Skipped (e.g., no new commits)
    Skipped { reason: String },
}
/// State of a single stage in the pipeline
///
/// One entry per `PIPELINE_STAGES` element, stored in `PipelineState.stages`
/// and persisted alongside it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageState {
    /// Agent name
    pub name: String,
    /// Current status
    pub status: StageStatus,
    /// When this stage started (if running or complete)
    pub started_at: Option<DateTime<Utc>>,
    /// When this stage completed (if complete)
    pub completed_at: Option<DateTime<Utc>>,
    /// Commit hash when this stage last ran
    pub last_commit: Option<String>,
}
impl StageState {
/// Create a new pending stage state
pub fn new(name: &str) -> Self {
Self {
name: name.to_string(),
status: StageStatus::Pending,
started_at: None,
completed_at: None,
last_commit: None,
}
}
}
/// The full pipeline state, persisted to disk
///
/// Serialized as pretty JSON to `analysis/sdlc/pipeline.json` (see
/// `state_path`) after every transition, enabling crash recovery.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineState {
    /// Unique run identifier (first 8 hex chars of a v4 UUID)
    pub run_id: String,
    /// When this pipeline run started
    pub started_at: DateTime<Utc>,
    /// When this pipeline run completed (if complete)
    pub completed_at: Option<DateTime<Utc>>,
    /// Current stage index (0-based, indexes PIPELINE_STAGES and stages)
    pub current_stage: usize,
    /// State of each stage (same length/order as PIPELINE_STAGES)
    pub stages: Vec<StageState>,
    /// The commit cursor - commits up to this hash have been processed
    pub commit_cursor: Option<String>,
    /// Number of commits to process per run
    pub commits_per_run: u32,
    /// Git worktree session ID (for crash recovery)
    pub session_id: Option<String>,
}
impl PipelineState {
    /// Create a new pipeline state
    ///
    /// All stages start `Pending`; the run id is the first 8 hex characters
    /// of a freshly generated v4 UUID.
    pub fn new(commits_per_run: u32) -> Self {
        let run_id = uuid::Uuid::new_v4().to_string()[..8].to_string();
        let stages = PIPELINE_STAGES
            .iter()
            .map(|s| StageState::new(s.name))
            .collect();
        Self {
            run_id,
            started_at: Utc::now(),
            completed_at: None,
            current_stage: 0,
            stages,
            commit_cursor: None,
            commits_per_run,
            session_id: None,
        }
    }
    /// Get the path to the pipeline state file
    pub fn state_path(repo_root: &Path) -> PathBuf {
        repo_root.join("analysis").join("sdlc").join("pipeline.json")
    }
    /// Get the path to the SDLC directory
    pub fn sdlc_dir(repo_root: &Path) -> PathBuf {
        repo_root.join("analysis").join("sdlc")
    }
    /// Load pipeline state from disk, or return None if not found
    ///
    /// A file that exists but fails to deserialize is treated as corrupt:
    /// a warning is printed to stderr and `None` is returned so the caller
    /// starts a fresh run instead of erroring out.
    pub fn load(repo_root: &Path) -> Result<Option<Self>> {
        let path = Self::state_path(repo_root);
        if !path.exists() {
            return Ok(None);
        }
        let content = fs::read_to_string(&path)
            .context("Failed to read pipeline state")?;
        // Handle corrupted state gracefully
        match serde_json::from_str(&content) {
            Ok(state) => Ok(Some(state)),
            Err(e) => {
                eprintln!("⚠️  Pipeline state corrupted, starting fresh: {}", e);
                Ok(None)
            }
        }
    }
    /// Save pipeline state to disk
    ///
    /// Creates the analysis/sdlc directory if needed and writes pretty JSON.
    /// Called after every state transition for crash recovery.
    pub fn save(&self, repo_root: &Path) -> Result<()> {
        let dir = Self::sdlc_dir(repo_root);
        fs::create_dir_all(&dir)
            .context("Failed to create analysis/sdlc directory")?;
        let path = Self::state_path(repo_root);
        let json = serde_json::to_string_pretty(self)
            .context("Failed to serialize pipeline state")?;
        fs::write(&path, json)
            .context("Failed to write pipeline state")?;
        Ok(())
    }
    /// Delete pipeline state from disk
    ///
    /// No-op when the file does not exist.
    pub fn delete(repo_root: &Path) -> Result<()> {
        let path = Self::state_path(repo_root);
        if path.exists() {
            fs::remove_file(&path)
                .context("Failed to delete pipeline state")?;
        }
        Ok(())
    }
    /// Check if the pipeline is complete
    ///
    /// True when every stage is either Complete or Skipped.
    pub fn is_complete(&self) -> bool {
        self.stages.iter().all(|s| {
            matches!(
                s.status,
                StageStatus::Complete { .. } | StageStatus::Skipped { .. }
            )
        })
    }
    /// Check if the pipeline has any failures
    pub fn has_failures(&self) -> bool {
        self.stages.iter().any(|s| matches!(s.status, StageStatus::Failed { .. }))
    }
    /// Get the current stage definition
    ///
    /// None when current_stage has advanced past the last stage.
    pub fn current_stage_def(&self) -> Option<&'static Stage> {
        PIPELINE_STAGES.get(self.current_stage)
    }
    /// Mark the current stage as running
    pub fn mark_running(&mut self) {
        if let Some(stage) = self.stages.get_mut(self.current_stage) {
            stage.status = StageStatus::Running;
            stage.started_at = Some(Utc::now());
        }
    }
    /// Mark the current stage as complete and advance
    ///
    /// Records duration/commit count on the stage, advances current_stage
    /// (or sets completed_at on the final stage), and moves the commit
    /// cursor forward to `commit_hash`.
    pub fn mark_complete(&mut self, duration_secs: u64, commits_processed: u32, commit_hash: &str) {
        if let Some(stage) = self.stages.get_mut(self.current_stage) {
            stage.status = StageStatus::Complete {
                duration_secs,
                commits_processed,
            };
            stage.completed_at = Some(Utc::now());
            stage.last_commit = Some(commit_hash.to_string());
        }
        // Advance to next stage
        if self.current_stage < PIPELINE_STAGES.len() - 1 {
            self.current_stage += 1;
        } else {
            // Pipeline complete
            self.completed_at = Some(Utc::now());
        }
        // Update cursor
        self.commit_cursor = Some(commit_hash.to_string());
    }
    /// Mark the current stage as failed
    ///
    /// Increments the attempt counter if the stage had already failed;
    /// does NOT advance current_stage, so the same stage is retried.
    pub fn mark_failed(&mut self, error: &str) {
        if let Some(stage) = self.stages.get_mut(self.current_stage) {
            let attempts = match &stage.status {
                StageStatus::Failed { attempts, .. } => attempts + 1,
                _ => 1,
            };
            stage.status = StageStatus::Failed {
                error: error.to_string(),
                attempts,
            };
        }
    }
    /// Mark the current stage as skipped
    ///
    /// Like mark_complete it advances the stage index, but leaves the
    /// commit cursor untouched.
    #[allow(dead_code)]
    pub fn mark_skipped(&mut self, reason: &str) {
        if let Some(stage) = self.stages.get_mut(self.current_stage) {
            stage.status = StageStatus::Skipped {
                reason: reason.to_string(),
            };
            stage.completed_at = Some(Utc::now());
        }
        // Advance to next stage
        if self.current_stage < PIPELINE_STAGES.len() - 1 {
            self.current_stage += 1;
        } else {
            self.completed_at = Some(Utc::now());
        }
    }
    /// Retry the current failed stage
    ///
    /// Resets a Failed stage back to Pending; errors if the current stage
    /// is not in the Failed state.
    #[allow(dead_code)]
    pub fn retry_stage(&mut self) -> Result<()> {
        if let Some(stage) = self.stages.get_mut(self.current_stage) {
            match &stage.status {
                StageStatus::Failed { .. } => {
                    // Keep the attempt count but reset to pending
                    stage.status = StageStatus::Pending;
                    stage.started_at = None;
                    Ok(())
                }
                _ => bail!("Stage '{}' is not in failed state", stage.name),
            }
        } else {
            bail!("Invalid current stage index")
        }
    }
    /// Find the first incomplete stage (for resumption)
    ///
    /// Returns the index of the first Pending/Running/Failed stage, or
    /// stages.len() when everything is Complete/Skipped.
    pub fn find_resume_point(&self) -> usize {
        for (i, stage) in self.stages.iter().enumerate() {
            match &stage.status {
                StageStatus::Pending | StageStatus::Running | StageStatus::Failed { .. } => {
                    return i;
                }
                _ => continue,
            }
        }
        // All complete
        self.stages.len()
    }
    /// Resume from the first incomplete stage
    ///
    /// Repositions current_stage; a stage left in Running (crash mid-stage)
    /// is reset to Pending so it re-runs cleanly.
    pub fn resume(&mut self) {
        self.current_stage = self.find_resume_point();
        // If current stage was running (crashed), reset to pending
        if let Some(stage) = self.stages.get_mut(self.current_stage) {
            if matches!(stage.status, StageStatus::Running) {
                stage.status = StageStatus::Pending;
                stage.started_at = None;
            }
        }
    }
}
/// Look up a pipeline stage definition by its agent name.
#[allow(dead_code)]
pub fn get_stage(name: &str) -> Option<&'static Stage> {
    PIPELINE_STAGES.iter().find(|stage| name == stage.name)
}
/// Look up a stage's 0-based position in the pipeline by agent name.
#[allow(dead_code)]
pub fn get_stage_index(name: &str) -> Option<usize> {
    PIPELINE_STAGES
        .iter()
        .enumerate()
        .find_map(|(idx, stage)| if stage.name == name { Some(idx) } else { None })
}
/// Display the pipeline with current stage highlighted
///
/// Renders one row per stage inside a box, with a status icon and ANSI
/// color per status (○ pending, ◉ running, ✓ complete, ✗ failed,
/// ⊘ skipped) and the current stage in bold.
pub fn display_pipeline(state: &PipelineState) {
    println!();
    println!("\x1b[1m┌─────────────────────────────────────────────────────────────┐\x1b[0m");
    println!("\x1b[1m│ SDLC Pipeline │\x1b[0m");
    println!("\x1b[1m├─────────────────────────────────────────────────────────────┤\x1b[0m");
    for (i, stage_def) in PIPELINE_STAGES.iter().enumerate() {
        let stage_state = &state.stages[i];
        let is_current = i == state.current_stage;
        // Status icons restored per the workspace-memory spec (○/◉/✓/✗/⊘);
        // they had been lost (empty strings) in the original.
        let (icon, color) = match &stage_state.status {
            StageStatus::Pending => ("○", "\x1b[90m"),         // Gray
            StageStatus::Running => ("◉", "\x1b[33m"),         // Yellow
            StageStatus::Complete { .. } => ("✓", "\x1b[32m"), // Green
            StageStatus::Failed { .. } => ("✗", "\x1b[31m"),   // Red
            StageStatus::Skipped { .. } => ("⊘", "\x1b[90m"),  // Gray
        };
        // Bold only the current stage's row.
        let highlight = if is_current { "\x1b[1m" } else { "" };
        // Pad to fixed width so rows line up inside the box.
        let padded = format!("{:<57}", format!("{} {:<10} - {}", icon, stage_def.name, stage_def.description));
        // BUG FIX: the previous code emitted the reset sequence *before* the
        // text on the current row (so it printed unstyled) and added a stray
        // leading space on other rows (misaligning them). Emit
        // color → highlight → text → reset uniformly.
        println!("{}{}{}\x1b[0m", color, highlight, padded);
    }
    println!("\x1b[1m└─────────────────────────────────────────────────────────────┘\x1b[0m");
    println!();
}
/// Display a compact single-line status for the current stage
///
/// Prints nothing once the stage index has advanced past the final stage.
pub fn display_current_stage(state: &PipelineState) {
    if let Some(stage) = state.current_stage_def() {
        let position = state.current_stage + 1;
        let total = PIPELINE_STAGES.len();
        println!(
            "\x1b[1;32msdlc:\x1b[0m stage {}/{} \x1b[1m{}\x1b[0m - {}",
            position, total, stage.name, stage.focus
        );
    }
}
/// Generate a summary of the pipeline run
///
/// Produces a markdown document: run metadata (id, start/end, total
/// duration), a per-stage results table, and aggregate counts of
/// completed/failed/skipped stages plus total commits processed.
pub fn generate_summary(state: &PipelineState) -> String {
    let mut summary = String::new();
    summary.push_str("\n## SDLC Pipeline Summary\n\n");
    summary.push_str(&format!("**Run ID:** {}\n", state.run_id));
    summary.push_str(&format!("**Started:** {}\n", state.started_at.format("%Y-%m-%d %H:%M:%S UTC")));
    if let Some(completed) = state.completed_at {
        summary.push_str(&format!("**Completed:** {}\n", completed.format("%Y-%m-%d %H:%M:%S UTC")));
        let duration = completed.signed_duration_since(state.started_at);
        summary.push_str(&format!("**Total Duration:** {}\n", format_duration(duration.num_seconds() as u64)));
    }
    summary.push_str("\n### Stage Results\n\n");
    summary.push_str("| Stage | Status | Duration | Commits |\n");
    summary.push_str("|-------|--------|----------|---------|\n");
    // Aggregates for the trailing Summary section.
    let mut total_commits = 0u32;
    let mut completed_count = 0;
    let mut failed_count = 0;
    let mut skipped_count = 0;
    for (i, stage_def) in PIPELINE_STAGES.iter().enumerate() {
        let stage_state = &state.stages[i];
        // One table row per stage; "-" marks columns that don't apply.
        let (status_str, duration_str, commits_str) = match &stage_state.status {
            StageStatus::Pending => ("⏳ Pending".to_string(), "-".to_string(), "-".to_string()),
            StageStatus::Running => ("🔄 Running".to_string(), "-".to_string(), "-".to_string()),
            StageStatus::Complete { duration_secs, commits_processed } => {
                completed_count += 1;
                total_commits += commits_processed;
                (
                    "✅ Complete".to_string(),
                    format_duration(*duration_secs),
                    commits_processed.to_string(),
                )
            }
            StageStatus::Failed { error: _, attempts } => {
                failed_count += 1;
                (
                    format!("❌ Failed ({}x)", attempts),
                    "-".to_string(),
                    "-".to_string(),
                )
            }
            StageStatus::Skipped { reason: _ } => {
                skipped_count += 1;
                // Idiom fix: was `format!("⊘ Skipped")` with no interpolation
                // (clippy::useless_format).
                (
                    "⊘ Skipped".to_string(),
                    "-".to_string(),
                    "-".to_string(),
                )
            }
        };
        summary.push_str(&format!(
            "| {} | {} | {} | {} |\n",
            stage_def.name, status_str, duration_str, commits_str
        ));
    }
    summary.push_str("\n### Summary\n\n");
    summary.push_str(&format!("- **Completed:** {} stages\n", completed_count));
    summary.push_str(&format!("- **Failed:** {} stages\n", failed_count));
    summary.push_str(&format!("- **Skipped:** {} stages\n", skipped_count));
    summary.push_str(&format!("- **Total Commits Processed:** {}\n", total_commits));
    summary
}
/// Render a whole-second count as a compact human-readable duration:
/// "42s", "3m 5s", or "2h 17m".
fn format_duration(secs: u64) -> String {
    match secs {
        s if s < 60 => format!("{}s", s),
        s if s < 3600 => format!("{}m {}s", s / 60, s % 60),
        s => format!("{}h {}m", s / 3600, (s % 3600) / 60),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    // Verify the canonical agent ordering the pipeline depends on.
    #[test]
    fn test_pipeline_stages_order() {
        let names: Vec<_> = PIPELINE_STAGES.iter().map(|s| s.name).collect();
        assert_eq!(
            names,
            vec!["euler", "breaker", "hopper", "fowler", "carmack", "lamport", "huffman"]
        );
    }
    // A fresh state has one Pending entry per stage and starts at stage 0.
    #[test]
    fn test_pipeline_state_new() {
        let state = PipelineState::new(10);
        assert_eq!(state.stages.len(), 7);
        assert_eq!(state.current_stage, 0);
        assert_eq!(state.commits_per_run, 10);
        assert!(state.stages.iter().all(|s| s.status == StageStatus::Pending));
    }
    // Round-trip: save to a temp repo root, load back, compare identity.
    #[test]
    fn test_pipeline_state_save_load() {
        let temp_dir = TempDir::new().unwrap();
        let repo_root = temp_dir.path();
        let state = PipelineState::new(10);
        state.save(repo_root).unwrap();
        let loaded = PipelineState::load(repo_root).unwrap().unwrap();
        assert_eq!(loaded.run_id, state.run_id);
        assert_eq!(loaded.stages.len(), 7);
    }
    // Missing state file yields Ok(None), not an error.
    #[test]
    fn test_pipeline_state_missing_returns_none() {
        let temp_dir = TempDir::new().unwrap();
        let result = PipelineState::load(temp_dir.path()).unwrap();
        assert!(result.is_none());
    }
    // Corrupt JSON is tolerated: load returns None so a fresh run starts.
    #[test]
    fn test_pipeline_state_corrupted_returns_none() {
        let temp_dir = TempDir::new().unwrap();
        let repo_root = temp_dir.path();
        // Create corrupted state file
        let dir = PipelineState::sdlc_dir(repo_root);
        fs::create_dir_all(&dir).unwrap();
        fs::write(PipelineState::state_path(repo_root), "not valid json").unwrap();
        let result = PipelineState::load(repo_root).unwrap();
        assert!(result.is_none());
    }
    // Completing a stage advances the index and moves the commit cursor.
    #[test]
    fn test_mark_complete_advances_stage() {
        let mut state = PipelineState::new(10);
        assert_eq!(state.current_stage, 0);
        state.mark_running();
        state.mark_complete(60, 5, "abc123");
        assert_eq!(state.current_stage, 1);
        assert!(matches!(state.stages[0].status, StageStatus::Complete { .. }));
        assert_eq!(state.commit_cursor, Some("abc123".to_string()));
    }
    // Repeated failures on the same stage increment the attempt counter.
    #[test]
    fn test_mark_failed_tracks_attempts() {
        let mut state = PipelineState::new(10);
        state.mark_failed("error 1");
        if let StageStatus::Failed { attempts, .. } = &state.stages[0].status {
            assert_eq!(*attempts, 1);
        } else {
            panic!("Expected Failed status");
        }
        state.mark_failed("error 2");
        if let StageStatus::Failed { attempts, .. } = &state.stages[0].status {
            assert_eq!(*attempts, 2);
        } else {
            panic!("Expected Failed status");
        }
    }
    // retry_stage resets a Failed stage back to Pending.
    #[test]
    fn test_retry_stage() {
        let mut state = PipelineState::new(10);
        state.mark_failed("some error");
        state.retry_stage().unwrap();
        assert_eq!(state.stages[0].status, StageStatus::Pending);
    }
    // retry_stage on a non-Failed stage is an error.
    #[test]
    fn test_retry_non_failed_stage_errors() {
        let mut state = PipelineState::new(10);
        let result = state.retry_stage();
        assert!(result.is_err());
    }
    // Resume point is the first stage that isn't Complete/Skipped.
    #[test]
    fn test_find_resume_point() {
        let mut state = PipelineState::new(10);
        // Complete first two stages
        state.mark_running();
        state.mark_complete(60, 5, "abc");
        state.mark_running();
        state.mark_complete(60, 5, "def");
        // Fail the third
        state.mark_failed("error");
        assert_eq!(state.find_resume_point(), 2);
    }
    // A stage left Running (crash) is reset to Pending by resume().
    #[test]
    fn test_resume_from_running_state() {
        let mut state = PipelineState::new(10);
        state.mark_running();
        // Simulate crash - stage is still "running"
        state.resume();
        assert_eq!(state.current_stage, 0);
        assert_eq!(state.stages[0].status, StageStatus::Pending);
    }
    // Pipeline is complete only after every stage finishes.
    #[test]
    fn test_is_complete() {
        let mut state = PipelineState::new(10);
        assert!(!state.is_complete());
        // Complete all stages
        for _ in 0..7 {
            state.mark_running();
            state.mark_complete(60, 5, "abc");
        }
        assert!(state.is_complete());
    }
    #[test]
    fn test_get_stage() {
        assert!(get_stage("euler").is_some());
        assert!(get_stage("unknown").is_none());
    }
    #[test]
    fn test_get_stage_index() {
        assert_eq!(get_stage_index("euler"), Some(0));
        assert_eq!(get_stage_index("breaker"), Some(1));
        assert_eq!(get_stage_index("huffman"), Some(6));
        assert_eq!(get_stage_index("unknown"), None);
    }
}