mod bug_ops;
mod story_ops;
mod test_results;

pub use bug_ops::{
    create_bug_file, create_refactor_file, create_spike_file, list_bug_files, list_refactor_files,
};
pub use story_ops::{
    add_criterion_to_file, check_criterion_in_file, create_story_file, update_story_in_file,
};
pub use test_results::{
    read_test_results_from_story_file, write_coverage_baseline_to_story_file,
    write_test_results_to_story_file,
};

use crate::agents::AgentStatus;
use crate::http::context::AppContext;
use crate::io::story_metadata::parse_front_matter;
use serde::Serialize;
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};

/// Agent assignment embedded in a pipeline stage item.
#[derive(Clone, Debug, Serialize)]
pub struct AgentAssignment {
    pub agent_name: String,
    /// Model configured for this agent, taken from the project config
    /// (`cfg.find_agent(...).model`); `None` when no config or no match.
    pub model: Option<String>,
    /// Agent status rendered via `AgentStatus::to_string()` (e.g. "running", "pending").
    pub status: String,
}

/// A single work item as rendered in a pipeline stage list.
#[derive(Clone, Debug, Serialize)]
pub struct UpcomingStory {
    pub story_id: String,
    /// Story name from front matter; `None` when front matter failed to parse.
    pub name: Option<String>,
    /// Front matter parse error, if any, surfaced per-item instead of failing the stage.
    pub error: Option<String>,
    /// Merge failure reason persisted to front matter by the mergemaster agent.
    pub merge_failure: Option<String>,
    /// Active agent working on this item, if any.
    pub agent: Option<AgentAssignment>,
    /// True when the item is held in QA for human review.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub review_hold: Option<bool>,
    /// QA mode for this item: "human", "server", or "agent".
    #[serde(skip_serializing_if = "Option::is_none")]
    pub qa: Option<String>,
    /// Number of retries at the current pipeline stage.
    // NOTE(review): width inferred as u32 to match story-number fields — confirm
    // against `parse_front_matter`'s `retry_count` type.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub retry_count: Option<u32>,
    /// True when the story has exceeded its retry limit and will not be auto-assigned.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub blocked: Option<bool>,
    /// Story numbers this story depends on.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub depends_on: Option<Vec<u32>>,
}

/// Outcome of validating one story file's front matter.
pub struct StoryValidationResult {
    pub story_id: String,
    pub valid: bool,
    /// Semicolon-joined list of validation problems, or the parse error text.
    pub error: Option<String>,
}

/// Full pipeline state across all stages.
#[derive(Clone, Debug, Serialize)] pub struct PipelineState { pub backlog: Vec, pub current: Vec, pub qa: Vec, pub merge: Vec, pub done: Vec, } /// Load the full pipeline state (all 5 active stages). pub fn load_pipeline_state(ctx: &AppContext) -> Result { let agent_map = build_active_agent_map(ctx); Ok(PipelineState { backlog: load_stage_items(ctx, "1_backlog", &HashMap::new())?, current: load_stage_items(ctx, "2_current", &agent_map)?, qa: load_stage_items(ctx, "3_qa", &agent_map)?, merge: load_stage_items(ctx, "4_merge", &agent_map)?, done: load_stage_items(ctx, "5_done", &HashMap::new())?, }) } /// Build a map from story_id → AgentAssignment for all pending/running agents. fn build_active_agent_map(ctx: &AppContext) -> HashMap { let agents = match ctx.agents.list_agents() { Ok(a) => a, Err(_) => return HashMap::new(), }; let config_opt = ctx .state .get_project_root() .ok() .and_then(|root| crate::config::ProjectConfig::load(&root).ok()); let mut map = HashMap::new(); for agent in agents { if !matches!(agent.status, AgentStatus::Pending | AgentStatus::Running) { continue; } let model = config_opt .as_ref() .and_then(|cfg| cfg.find_agent(&agent.agent_name)) .and_then(|ac| ac.model.clone()); map.insert( agent.story_id.clone(), AgentAssignment { agent_name: agent.agent_name, model, status: agent.status.to_string(), }, ); } map } /// Load work items from any pipeline stage directory. fn load_stage_items( ctx: &AppContext, stage_dir: &str, agent_map: &HashMap, ) -> Result, String> { let root = ctx.state.get_project_root()?; let dir = root.join(".huskies").join("work").join(stage_dir); if !dir.exists() { return Ok(Vec::new()); } let mut stories = Vec::new(); for entry in fs::read_dir(&dir) .map_err(|e| format!("Failed to read {stage_dir} directory: {e}"))? 
{ let entry = entry.map_err(|e| format!("Failed to read {stage_dir} entry: {e}"))?; let path = entry.path(); if path.extension().and_then(|ext| ext.to_str()) != Some("md") { continue; } let story_id = path .file_stem() .and_then(|stem| stem.to_str()) .ok_or_else(|| "Invalid story file name.".to_string())? .to_string(); let contents = fs::read_to_string(&path) .map_err(|e| format!("Failed to read story file {}: {e}", path.display()))?; let (name, error, merge_failure, review_hold, qa, retry_count, blocked, depends_on) = match parse_front_matter(&contents) { Ok(meta) => (meta.name, None, meta.merge_failure, meta.review_hold, meta.qa.map(|m| m.as_str().to_string()), meta.retry_count, meta.blocked, meta.depends_on), Err(e) => (None, Some(e.to_string()), None, None, None, None, None, None), }; let agent = agent_map.get(&story_id).cloned(); stories.push(UpcomingStory { story_id, name, error, merge_failure, agent, review_hold, qa, retry_count, blocked, depends_on }); } stories.sort_by(|a, b| a.story_id.cmp(&b.story_id)); Ok(stories) } pub fn load_upcoming_stories(ctx: &AppContext) -> Result, String> { load_stage_items(ctx, "1_backlog", &HashMap::new()) } pub fn validate_story_dirs( root: &std::path::Path, ) -> Result, String> { let mut results = Vec::new(); // Directories to validate: work/2_current/ + work/1_backlog/ let dirs_to_validate: Vec = vec![ root.join(".huskies").join("work").join("2_current"), root.join(".huskies").join("work").join("1_backlog"), ]; for dir in &dirs_to_validate { let subdir = dir.file_name().map(|n| n.to_string_lossy().into_owned()).unwrap_or_default(); if !dir.exists() { continue; } for entry in fs::read_dir(dir).map_err(|e| format!("Failed to read {subdir} directory: {e}"))? 
{ let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?; let path = entry.path(); if path.extension().and_then(|ext| ext.to_str()) != Some("md") { continue; } let story_id = path .file_stem() .and_then(|stem| stem.to_str()) .unwrap_or_default() .to_string(); let contents = fs::read_to_string(&path) .map_err(|e| format!("Failed to read {}: {e}", path.display()))?; match parse_front_matter(&contents) { Ok(meta) => { let mut errors = Vec::new(); if meta.name.is_none() { errors.push("Missing 'name' field".to_string()); } if errors.is_empty() { results.push(StoryValidationResult { story_id, valid: true, error: None, }); } else { results.push(StoryValidationResult { story_id, valid: false, error: Some(errors.join("; ")), }); } } Err(e) => results.push(StoryValidationResult { story_id, valid: false, error: Some(e.to_string()), }), } } } results.sort_by(|a, b| a.story_id.cmp(&b.story_id)); Ok(results) } // ── Shared utilities used by submodules ────────────────────────── /// Locate a work item file by searching all active pipeline stages. /// /// Searches in priority order: 2_current, 1_backlog, 3_qa, 4_merge, 5_done, 6_archived. pub(super) fn find_story_file(project_root: &Path, story_id: &str) -> Result { let filename = format!("{story_id}.md"); let sk = project_root.join(".huskies").join("work"); for stage in &["2_current", "1_backlog", "3_qa", "4_merge", "5_done", "6_archived"] { let path = sk.join(stage).join(&filename); if path.exists() { return Ok(path); } } Err(format!( "Story '{story_id}' not found in any pipeline stage." )) } /// Replace the content of a named `## Section` in a story file. /// /// Finds the first occurrence of `## {section_name}` and replaces everything /// until the next `##` heading (or end of file) with the provided text. /// Returns an error if the section is not found. 
pub(super) fn replace_section_content(content: &str, section_name: &str, new_text: &str) -> Result { let lines: Vec<&str> = content.lines().collect(); let heading = format!("## {section_name}"); let mut section_start: Option = None; let mut section_end: Option = None; for (i, line) in lines.iter().enumerate() { let trimmed = line.trim(); if trimmed == heading { section_start = Some(i); continue; } if section_start.is_some() && trimmed.starts_with("## ") { section_end = Some(i); break; } } let section_start = section_start.ok_or_else(|| format!("Section '{heading}' not found in story file."))?; let mut new_lines: Vec = Vec::new(); // Keep everything up to and including the section heading. for line in lines.iter().take(section_start + 1) { new_lines.push(line.to_string()); } // Blank line, new content, blank line. new_lines.push(String::new()); new_lines.push(new_text.to_string()); new_lines.push(String::new()); // Resume from the next section heading (or EOF). let resume_from = section_end.unwrap_or(lines.len()); for line in lines.iter().skip(resume_from) { new_lines.push(line.to_string()); } let mut new_str = new_lines.join("\n"); if content.ends_with('\n') { new_str.push('\n'); } Ok(new_str) } /// Replace the `## Test Results` section in `contents` with `new_section`, /// or append it if not present. pub(super) fn replace_or_append_section(contents: &str, header: &str, new_section: &str) -> String { let lines: Vec<&str> = contents.lines().collect(); let header_trimmed = header.trim(); // Find the start of the existing section let section_start = lines.iter().position(|l| l.trim() == header_trimmed); if let Some(start) = section_start { // Find the next `##` heading after the section start (the end of this section) let section_end = lines[start + 1..] 
.iter() .position(|l| { let t = l.trim(); t.starts_with("## ") && t != header_trimmed }) .map(|i| start + 1 + i) .unwrap_or(lines.len()); let mut result = lines[..start].join("\n"); if !result.is_empty() { result.push('\n'); } result.push_str(new_section); if section_end < lines.len() { result.push('\n'); result.push_str(&lines[section_end..].join("\n")); } if contents.ends_with('\n') { result.push('\n'); } result } else { // Append at the end let mut result = contents.trim_end_matches('\n').to_string(); result.push_str("\n\n"); result.push_str(new_section); if !result.ends_with('\n') { result.push('\n'); } result } } pub(super) fn slugify_name(name: &str) -> String { let slug: String = name .chars() .map(|c| { if c.is_ascii_alphanumeric() { c.to_ascii_lowercase() } else { '_' } }) .collect(); // Collapse consecutive underscores and trim edges let mut result = String::new(); let mut prev_underscore = true; // start true to trim leading _ for ch in slug.chars() { if ch == '_' { if !prev_underscore { result.push('_'); } prev_underscore = true; } else { result.push(ch); prev_underscore = false; } } // Trim trailing underscore if result.ends_with('_') { result.pop(); } result } /// Scan all `work/` subdirectories for the highest item number across all types (stories, bugs, spikes). pub(super) fn next_item_number(root: &std::path::Path) -> Result { let work_base = root.join(".huskies").join("work"); let mut max_num: u32 = 0; for subdir in &["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] { let dir = work_base.join(subdir); if !dir.exists() { continue; } for entry in fs::read_dir(&dir).map_err(|e| format!("Failed to read {subdir} directory: {e}"))? 
{ let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?; let name = entry.file_name(); let name_str = name.to_string_lossy(); // Filename format: {N}_{type}_{slug}.md — extract leading N let num_str: String = name_str.chars().take_while(|c| c.is_ascii_digit()).collect(); if let Ok(n) = num_str.parse::() && n > max_num { max_num = n; } } } Ok(max_num + 1) } #[cfg(test)] mod tests { use super::*; #[test] fn load_pipeline_state_loads_all_stages() { let tmp = tempfile::tempdir().unwrap(); let root = tmp.path().to_path_buf(); for (stage, id) in &[ ("1_backlog", "10_story_upcoming"), ("2_current", "20_story_current"), ("3_qa", "30_story_qa"), ("4_merge", "40_story_merge"), ("5_done", "50_story_done"), ] { let dir = root.join(".huskies").join("work").join(stage); fs::create_dir_all(&dir).unwrap(); fs::write( dir.join(format!("{id}.md")), format!("---\nname: {id}\n---\n"), ) .unwrap(); } let ctx = crate::http::context::AppContext::new_test(root); let state = load_pipeline_state(&ctx).unwrap(); assert_eq!(state.backlog.len(), 1); assert_eq!(state.backlog[0].story_id, "10_story_upcoming"); assert_eq!(state.current.len(), 1); assert_eq!(state.current[0].story_id, "20_story_current"); assert_eq!(state.qa.len(), 1); assert_eq!(state.qa[0].story_id, "30_story_qa"); assert_eq!(state.merge.len(), 1); assert_eq!(state.merge[0].story_id, "40_story_merge"); assert_eq!(state.done.len(), 1); assert_eq!(state.done[0].story_id, "50_story_done"); } #[test] fn load_upcoming_returns_empty_when_no_dir() { let tmp = tempfile::tempdir().unwrap(); let root = tmp.path().to_path_buf(); // No .huskies directory at all let ctx = crate::http::context::AppContext::new_test(root); let result = load_upcoming_stories(&ctx).unwrap(); assert!(result.is_empty()); } #[test] fn pipeline_state_includes_agent_for_running_story() { let tmp = tempfile::tempdir().unwrap(); let root = tmp.path().to_path_buf(); let current = root.join(".huskies/work/2_current"); fs::create_dir_all(¤t).unwrap(); 
fs::write( current.join("10_story_test.md"), "---\nname: Test Story\n---\n# Story\n", ) .unwrap(); let ctx = crate::http::context::AppContext::new_test(root); ctx.agents.inject_test_agent("10_story_test", "coder-1", crate::agents::AgentStatus::Running); let state = load_pipeline_state(&ctx).unwrap(); assert_eq!(state.current.len(), 1); let item = &state.current[0]; assert!(item.agent.is_some(), "running agent should appear on work item"); let agent = item.agent.as_ref().unwrap(); assert_eq!(agent.agent_name, "coder-1"); assert_eq!(agent.status, "running"); } #[test] fn pipeline_state_no_agent_for_completed_story() { let tmp = tempfile::tempdir().unwrap(); let root = tmp.path().to_path_buf(); let current = root.join(".huskies/work/2_current"); fs::create_dir_all(¤t).unwrap(); fs::write( current.join("11_story_done.md"), "---\nname: Done Story\n---\n# Story\n", ) .unwrap(); let ctx = crate::http::context::AppContext::new_test(root); ctx.agents.inject_test_agent("11_story_done", "coder-1", crate::agents::AgentStatus::Completed); let state = load_pipeline_state(&ctx).unwrap(); assert_eq!(state.current.len(), 1); assert!( state.current[0].agent.is_none(), "completed agent should not appear on work item" ); } #[test] fn pipeline_state_pending_agent_included() { let tmp = tempfile::tempdir().unwrap(); let root = tmp.path().to_path_buf(); let current = root.join(".huskies/work/2_current"); fs::create_dir_all(¤t).unwrap(); fs::write( current.join("12_story_pending.md"), "---\nname: Pending Story\n---\n# Story\n", ) .unwrap(); let ctx = crate::http::context::AppContext::new_test(root); ctx.agents.inject_test_agent("12_story_pending", "coder-1", crate::agents::AgentStatus::Pending); let state = load_pipeline_state(&ctx).unwrap(); assert_eq!(state.current.len(), 1); let item = &state.current[0]; assert!(item.agent.is_some(), "pending agent should appear on work item"); assert_eq!(item.agent.as_ref().unwrap().status, "pending"); } #[test] fn pipeline_state_includes_depends_on() 
{ let tmp = tempfile::tempdir().unwrap(); let backlog = tmp.path().join(".huskies/work/1_backlog"); fs::create_dir_all(&backlog).unwrap(); fs::write( backlog.join("20_story_dependent.md"), "---\nname: Dependent Story\ndepends_on: [10, 11]\n---\n", ) .unwrap(); fs::write( backlog.join("21_story_independent.md"), "---\nname: Independent Story\n---\n", ) .unwrap(); let ctx = crate::http::context::AppContext::new_test(tmp.path().to_path_buf()); let state = load_pipeline_state(&ctx).unwrap(); let dependent = state.backlog.iter().find(|s| s.story_id == "20_story_dependent").unwrap(); assert_eq!(dependent.depends_on, Some(vec![10, 11])); let independent = state.backlog.iter().find(|s| s.story_id == "21_story_independent").unwrap(); assert_eq!(independent.depends_on, None); } #[test] fn load_upcoming_parses_metadata() { let tmp = tempfile::tempdir().unwrap(); let backlog = tmp.path().join(".huskies/work/1_backlog"); fs::create_dir_all(&backlog).unwrap(); fs::write( backlog.join("31_story_view_upcoming.md"), "---\nname: View Upcoming\n---\n# Story\n", ) .unwrap(); fs::write( backlog.join("32_story_worktree.md"), "---\nname: Worktree Orchestration\n---\n# Story\n", ) .unwrap(); let ctx = crate::http::context::AppContext::new_test(tmp.path().to_path_buf()); let stories = load_upcoming_stories(&ctx).unwrap(); assert_eq!(stories.len(), 2); assert_eq!(stories[0].story_id, "31_story_view_upcoming"); assert_eq!(stories[0].name.as_deref(), Some("View Upcoming")); assert_eq!(stories[1].story_id, "32_story_worktree"); assert_eq!(stories[1].name.as_deref(), Some("Worktree Orchestration")); } #[test] fn load_upcoming_skips_non_md_files() { let tmp = tempfile::tempdir().unwrap(); let backlog = tmp.path().join(".huskies/work/1_backlog"); fs::create_dir_all(&backlog).unwrap(); fs::write(backlog.join(".gitkeep"), "").unwrap(); fs::write( backlog.join("31_story_example.md"), "---\nname: A Story\n---\n", ) .unwrap(); let ctx = 
crate::http::context::AppContext::new_test(tmp.path().to_path_buf()); let stories = load_upcoming_stories(&ctx).unwrap(); assert_eq!(stories.len(), 1); assert_eq!(stories[0].story_id, "31_story_example"); } #[test] fn validate_story_dirs_valid_files() { let tmp = tempfile::tempdir().unwrap(); let current = tmp.path().join(".huskies/work/2_current"); let backlog = tmp.path().join(".huskies/work/1_backlog"); fs::create_dir_all(¤t).unwrap(); fs::create_dir_all(&backlog).unwrap(); fs::write( current.join("28_story_todos.md"), "---\nname: Show TODOs\n---\n# Story\n", ) .unwrap(); fs::write( backlog.join("36_story_front_matter.md"), "---\nname: Enforce Front Matter\n---\n# Story\n", ) .unwrap(); let results = validate_story_dirs(tmp.path()).unwrap(); assert_eq!(results.len(), 2); assert!(results.iter().all(|r| r.valid)); assert!(results.iter().all(|r| r.error.is_none())); } #[test] fn validate_story_dirs_missing_front_matter() { let tmp = tempfile::tempdir().unwrap(); let current = tmp.path().join(".huskies/work/2_current"); fs::create_dir_all(¤t).unwrap(); fs::write(current.join("28_story_todos.md"), "# No front matter\n").unwrap(); let results = validate_story_dirs(tmp.path()).unwrap(); assert_eq!(results.len(), 1); assert!(!results[0].valid); assert_eq!(results[0].error.as_deref(), Some("Missing front matter")); } #[test] fn validate_story_dirs_missing_required_fields() { let tmp = tempfile::tempdir().unwrap(); let current = tmp.path().join(".huskies/work/2_current"); fs::create_dir_all(¤t).unwrap(); fs::write(current.join("28_story_todos.md"), "---\n---\n# Story\n").unwrap(); let results = validate_story_dirs(tmp.path()).unwrap(); assert_eq!(results.len(), 1); assert!(!results[0].valid); let err = results[0].error.as_deref().unwrap(); assert!(err.contains("Missing 'name' field")); } #[test] fn validate_story_dirs_empty_when_no_dirs() { let tmp = tempfile::tempdir().unwrap(); let results = validate_story_dirs(tmp.path()).unwrap(); assert!(results.is_empty()); } // --- 
slugify_name tests --- #[test] fn slugify_simple_name() { assert_eq!( slugify_name("Enforce Front Matter on All Story Files"), "enforce_front_matter_on_all_story_files" ); } #[test] fn slugify_with_special_chars() { assert_eq!(slugify_name("Hello, World! (v2)"), "hello_world_v2"); } #[test] fn slugify_leading_trailing_underscores() { assert_eq!(slugify_name(" spaces "), "spaces"); } #[test] fn slugify_consecutive_separators() { assert_eq!(slugify_name("a--b__c d"), "a_b_c_d"); } #[test] fn slugify_empty_after_strip() { assert_eq!(slugify_name("!!!"), ""); } #[test] fn slugify_already_snake_case() { assert_eq!(slugify_name("my_story_name"), "my_story_name"); } // --- next_item_number tests --- #[test] fn next_item_number_empty_dirs() { let tmp = tempfile::tempdir().unwrap(); let base = tmp.path().join(".huskies/work/1_backlog"); fs::create_dir_all(&base).unwrap(); assert_eq!(next_item_number(tmp.path()).unwrap(), 1); } #[test] fn next_item_number_scans_all_dirs() { let tmp = tempfile::tempdir().unwrap(); let backlog = tmp.path().join(".huskies/work/1_backlog"); let current = tmp.path().join(".huskies/work/2_current"); let archived = tmp.path().join(".huskies/work/5_done"); fs::create_dir_all(&backlog).unwrap(); fs::create_dir_all(¤t).unwrap(); fs::create_dir_all(&archived).unwrap(); fs::write(backlog.join("10_story_foo.md"), "").unwrap(); fs::write(current.join("20_story_bar.md"), "").unwrap(); fs::write(archived.join("15_story_baz.md"), "").unwrap(); assert_eq!(next_item_number(tmp.path()).unwrap(), 21); } #[test] fn next_item_number_no_work_dirs() { let tmp = tempfile::tempdir().unwrap(); // No .huskies at all assert_eq!(next_item_number(tmp.path()).unwrap(), 1); } // --- find_story_file tests --- #[test] fn find_story_file_searches_current_then_backlog() { let tmp = tempfile::tempdir().unwrap(); let current = tmp.path().join(".huskies/work/2_current"); let backlog = tmp.path().join(".huskies/work/1_backlog"); fs::create_dir_all(¤t).unwrap(); 
fs::create_dir_all(&backlog).unwrap(); // Only in backlog fs::write(backlog.join("6_test.md"), "").unwrap(); let found = find_story_file(tmp.path(), "6_test").unwrap(); assert!(found.ends_with("1_backlog/6_test.md") || found.ends_with("1_backlog\\6_test.md")); // Also in current — current should win fs::write(current.join("6_test.md"), "").unwrap(); let found = find_story_file(tmp.path(), "6_test").unwrap(); assert!(found.ends_with("2_current/6_test.md") || found.ends_with("2_current\\6_test.md")); } #[test] fn find_story_file_returns_error_when_not_found() { let tmp = tempfile::tempdir().unwrap(); let result = find_story_file(tmp.path(), "99_missing"); assert!(result.is_err()); assert!(result.unwrap_err().contains("not found")); } // --- replace_or_append_section tests --- #[test] fn replace_or_append_section_appends_when_absent() { let contents = "---\nname: T\n---\n# Story\n"; let new = replace_or_append_section(contents, "## Test Results", "## Test Results\n\nfoo\n"); assert!(new.contains("## Test Results")); assert!(new.contains("foo")); assert!(new.contains("# Story")); } #[test] fn replace_or_append_section_replaces_existing() { let contents = "# Story\n\n## Test Results\n\nold content\n\n## Other\n\nother content\n"; let new = replace_or_append_section(contents, "## Test Results", "## Test Results\n\nnew content\n"); assert!(new.contains("new content")); assert!(!new.contains("old content")); assert!(new.contains("## Other")); } }