//! Acceptance-criteria MCP tools (todos, record_tests, ensure_acceptance, check/edit/add/remove). #![allow(unused_imports, dead_code)] #[allow(unused_imports)] use crate::agents::{ close_bug_to_archive, feature_branch_has_unmerged_changes, move_story_to_done, }; use crate::db::yaml_legacy::parse_front_matter; use crate::http::context::AppContext; use crate::http::workflow::{ add_criterion_to_file, check_criterion_in_file, create_bug_file, create_refactor_file, create_spike_file, create_story_file, edit_criterion_in_file, list_bug_files, list_refactor_files, load_pipeline_state, load_upcoming_stories, remove_criterion_from_file, update_story_in_file, validate_story_dirs, }; use crate::io::story_metadata::{ check_archived_deps, check_archived_deps_from_list, parse_unchecked_todos, }; use crate::service::story::parse_test_cases; use crate::slog_warn; #[allow(unused_imports)] use crate::workflow::{ TestCaseResult, TestStatus, WorkflowState, evaluate_acceptance_with_coverage, }; use serde_json::{Value, json}; use std::collections::HashMap; use std::fs; use std::path::Path; use std::process::Command; pub(crate) fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let root = ctx.state.get_project_root()?; // Read from DB content store, falling back to filesystem. 
let contents = crate::http::workflow::read_story_content(&root, story_id) .map_err(|_| format!("Story file not found: {story_id}.md"))?; let story_name = parse_front_matter(&contents).ok().and_then(|m| m.name); let todos = parse_unchecked_todos(&contents); serde_json::to_string_pretty(&json!({ "story_id": story_id, "story_name": story_name, "todos": todos, })) .map_err(|e| format!("Serialization error: {e}")) } pub(crate) fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let unit = parse_test_cases(args.get("unit"))?; let integration = parse_test_cases(args.get("integration"))?; let mut workflow = ctx .workflow .lock() .map_err(|e| format!("Lock error: {e}"))?; workflow.record_test_results_validated(story_id.to_string(), unit, integration)?; // Persist to story file (best-effort — file write errors are warnings, not failures). if let Ok(project_root) = ctx.state.get_project_root() && let Some(results) = workflow.results.get(story_id) && let Err(e) = crate::http::workflow::write_test_results_to_story_file( &project_root, story_id, results, ) { slog_warn!("[record_tests] Could not persist results to story file: {e}"); } Ok("Test results recorded.".to_string()) } pub(crate) fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let workflow = ctx .workflow .lock() .map_err(|e| format!("Lock error: {e}"))?; // Use in-memory results if present; otherwise fall back to file-persisted results. 
let file_results; let results = if let Some(r) = workflow.results.get(story_id) { r } else { let project_root = ctx.state.get_project_root().ok(); file_results = project_root.as_deref().and_then(|root| { crate::http::workflow::read_test_results_from_story_file(root, story_id) }); file_results.as_ref().map_or_else( || { // No results anywhere — use empty default for the acceptance check // (it will fail with "No test results recorded") static EMPTY: std::sync::OnceLock = std::sync::OnceLock::new(); EMPTY.get_or_init(Default::default) }, |r| r, ) }; let coverage = workflow.coverage.get(story_id); let decision = evaluate_acceptance_with_coverage(results, coverage); if decision.can_accept { Ok("Story can be accepted. All gates pass.".to_string()) } else { let mut parts = decision.reasons; if let Some(w) = decision.warning { parts.push(w); } Err(format!("Acceptance blocked: {}", parts.join("; "))) } } pub(crate) fn tool_check_criterion(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let criterion_index = args .get("criterion_index") .and_then(|v| v.as_u64()) .ok_or("Missing required argument: criterion_index")? as usize; let root = ctx.state.get_project_root()?; // Hard gate: reject if no corroborating evidence exists for this criterion. if let Ok(contents) = crate::http::workflow::read_story_content(&root, story_id) { let ac_text = find_unchecked_criterion_text(&contents, criterion_index).unwrap_or_default(); let workflow = ctx .workflow .lock() .map_err(|e| format!("Lock error: {e}"))?; validate_criterion_check(&root, story_id, &ac_text, &workflow)?; } check_criterion_in_file(&root, story_id, criterion_index)?; Ok(format!( "Criterion {criterion_index} checked for story '{story_id}'. Committed to master." )) } /// Extract the text of the Nth unchecked criterion (`- [ ]`) from story content. 
fn find_unchecked_criterion_text(contents: &str, criterion_index: usize) -> Option { let mut count = 0usize; for line in contents.lines() { let trimmed = line.trim(); if let Some(rest) = trimmed.strip_prefix("- [ ] ") { if count == criterion_index { return Some(rest.to_string()); } count += 1; } } None } /// Validate that there is corroborating evidence before marking a criterion done. /// /// Checks three signals (OR-joined): /// - A: feature branch has at least one commit since master /// - B: AC text mentions a file path that the branch's commits touched /// - C: a recorded test name fuzzy-matches the AC text /// /// Returns `Ok(())` when at least one signal passes, or `Err(message)` describing /// what evidence is missing so the agent knows what to do next. fn validate_criterion_check( project_root: &Path, story_id: &str, ac_text: &str, workflow: &WorkflowState, ) -> Result<(), String> { let branch = format!("feature/story-{story_id}"); // ── A: branch has commits vs master ────────────────────────────────────── let commits = Command::new("git") .args(["log", &format!("master..{branch}"), "--oneline"]) .current_dir(project_root) .output() .ok() .filter(|o| o.status.success()) .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string()) .unwrap_or_default(); if !commits.is_empty() { return Ok(()); } // A fails. Check B and C as fallback evidence. 
// ── B: AC text mentions a file touched by the branch ───────────────────── let changed_files: Vec = Command::new("git") .args(["diff", &format!("master...{branch}"), "--name-only"]) .current_dir(project_root) .output() .ok() .filter(|o| o.status.success()) .map(|o| { String::from_utf8_lossy(&o.stdout) .lines() .map(|l| l.to_string()) .collect() }) .unwrap_or_default(); let ac_lower = ac_text.to_lowercase(); let file_mentioned = changed_files.iter().any(|f| { let fname = Path::new(f) .file_name() .map(|n| n.to_string_lossy().to_lowercase()) .unwrap_or_default(); ac_lower.contains(f) || (!fname.is_empty() && ac_lower.contains(fname.as_str())) }); if file_mentioned { return Ok(()); } // ── C: a recorded test name fuzzy-matches the AC text ──────────────────── let ac_words: Vec<&str> = ac_lower .split(|c: char| !c.is_alphanumeric() && c != '_') .filter(|w| w.len() >= 3) .collect(); let test_match = workflow.results.get(story_id).is_some_and(|results| { let names: Vec = results .unit .iter() .chain(results.integration.iter()) .map(|t| t.name.to_lowercase()) .collect(); ac_words .iter() .any(|word| names.iter().any(|n| n.contains(word))) }); if test_match { return Ok(()); } Err(format!( "No corroborating evidence for criterion '{ac_text}'. \ To proceed: commit your work to '{branch}' (currently has no commits vs master), \ add a passing test whose name matches the criterion, \ or change a file mentioned in the criterion text." )) } pub(crate) fn tool_edit_criterion(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let criterion_index = args .get("criterion_index") .and_then(|v| v.as_u64()) .ok_or("Missing required argument: criterion_index")? 
as usize; let new_text = args .get("new_text") .and_then(|v| v.as_str()) .ok_or("Missing required argument: new_text")?; let root = ctx.state.get_project_root()?; edit_criterion_in_file(&root, story_id, criterion_index, new_text)?; Ok(format!( "Criterion {criterion_index} updated for story '{story_id}'." )) } pub(crate) fn tool_add_criterion(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let criterion = args .get("criterion") .and_then(|v| v.as_str()) .ok_or("Missing required argument: criterion")?; let root = ctx.state.get_project_root()?; add_criterion_to_file(&root, story_id, criterion)?; Ok(format!( "Added criterion to story '{story_id}': - [ ] {criterion}" )) } pub(crate) fn tool_remove_criterion(args: &Value, ctx: &AppContext) -> Result { let story_id = args .get("story_id") .and_then(|v| v.as_str()) .ok_or("Missing required argument: story_id")?; let criterion_index = args .get("criterion_index") .and_then(|v| v.as_u64()) .ok_or("Missing required argument: criterion_index")? as usize; let root = ctx.state.get_project_root()?; remove_criterion_from_file(&root, story_id, criterion_index)?; Ok(format!( "Removed criterion {criterion_index} from story '{story_id}'." 
))
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::http::test_helpers::test_ctx;

    /// Initialise a git repo in `dir` with identity config and one empty
    /// commit on master, so feature branches can be diffed against it.
    fn setup_git_repo_in(dir: &std::path::Path) {
        std::process::Command::new("git")
            .args(["init"])
            .current_dir(dir)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(dir)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(dir)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(dir)
            .output()
            .unwrap();
    }

    #[test]
    fn parse_test_cases_empty() {
        let result = parse_test_cases(None).unwrap();
        assert!(result.is_empty());
    }

    #[test]
    fn parse_test_cases_valid() {
        let input = json!([
            {"name": "test1", "status": "pass"},
            {"name": "test2", "status": "fail", "details": "assertion failed"}
        ]);
        let result = parse_test_cases(Some(&input)).unwrap();
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].status, TestStatus::Pass);
        assert_eq!(result[1].status, TestStatus::Fail);
        assert_eq!(result[1].details, Some("assertion failed".to_string()));
    }

    #[test]
    fn parse_test_cases_invalid_status() {
        let input = json!([{"name": "t", "status": "maybe"}]);
        assert!(parse_test_cases(Some(&input)).is_err());
    }

    #[test]
    fn parse_test_cases_null_value_returns_empty() {
        let null_val = json!(null);
        let result = parse_test_cases(Some(&null_val)).unwrap();
        assert!(result.is_empty());
    }

    #[test]
    fn parse_test_cases_non_array_returns_error() {
        let obj = json!({"invalid": "input"});
        let result = parse_test_cases(Some(&obj));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("Expected array"));
    }

    #[test]
    fn parse_test_cases_missing_name_returns_error() {
        let input = json!([{"status": "pass"}]);
        let result = parse_test_cases(Some(&input));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("name"));
    }

    #[test]
    fn parse_test_cases_missing_status_returns_error() {
        let input = json!([{"name": "test1"}]);
        let result = parse_test_cases(Some(&input));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("status"));
    }

    #[test]
    fn tool_get_story_todos_missing_file() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_get_story_todos(&json!({"story_id": "99_nonexistent"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("not found"));
    }

    #[test]
    fn tool_get_story_todos_returns_unchecked() {
        let tmp = tempfile::tempdir().unwrap();
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9901_test",
            "2_current",
            "---\nname: Test\n---\n## AC\n- [ ] First\n- [x] Done\n- [ ] Second\n",
            crate::db::ItemMeta::from_yaml(
                "---\nname: Test\n---\n## AC\n- [ ] First\n- [x] Done\n- [ ] Second\n",
            ),
        );
        let ctx = test_ctx(tmp.path());
        let result = tool_get_story_todos(&json!({"story_id": "9901_test"}), &ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // Only the two unchecked items count; the `[x]` line is excluded.
        assert_eq!(parsed["todos"].as_array().unwrap().len(), 2);
        assert_eq!(parsed["story_name"], "Test");
    }

    #[test]
    fn tool_record_tests_and_ensure_acceptance() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        // Record passing tests
        let result = tool_record_tests(
            &json!({
                "story_id": "1_test",
                "unit": [{"name": "u1", "status": "pass"}],
                "integration": [{"name": "i1", "status": "pass"}]
            }),
            &ctx,
        )
        .unwrap();
        assert!(result.contains("recorded"));
        // Should be acceptable
        let result = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx).unwrap();
        assert!(result.contains("All gates pass"));
    }

    #[test]
    fn tool_ensure_acceptance_blocks_on_failures() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        tool_record_tests(
            &json!({
                "story_id": "1_test",
                "unit": [{"name": "u1", "status": "fail"}],
                "integration": []
            }),
            &ctx,
        )
        .unwrap();
        let result = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("blocked"));
    }

    #[test]
    fn tool_record_tests_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_record_tests(&json!({"unit": [], "integration": []}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn tool_record_tests_invalid_unit_type_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_record_tests(
            &json!({
                "story_id": "1_test",
                "unit": "not_an_array",
                "integration": []
            }),
            &ctx,
        );
        assert!(result.is_err());
    }

    #[test]
    fn tool_ensure_acceptance_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_ensure_acceptance(&json!({}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn record_tests_persists_to_story_file() {
        let tmp = tempfile::tempdir().unwrap();
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9906_story_persist",
            "2_current",
            "---\nname: Persist\n---\n# Story\n",
            crate::db::ItemMeta::from_yaml("---\nname: Persist\n---\n# Story\n"),
        );
        let ctx = test_ctx(tmp.path());
        tool_record_tests(
            &json!({
                "story_id": "9906_story_persist",
                "unit": [{"name": "u1", "status": "pass"}],
                "integration": []
            }),
            &ctx,
        )
        .unwrap();
        let contents = crate::db::read_content("9906_story_persist")
            .expect("story content should exist in CRDT");
        assert!(
            contents.contains("## Test Results"),
            "content should have Test Results section"
        );
        assert!(
            contents.contains("huskies-test-results:"),
            "content should have JSON marker"
        );
        assert!(contents.contains("u1"), "content should contain test name");
    }

    #[test]
    fn ensure_acceptance_reads_from_file_when_not_in_memory() {
        let tmp = tempfile::tempdir().unwrap();
        // Write story content to CRDT with a pre-populated Test Results section.
        // NOTE(review): the embedded results marker was stripped from the fixture
        // (it reads as an HTML comment) and has been reconstructed here — confirm
        // the JSON shape against write_test_results_to_story_file's output.
        let story_content = "---\nname: Persist\n---\n# Story\n\n## Test Results\n\n<!-- huskies-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"pass\"}],\"integration\":[{\"name\":\"i1\",\"status\":\"pass\"}]} -->\n";
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9905_story_file_only",
            "2_current",
            story_content,
            crate::db::ItemMeta::from_yaml(story_content),
        );
        let ctx = test_ctx(tmp.path());
        // ensure_acceptance should read from content store and succeed
        let result = tool_ensure_acceptance(&json!({"story_id": "9905_story_file_only"}), &ctx);
        assert!(
            result.is_ok(),
            "should accept based on content store data, got: {:?}",
            result
        );
        assert!(result.unwrap().contains("All gates pass"));
    }

    #[test]
    fn ensure_acceptance_file_with_failures_still_blocks() {
        let tmp = tempfile::tempdir().unwrap();
        let current = tmp.path().join(".huskies/work/2_current");
        // Fix: `&current` was garbled into the HTML-entity residue `¤t`.
        fs::create_dir_all(&current).unwrap();
        // NOTE(review): reconstructed marker (see note in the test above) — a
        // failing unit test must make acceptance block.
        let story_content = "---\nname: Fail\n---\n# Story\n\n## Test Results\n\n<!-- huskies-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"fail\"}],\"integration\":[]} -->\n";
        fs::write(current.join("3_story_fail.md"), story_content).unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_ensure_acceptance(&json!({"story_id": "3_story_fail"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("blocked"));
    }

    #[test]
    fn tool_check_criterion_empty_branch_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        // Init a git repo with an initial commit on master (deduplicated: this
        // previously repeated the same four commands inline).
        setup_git_repo_in(tmp.path());
        // Create an empty feature branch (no commits vs master).
        std::process::Command::new("git")
            .args(["checkout", "-b", "feature/story-9997_empty_branch"])
            .current_dir(tmp.path())
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["checkout", "master"])
            .current_dir(tmp.path())
            .output()
            .unwrap();
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9997_empty_branch",
            "2_current",
            "---\nname: Empty Branch Test\n---\n## AC\n- [ ] Implement the feature\n",
            crate::db::ItemMeta::from_yaml(
                "---\nname: Empty Branch Test\n---\n## AC\n- [ ] Implement the feature\n",
            ),
        );
        let ctx = test_ctx(tmp.path());
        let result = tool_check_criterion(
            &json!({"story_id": "9997_empty_branch", "criterion_index": 0}),
            &ctx,
        );
        // No evidence — must error and NOT mark the criterion.
        assert!(
            result.is_err(),
            "Expected error when branch has no commits: {result:?}"
        );
        let err = result.unwrap_err();
        assert!(
            err.contains("No corroborating evidence"),
            "Error should describe missing evidence, got: {err}"
        );
        assert!(
            err.contains("feature/story-9997_empty_branch"),
            "Error should name the branch, got: {err}"
        );
        // Criterion must still be unchecked in the CRDT.
        let contents = crate::db::read_content("9997_empty_branch")
            .expect("story content should still be in CRDT");
        assert!(
            contents.contains("- [ ] Implement the feature"),
            "Criterion should remain unchecked after rejected check_criterion"
        );
    }

    #[test]
    fn tool_check_criterion_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_check_criterion(&json!({"criterion_index": 0}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn tool_check_criterion_missing_criterion_index() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_check_criterion(&json!({"story_id": "1_test"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("criterion_index"));
    }

    #[test]
    fn tool_check_criterion_marks_unchecked_item() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9904_test",
            "2_current",
            "---\nname: Test\n---\n## AC\n- [ ] First criterion\n- [x] Already done\n",
            crate::db::ItemMeta::from_yaml(
                "---\nname: Test\n---\n## AC\n- [ ] First criterion\n- [x] Already done\n",
            ),
        );
        let ctx = test_ctx(tmp.path());
        // Provide signal-C evidence: a test whose name matches "first" from the criterion text.
        tool_record_tests(
            &json!({
                "story_id": "9904_test",
                "unit": [{"name": "first_criterion_check", "status": "pass"}],
                "integration": []
            }),
            &ctx,
        )
        .unwrap();
        let result = tool_check_criterion(
            &json!({"story_id": "9904_test", "criterion_index": 0}),
            &ctx,
        );
        assert!(result.is_ok(), "Expected ok: {result:?}");
        assert!(result.unwrap().contains("Criterion 0 checked"));
    }

    #[test]
    fn tool_remove_criterion_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_remove_criterion(&json!({"criterion_index": 0}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn tool_remove_criterion_missing_criterion_index() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_remove_criterion(&json!({"story_id": "1_test"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("criterion_index"));
    }

    #[test]
    fn tool_remove_criterion_removes_item() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9905_test",
            "2_current",
            "---\nname: Test\n---\n## Acceptance Criteria\n- [ ] Keep me\n- [ ] Remove me\n",
            crate::db::ItemMeta::from_yaml(
                "---\nname: Test\n---\n## Acceptance Criteria\n- [ ] Keep me\n- [ ] Remove me\n",
            ),
        );
        let ctx = test_ctx(tmp.path());
        let result = tool_remove_criterion(
            &json!({"story_id": "9905_test", "criterion_index": 1}),
            &ctx,
        );
        assert!(result.is_ok(), "Expected ok: {result:?}");
        assert!(result.unwrap().contains("Removed criterion 1"));
    }

    #[test]
    fn tool_remove_criterion_out_of_range() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9906_test",
            "2_current",
            "---\nname: Test\n---\n## Acceptance Criteria\n- [ ] Only one\n",
            crate::db::ItemMeta::from_yaml(
                "---\nname: Test\n---\n## Acceptance Criteria\n- [ ] Only one\n",
            ),
        );
        let ctx = test_ctx(tmp.path());
        let result = tool_remove_criterion(
            &json!({"story_id": "9906_test", "criterion_index": 5}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("out of range"));
    }
}