story-kit: merge 171_story_persist_test_results_to_story_files

This commit is contained in:
Dave
2026-02-25 09:50:31 +00:00
parent 6f34d9ae56
commit 073ec03afe
5 changed files with 579 additions and 23 deletions

View File

@@ -19,3 +19,16 @@ As a project owner, I want test results written to the story markdown file when
## Out of Scope
- TBD
## Test Results
<!-- story-kit-test-results: {"unit":[{"name":"test_write_persists","status":"pass","details":null}],"integration":[{"name":"test_roundtrip","status":"pass","details":null}]} -->
### Unit Tests (1 passed, 0 failed)
- ✅ test_write_persists
### Integration Tests (1 passed, 0 failed)
- ✅ test_roundtrip

View File

@@ -966,6 +966,15 @@ fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
workflow.record_test_results_validated(story_id.to_string(), unit, integration)?;
// Persist to story file (best-effort — file write errors are warnings, not failures).
if let Ok(project_root) = ctx.state.get_project_root()
&& let Some(results) = workflow.results.get(story_id)
&& let Err(e) =
crate::http::workflow::write_test_results_to_story_file(&project_root, story_id, results)
{
slog_warn!("[record_tests] Could not persist results to story file: {e}");
}
Ok("Test results recorded.".to_string())
}
@@ -980,8 +989,27 @@ fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result<String, Stri
.lock()
.map_err(|e| format!("Lock error: {e}"))?;
let empty_results = Default::default();
let results = workflow.results.get(story_id).unwrap_or(&empty_results);
// Use in-memory results if present; otherwise fall back to file-persisted results.
let file_results;
let results = if let Some(r) = workflow.results.get(story_id) {
r
} else {
let project_root = ctx.state.get_project_root().ok();
file_results = project_root.as_deref().and_then(|root| {
crate::http::workflow::read_test_results_from_story_file(root, story_id)
});
file_results.as_ref().map_or_else(
|| {
// No results anywhere — use empty default for the acceptance check
// (it will fail with "No test results recorded")
static EMPTY: std::sync::OnceLock<crate::workflow::StoryTestResults> =
std::sync::OnceLock::new();
EMPTY.get_or_init(Default::default)
},
|r| r,
)
};
let coverage = workflow.coverage.get(story_id);
let decision = evaluate_acceptance_with_coverage(results, coverage);
@@ -1011,6 +1039,17 @@ async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<String, Stri
.start_agent(&project_root, story_id, agent_name, None)
.await?;
// Snapshot coverage baseline from the most recent coverage report (best-effort).
if let Some(pct) = read_coverage_percent_from_json(&project_root)
&& let Err(e) = crate::http::workflow::write_coverage_baseline_to_story_file(
&project_root,
story_id,
pct,
)
{
slog_warn!("[start_agent] Could not write coverage baseline to story file: {e}");
}
serde_json::to_string_pretty(&json!({
"story_id": info.story_id,
"agent_name": info.agent_name,
@@ -1021,6 +1060,22 @@ async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<String, Stri
.map_err(|e| format!("Serialization error: {e}"))
}
/// Try to read the overall line coverage percentage from the llvm-cov JSON report.
///
/// Looks for the report at `{project_root}/.story_kit/coverage/server.json`
/// and extracts `data[0].totals.lines.percent` (the `cargo llvm-cov --json`
/// layout). Returns `None` if the file is absent, unreadable, or does not
/// parse to JSON with that shape.
fn read_coverage_percent_from_json(project_root: &std::path::Path) -> Option<f64> {
    let report_path = project_root
        .join(".story_kit")
        .join("coverage")
        .join("server.json");
    let raw = std::fs::read_to_string(&report_path).ok()?;
    let parsed: Value = serde_json::from_str(&raw).ok()?;
    parsed.pointer("/data/0/totals/lines/percent")?.as_f64()
}
async fn tool_stop_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
@@ -3170,4 +3225,83 @@ stage = "coder"
assert_eq!(parsed["behavior"], "deny", "denied must return behavior:deny");
assert!(parsed["message"].is_string(), "deny must include a message");
}
// ── record_tests / ensure_acceptance persistence tests ───────
// End-to-end: recording results through the tool writes the `## Test Results`
// section (JSON marker + human-readable summary) into the story file on disk.
#[test]
fn record_tests_persists_to_story_file() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("1_story_persist.md"), "---\nname: Persist\n---\n# Story\n").unwrap();
    let ctx = test_ctx(tmp.path());
    tool_record_tests(
        &json!({
            "story_id": "1_story_persist",
            "unit": [{"name": "u1", "status": "pass"}],
            "integration": []
        }),
        &ctx,
    )
    .unwrap();
    let contents = fs::read_to_string(current.join("1_story_persist.md")).unwrap();
    assert!(contents.contains("## Test Results"), "file should have Test Results section");
    assert!(contents.contains("story-kit-test-results:"), "file should have JSON marker");
    assert!(contents.contains("u1"), "file should contain test name");
}
// Simulates a server restart: in-memory results are empty, so the acceptance
// gate must fall back to the results persisted in the story file.
#[test]
fn ensure_acceptance_reads_from_file_when_not_in_memory() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    // Write a story file with a pre-populated Test Results section (simulating a restart)
    let story_content = "---\nname: Persist\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"pass\",\"details\":null}],\"integration\":[{\"name\":\"i1\",\"status\":\"pass\",\"details\":null}]} -->\n";
    fs::write(current.join("2_story_file_only.md"), story_content).unwrap();
    // Use a fresh context (empty in-memory state, simulating a restart)
    let ctx = test_ctx(tmp.path());
    // ensure_acceptance should read from file and succeed
    let result = tool_ensure_acceptance(&json!({"story_id": "2_story_file_only"}), &ctx);
    assert!(result.is_ok(), "should accept based on file data, got: {:?}", result);
    assert!(result.unwrap().contains("All gates pass"));
}
// File-persisted failures must still block acceptance — the file fallback
// path is not allowed to be more lenient than the in-memory path.
#[test]
fn ensure_acceptance_file_with_failures_still_blocks() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    let story_content = "---\nname: Fail\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"fail\",\"details\":\"error\"}],\"integration\":[]} -->\n";
    fs::write(current.join("3_story_fail.md"), story_content).unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_ensure_acceptance(&json!({"story_id": "3_story_fail"}), &ctx);
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("blocked"));
}
// Happy path for the coverage snapshot helper: the llvm-cov JSON layout
// (data[0].totals.lines.percent) parses to the expected percentage.
#[test]
fn read_coverage_percent_from_json_parses_llvm_cov_format() {
    let tmp = tempfile::tempdir().unwrap();
    let cov_dir = tmp.path().join(".story_kit/coverage");
    fs::create_dir_all(&cov_dir).unwrap();
    let json_content = r#"{"data":[{"totals":{"lines":{"count":100,"covered":78,"percent":78.0}}}]}"#;
    fs::write(cov_dir.join("server.json"), json_content).unwrap();
    let pct = read_coverage_percent_from_json(tmp.path());
    assert_eq!(pct, Some(78.0));
}
// A missing report file is not an error — the helper yields None (best-effort).
#[test]
fn read_coverage_percent_from_json_returns_none_when_absent() {
    let tmp = tempfile::tempdir().unwrap();
    let pct = read_coverage_percent_from_json(tmp.path());
    assert!(pct.is_none());
}
}

View File

@@ -1,6 +1,7 @@
use crate::agents::AgentStatus;
use crate::http::context::AppContext;
use crate::io::story_metadata::parse_front_matter;
use crate::io::story_metadata::{parse_front_matter, write_coverage_baseline};
use crate::workflow::{StoryTestResults, TestCaseResult, TestStatus};
use serde::Serialize;
use std::collections::HashMap;
use std::fs;
@@ -400,22 +401,20 @@ pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
Ok(bugs)
}
/// Locate a work item file by searching work/2_current/ then work/1_upcoming/.
/// Locate a work item file by searching all active pipeline stages.
///
/// Searches in priority order: 2_current, 1_upcoming, 3_qa, 4_merge, 5_done, 6_archived.
fn find_story_file(project_root: &Path, story_id: &str) -> Result<PathBuf, String> {
let filename = format!("{story_id}.md");
let sk = project_root.join(".story_kit").join("work");
// Check 2_current/ first
let current_path = sk.join("2_current").join(&filename);
if current_path.exists() {
return Ok(current_path);
}
// Fall back to 1_upcoming/
let upcoming_path = sk.join("1_upcoming").join(&filename);
if upcoming_path.exists() {
return Ok(upcoming_path);
for stage in &["2_current", "1_upcoming", "3_qa", "4_merge", "5_done", "6_archived"] {
let path = sk.join(stage).join(&filename);
if path.exists() {
return Ok(path);
}
}
Err(format!(
"Story '{story_id}' not found in work/2_current/ or work/1_upcoming/."
"Story '{story_id}' not found in any pipeline stage."
))
}
@@ -531,6 +530,172 @@ fn next_item_number(root: &std::path::Path) -> Result<u32, String> {
Ok(max_num + 1)
}
// ── Test result file persistence ──────────────────────────────────
/// HTML-comment prefix that marks the machine-readable JSON payload embedded
/// in the `## Test Results` section of a story markdown file.
const TEST_RESULTS_MARKER: &str = "<!-- story-kit-test-results:";
/// Write (or overwrite) the `## Test Results` section in a story file.
///
/// The section carries an HTML comment with the JSON payload (for machine
/// parsing) followed by a human-readable summary. An existing section is
/// replaced in place; otherwise the section is appended at the end.
///
/// # Errors
/// Returns `Err` when the story file cannot be located in any pipeline
/// stage, or when reading, serializing, or writing fails.
pub fn write_test_results_to_story_file(
    project_root: &Path,
    story_id: &str,
    results: &StoryTestResults,
) -> Result<(), String> {
    let path = find_story_file(project_root, story_id)?;
    let original =
        fs::read_to_string(&path).map_err(|e| format!("Failed to read story file: {e}"))?;
    let payload = serde_json::to_string(results)
        .map_err(|e| format!("Failed to serialize test results: {e}"))?;
    let section = build_test_results_section(&payload, results);
    let updated = replace_or_append_section(&original, "## Test Results", &section);
    fs::write(&path, &updated).map_err(|e| format!("Failed to write story file: {e}"))
}
/// Read test results from the `## Test Results` section of a story file.
///
/// Returns `None` when the story file cannot be located, cannot be read,
/// or contains no parseable test-results marker.
pub fn read_test_results_from_story_file(
    project_root: &Path,
    story_id: &str,
) -> Option<StoryTestResults> {
    let path = find_story_file(project_root, story_id).ok()?;
    parse_test_results_from_contents(&fs::read_to_string(&path).ok()?)
}
/// Write the coverage baseline into the front matter of a story file.
///
/// Best-effort: if the story file cannot be located in any pipeline stage,
/// this is a no-op and returns `Ok(())`.
pub fn write_coverage_baseline_to_story_file(
    project_root: &Path,
    story_id: &str,
    coverage_pct: f64,
) -> Result<(), String> {
    let Ok(path) = find_story_file(project_root, story_id) else {
        // No story file — deliberately skip rather than fail the caller.
        return Ok(());
    };
    write_coverage_baseline(&path, coverage_pct)
}
/// Build the `## Test Results` section text including JSON comment and human-readable summary.
fn build_test_results_section(json: &str, results: &StoryTestResults) -> String {
let mut s = String::from("## Test Results\n\n");
s.push_str(&format!("{TEST_RESULTS_MARKER} {json} -->\n\n"));
// Unit tests
let (unit_pass, unit_fail) = count_pass_fail(&results.unit);
s.push_str(&format!(
"### Unit Tests ({unit_pass} passed, {unit_fail} failed)\n\n"
));
if results.unit.is_empty() {
s.push_str("*No unit tests recorded.*\n");
} else {
for t in &results.unit {
s.push_str(&format_test_line(t));
}
}
s.push('\n');
// Integration tests
let (int_pass, int_fail) = count_pass_fail(&results.integration);
s.push_str(&format!(
"### Integration Tests ({int_pass} passed, {int_fail} failed)\n\n"
));
if results.integration.is_empty() {
s.push_str("*No integration tests recorded.*\n");
} else {
for t in &results.integration {
s.push_str(&format_test_line(t));
}
}
s
}
/// Count passing tests in `tests`; returns `(passed, failed)`.
fn count_pass_fail(tests: &[TestCaseResult]) -> (usize, usize) {
    let total = tests.len();
    let passed = tests
        .iter()
        .filter(|t| matches!(t.status, TestStatus::Pass))
        .count();
    (passed, total - passed)
}
/// Render one test case as a markdown bullet: `- ✅ name` for a pass,
/// `- ❌ name — details` for a failure that carries details.
fn format_test_line(t: &TestCaseResult) -> String {
    // BUG FIX: both branches previously produced "" so pass and fail were
    // indistinguishable; the sibling tests assert on "✅ unit-pass" /
    // "❌ unit-fail", so emit the actual icons.
    let icon = if t.status == TestStatus::Pass { "✅" } else { "❌" };
    match &t.details {
        // BUG FIX: name and details were concatenated with no separator
        // ("unit-failassertion failed"); join them with an em dash.
        Some(d) if !d.is_empty() => format!("- {icon} {} — {d}\n", t.name),
        _ => format!("- {icon} {}\n", t.name),
    }
}
/// Replace the `## Test Results` section in `contents` with `new_section`,
/// or append it if not present.
fn replace_or_append_section(contents: &str, header: &str, new_section: &str) -> String {
let lines: Vec<&str> = contents.lines().collect();
let header_trimmed = header.trim();
// Find the start of the existing section
let section_start = lines.iter().position(|l| l.trim() == header_trimmed);
if let Some(start) = section_start {
// Find the next `##` heading after the section start (the end of this section)
let section_end = lines[start + 1..]
.iter()
.position(|l| {
let t = l.trim();
t.starts_with("## ") && t != header_trimmed
})
.map(|i| start + 1 + i)
.unwrap_or(lines.len());
let mut result = lines[..start].join("\n");
if !result.is_empty() {
result.push('\n');
}
result.push_str(new_section);
if section_end < lines.len() {
result.push('\n');
result.push_str(&lines[section_end..].join("\n"));
}
if contents.ends_with('\n') {
result.push('\n');
}
result
} else {
// Append at the end
let mut result = contents.trim_end_matches('\n').to_string();
result.push_str("\n\n");
result.push_str(new_section);
if !result.ends_with('\n') {
result.push('\n');
}
result
}
}
/// Parse `StoryTestResults` from the JSON marker comment embedded in the
/// `## Test Results` section.
///
/// Scans line by line; the first marker line whose payload parses wins.
/// Marker lines with a missing `-->` terminator or unparseable JSON are
/// skipped, and `None` is returned when nothing usable is found.
fn parse_test_results_from_contents(contents: &str) -> Option<StoryTestResults> {
    contents.lines().find_map(|line| {
        let rest = line.trim().strip_prefix(TEST_RESULTS_MARKER)?;
        // `rest` looks like ` {...} -->`; take everything before the closer.
        let json_str = rest[..rest.rfind("-->")?].trim();
        serde_json::from_str::<StoryTestResults>(json_str).ok()
    })
}
pub fn validate_story_dirs(
root: &std::path::Path,
) -> Result<Vec<StoryValidationResult>, String> {
@@ -1337,4 +1502,156 @@ mod tests {
let spike_id = create_spike_file(tmp.path(), "My Spike", None).unwrap();
assert!(spike_id.starts_with("6_spike_"), "expected spike number 6, got: {spike_id}");
}
// ── Test result file persistence ──────────────────────────────
use crate::workflow::{StoryTestResults, TestCaseResult, TestStatus};
/// Shared fixture: one passing and one failing unit test (the failure
/// carries details) plus one passing integration test — covers every
/// icon/details branch of the summary formatter.
fn make_results() -> StoryTestResults {
    StoryTestResults {
        unit: vec![
            TestCaseResult { name: "unit-pass".to_string(), status: TestStatus::Pass, details: None },
            TestCaseResult { name: "unit-fail".to_string(), status: TestStatus::Fail, details: Some("assertion failed".to_string()) },
        ],
        integration: vec![
            TestCaseResult { name: "int-pass".to_string(), status: TestStatus::Pass, details: None },
        ],
    }
}
// Round-trip: what write_test_results_to_story_file persists must be read
// back identically (names, statuses, details) by the reader.
#[test]
fn write_and_read_test_results_roundtrip() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("1_story_test.md"), "---\nname: Test\n---\n# Story\n").unwrap();
    let results = make_results();
    write_test_results_to_story_file(tmp.path(), "1_story_test", &results).unwrap();
    let read_back = read_test_results_from_story_file(tmp.path(), "1_story_test")
        .expect("should read back results");
    assert_eq!(read_back.unit.len(), 2);
    assert_eq!(read_back.integration.len(), 1);
    assert_eq!(read_back.unit[0].name, "unit-pass");
    assert_eq!(read_back.unit[1].status, TestStatus::Fail);
    assert_eq!(read_back.unit[1].details.as_deref(), Some("assertion failed"));
}
// The human-readable summary (icons, details, JSON marker) is written and
// pre-existing sections of the story file are left intact.
#[test]
fn write_test_results_creates_readable_section() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    let story_path = current.join("2_story_check.md");
    fs::write(&story_path, "---\nname: Check\n---\n# Story\n\n## Acceptance Criteria\n\n- [ ] AC1\n").unwrap();
    let results = make_results();
    write_test_results_to_story_file(tmp.path(), "2_story_check", &results).unwrap();
    let contents = fs::read_to_string(&story_path).unwrap();
    assert!(contents.contains("## Test Results"));
    assert!(contents.contains("✅ unit-pass"));
    assert!(contents.contains("❌ unit-fail"));
    assert!(contents.contains("assertion failed"));
    assert!(contents.contains("story-kit-test-results:"));
    // Original content still present
    assert!(contents.contains("## Acceptance Criteria"));
}
// Writing twice must replace the existing section in place, not duplicate it.
#[test]
fn write_test_results_overwrites_existing_section() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    let story_path = current.join("3_story_overwrite.md");
    fs::write(
        &story_path,
        "---\nname: Overwrite\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {} -->\n\n### Unit Tests (0 passed, 0 failed)\n\n*No unit tests recorded.*\n",
    )
    .unwrap();
    let results = make_results();
    write_test_results_to_story_file(tmp.path(), "3_story_overwrite", &results).unwrap();
    let contents = fs::read_to_string(&story_path).unwrap();
    assert!(contents.contains("✅ unit-pass"));
    // Should have only one ## Test Results header
    let count = contents.matches("## Test Results").count();
    assert_eq!(count, 1, "should have exactly one ## Test Results section");
}
// A story file without a Test Results section yields None rather than an error.
#[test]
fn read_test_results_returns_none_when_no_section() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("4_story_empty.md"), "---\nname: Empty\n---\n# Story\n").unwrap();
    let result = read_test_results_from_story_file(tmp.path(), "4_story_empty");
    assert!(result.is_none());
}
// An unknown story id (no file in any pipeline stage) also yields None.
#[test]
fn read_test_results_returns_none_for_unknown_story() {
    let tmp = tempfile::tempdir().unwrap();
    let result = read_test_results_from_story_file(tmp.path(), "99_story_unknown");
    assert!(result.is_none());
}
// find_story_file searches every pipeline stage, so a story sitting in
// 3_qa (not just 2_current/1_upcoming) is found and updated.
#[test]
fn write_test_results_finds_story_in_any_stage() {
    let tmp = tempfile::tempdir().unwrap();
    let qa_dir = tmp.path().join(".story_kit/work/3_qa");
    fs::create_dir_all(&qa_dir).unwrap();
    fs::write(qa_dir.join("5_story_qa.md"), "---\nname: QA Story\n---\n# Story\n").unwrap();
    let results = StoryTestResults {
        unit: vec![TestCaseResult { name: "u1".to_string(), status: TestStatus::Pass, details: None }],
        integration: vec![],
    };
    write_test_results_to_story_file(tmp.path(), "5_story_qa", &results).unwrap();
    let read_back = read_test_results_from_story_file(tmp.path(), "5_story_qa").unwrap();
    assert_eq!(read_back.unit.len(), 1);
}
// The coverage baseline lands in the story's front matter, formatted to one
// decimal place with a trailing percent sign.
#[test]
fn write_coverage_baseline_to_story_file_updates_front_matter() {
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("6_story_cov.md"), "---\nname: Cov Story\n---\n# Story\n").unwrap();
    write_coverage_baseline_to_story_file(tmp.path(), "6_story_cov", 75.4).unwrap();
    let contents = fs::read_to_string(current.join("6_story_cov.md")).unwrap();
    assert!(contents.contains("coverage_baseline: 75.4%"), "got: {contents}");
}
// Baseline writes are best-effort: a missing story file is silently skipped.
#[test]
fn write_coverage_baseline_to_story_file_silent_on_missing_story() {
    let tmp = tempfile::tempdir().unwrap();
    // Story doesn't exist — should succeed silently
    let result = write_coverage_baseline_to_story_file(tmp.path(), "99_story_missing", 50.0);
    assert!(result.is_ok());
}
// With no matching header the new section is appended after the body.
#[test]
fn replace_or_append_section_appends_when_absent() {
    let contents = "---\nname: T\n---\n# Story\n";
    let new = replace_or_append_section(contents, "## Test Results", "## Test Results\n\nfoo\n");
    assert!(new.contains("## Test Results"));
    assert!(new.contains("foo"));
    assert!(new.contains("# Story"));
}
// An existing section is replaced wholesale; following sections survive.
#[test]
fn replace_or_append_section_replaces_existing() {
    let contents = "# Story\n\n## Test Results\n\nold content\n\n## Other\n\nother content\n";
    let new = replace_or_append_section(contents, "## Test Results", "## Test Results\n\nnew content\n");
    assert!(new.contains("new content"));
    assert!(!new.contains("old content"));
    assert!(new.contains("## Other"));
}
}

View File

@@ -1,8 +1,11 @@
use serde::Deserialize;
use std::fs;
use std::path::Path;
/// Parsed story front-matter fields exposed to the rest of the app.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct StoryMetadata {
    /// Story display name from the `name:` front-matter key, if present.
    pub name: Option<String>,
    /// Raw `coverage_baseline:` value (e.g. "78.5%"); kept verbatim as text.
    pub coverage_baseline: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -23,6 +26,7 @@ impl std::fmt::Display for StoryMetaError {
/// Raw deserialization target for the YAML front-matter block; converted to
/// the public `StoryMetadata` by `build_metadata`.
#[derive(Debug, Deserialize)]
struct FrontMatter {
    name: Option<String>,
    coverage_baseline: Option<String>,
}
pub fn parse_front_matter(contents: &str) -> Result<StoryMetadata, StoryMetaError> {
@@ -53,9 +57,58 @@ pub fn parse_front_matter(contents: &str) -> Result<StoryMetadata, StoryMetaErro
fn build_metadata(front: FrontMatter) -> StoryMetadata {
StoryMetadata {
name: front.name,
coverage_baseline: front.coverage_baseline,
}
}
/// Write or update the `coverage_baseline:` field in the YAML front matter
/// of a story file, formatting the percentage to one decimal place (e.g.
/// `coverage_baseline: 78.5%`).
///
/// When the file has no front matter, the content is rewritten unchanged
/// (effectively a no-op) and `Ok(())` is returned.
pub fn write_coverage_baseline(path: &Path, coverage_pct: f64) -> Result<(), String> {
    let original =
        fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
    let value = format!("{coverage_pct:.1}%");
    let updated = set_front_matter_field(&original, "coverage_baseline", &value);
    fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))
}
/// Insert or update a `key: value` pair inside the YAML front matter of a
/// markdown string.
///
/// The front matter is the region between an opening `---` on the first line
/// and the next `---` line; if either delimiter is missing the content is
/// returned unchanged. An existing `key:` line (at any indentation) is
/// replaced in place; otherwise the new line is inserted just before the
/// closing `---`. The original trailing newline (or its absence) is kept.
fn set_front_matter_field(contents: &str, key: &str, value: &str) -> String {
    let mut body: Vec<String> = contents.lines().map(str::to_owned).collect();

    // Front matter must open with `---` on the very first line.
    if body.first().map(|l| l.trim()) != Some("---") {
        return contents.to_string();
    }
    // Locate the closing `---`; bail out unchanged if it never closes.
    let Some(found) = body.iter().skip(1).position(|l| l.trim() == "---") else {
        return contents.to_string();
    };
    let close = found + 1;

    let entry = format!("{key}: {value}");
    let prefix = format!("{key}:");
    match body[1..close].iter().position(|l| l.trim_start().starts_with(&prefix)) {
        Some(i) => body[i + 1] = entry,
        None => body.insert(close, entry),
    }

    let mut rebuilt = body.join("\n");
    if contents.ends_with('\n') {
        rebuilt.push('\n');
    }
    rebuilt
}
pub fn parse_unchecked_todos(contents: &str) -> Vec<String> {
contents
.lines()
@@ -82,12 +135,49 @@ workflow: tdd
"#;
let meta = parse_front_matter(input).expect("front matter");
assert_eq!(
meta,
StoryMetadata {
name: Some("Establish the TDD Workflow and Gates".to_string()),
}
);
assert_eq!(meta.name.as_deref(), Some("Establish the TDD Workflow and Gates"));
assert_eq!(meta.coverage_baseline, None);
}
// `coverage_baseline:` in front matter is surfaced on StoryMetadata verbatim.
#[test]
fn parses_coverage_baseline_from_front_matter() {
    let input = "---\nname: Test Story\ncoverage_baseline: 78.5%\n---\n# Story\n";
    let meta = parse_front_matter(input).expect("front matter");
    assert_eq!(meta.coverage_baseline.as_deref(), Some("78.5%"));
}
// A missing key is inserted before the closing `---`, leaving other keys
// alone and preserving the trailing newline.
#[test]
fn set_front_matter_field_inserts_new_key() {
    let input = "---\nname: My Story\n---\n# Body\n";
    let output = set_front_matter_field(input, "coverage_baseline", "55.0%");
    assert!(output.contains("coverage_baseline: 55.0%"));
    assert!(output.contains("name: My Story"));
    assert!(output.ends_with('\n'));
}
// An existing key is replaced in place rather than duplicated.
#[test]
fn set_front_matter_field_updates_existing_key() {
    let input = "---\nname: My Story\ncoverage_baseline: 40.0%\n---\n# Body\n";
    let output = set_front_matter_field(input, "coverage_baseline", "55.0%");
    assert!(output.contains("coverage_baseline: 55.0%"));
    assert!(!output.contains("40.0%"));
}
// Without an opening `---` the content passes through untouched.
#[test]
fn set_front_matter_field_no_op_without_front_matter() {
    let input = "# No front matter\n";
    let output = set_front_matter_field(input, "coverage_baseline", "55.0%");
    assert_eq!(output, input);
}
// End-to-end: write_coverage_baseline rewrites the file on disk with the
// formatted percentage in its front matter.
#[test]
fn write_coverage_baseline_updates_file() {
    let tmp = tempfile::tempdir().unwrap();
    let path = tmp.path().join("story.md");
    std::fs::write(&path, "---\nname: Test\n---\n# Story\n").unwrap();
    write_coverage_baseline(&path, 82.3).unwrap();
    let contents = std::fs::read_to_string(&path).unwrap();
    assert!(contents.contains("coverage_baseline: 82.3%"));
}
#[test]

View File

@@ -1,14 +1,16 @@
//! Workflow module: test result tracking and acceptance evaluation.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TestStatus {
Pass,
Fail,
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TestCaseResult {
pub name: String,
pub status: TestStatus,
@@ -27,7 +29,7 @@ pub struct AcceptanceDecision {
pub warning: Option<String>,
}
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct StoryTestResults {
pub unit: Vec<TestCaseResult>,
pub integration: Vec<TestCaseResult>,