story-kit: merge 171_story_persist_test_results_to_story_files
This commit is contained in:
@@ -966,6 +966,15 @@ fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
|
||||
workflow.record_test_results_validated(story_id.to_string(), unit, integration)?;
|
||||
|
||||
// Persist to story file (best-effort — file write errors are warnings, not failures).
|
||||
if let Ok(project_root) = ctx.state.get_project_root()
|
||||
&& let Some(results) = workflow.results.get(story_id)
|
||||
&& let Err(e) =
|
||||
crate::http::workflow::write_test_results_to_story_file(&project_root, story_id, results)
|
||||
{
|
||||
slog_warn!("[record_tests] Could not persist results to story file: {e}");
|
||||
}
|
||||
|
||||
Ok("Test results recorded.".to_string())
|
||||
}
|
||||
|
||||
@@ -980,8 +989,27 @@ fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result<String, Stri
|
||||
.lock()
|
||||
.map_err(|e| format!("Lock error: {e}"))?;
|
||||
|
||||
let empty_results = Default::default();
|
||||
let results = workflow.results.get(story_id).unwrap_or(&empty_results);
|
||||
// Use in-memory results if present; otherwise fall back to file-persisted results.
|
||||
let file_results;
|
||||
let results = if let Some(r) = workflow.results.get(story_id) {
|
||||
r
|
||||
} else {
|
||||
let project_root = ctx.state.get_project_root().ok();
|
||||
file_results = project_root.as_deref().and_then(|root| {
|
||||
crate::http::workflow::read_test_results_from_story_file(root, story_id)
|
||||
});
|
||||
file_results.as_ref().map_or_else(
|
||||
|| {
|
||||
// No results anywhere — use empty default for the acceptance check
|
||||
// (it will fail with "No test results recorded")
|
||||
static EMPTY: std::sync::OnceLock<crate::workflow::StoryTestResults> =
|
||||
std::sync::OnceLock::new();
|
||||
EMPTY.get_or_init(Default::default)
|
||||
},
|
||||
|r| r,
|
||||
)
|
||||
};
|
||||
|
||||
let coverage = workflow.coverage.get(story_id);
|
||||
let decision = evaluate_acceptance_with_coverage(results, coverage);
|
||||
|
||||
@@ -1011,6 +1039,17 @@ async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<String, Stri
|
||||
.start_agent(&project_root, story_id, agent_name, None)
|
||||
.await?;
|
||||
|
||||
// Snapshot coverage baseline from the most recent coverage report (best-effort).
|
||||
if let Some(pct) = read_coverage_percent_from_json(&project_root)
|
||||
&& let Err(e) = crate::http::workflow::write_coverage_baseline_to_story_file(
|
||||
&project_root,
|
||||
story_id,
|
||||
pct,
|
||||
)
|
||||
{
|
||||
slog_warn!("[start_agent] Could not write coverage baseline to story file: {e}");
|
||||
}
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": info.story_id,
|
||||
"agent_name": info.agent_name,
|
||||
@@ -1021,6 +1060,22 @@ async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<String, Stri
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
/// Try to read the overall line coverage percentage from the llvm-cov JSON report.
|
||||
///
|
||||
/// Expects the file at `{project_root}/.story_kit/coverage/server.json`.
|
||||
/// Returns `None` if the file is absent, unreadable, or cannot be parsed.
|
||||
fn read_coverage_percent_from_json(project_root: &std::path::Path) -> Option<f64> {
|
||||
let path = project_root
|
||||
.join(".story_kit")
|
||||
.join("coverage")
|
||||
.join("server.json");
|
||||
let contents = std::fs::read_to_string(&path).ok()?;
|
||||
let json: Value = serde_json::from_str(&contents).ok()?;
|
||||
// cargo llvm-cov --json format: data[0].totals.lines.percent
|
||||
json.pointer("/data/0/totals/lines/percent")
|
||||
.and_then(|v| v.as_f64())
|
||||
}
|
||||
|
||||
async fn tool_stop_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
@@ -3170,4 +3225,83 @@ stage = "coder"
|
||||
assert_eq!(parsed["behavior"], "deny", "denied must return behavior:deny");
|
||||
assert!(parsed["message"].is_string(), "deny must include a message");
|
||||
}
|
||||
|
||||
// ── record_tests / ensure_acceptance persistence tests ───────
|
||||
|
||||
#[test]
fn record_tests_persists_to_story_file() {
    // Recording test results via the tool must also persist them into the
    // story markdown file (a "## Test Results" section with a JSON marker).
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    // FIX: restore `&current` (was mojibake `¤t` from an HTML-entity
    // mangling of `&curren`), which did not compile.
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("1_story_persist.md"), "---\nname: Persist\n---\n# Story\n").unwrap();

    let ctx = test_ctx(tmp.path());
    tool_record_tests(
        &json!({
            "story_id": "1_story_persist",
            "unit": [{"name": "u1", "status": "pass"}],
            "integration": []
        }),
        &ctx,
    )
    .unwrap();

    // The story file should now carry the persisted results.
    let contents = fs::read_to_string(current.join("1_story_persist.md")).unwrap();
    assert!(contents.contains("## Test Results"), "file should have Test Results section");
    assert!(contents.contains("story-kit-test-results:"), "file should have JSON marker");
    assert!(contents.contains("u1"), "file should contain test name");
}
|
||||
|
||||
#[test]
fn ensure_acceptance_reads_from_file_when_not_in_memory() {
    // After a server restart the in-memory results map is empty;
    // ensure_acceptance must fall back to results persisted in the story file.
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    // FIX: restore `&current` (was mojibake `¤t` from an HTML-entity
    // mangling of `&curren`), which did not compile.
    fs::create_dir_all(&current).unwrap();

    // Write a story file with a pre-populated Test Results section (simulating a restart)
    let story_content = "---\nname: Persist\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"pass\",\"details\":null}],\"integration\":[{\"name\":\"i1\",\"status\":\"pass\",\"details\":null}]} -->\n";
    fs::write(current.join("2_story_file_only.md"), story_content).unwrap();

    // Use a fresh context (empty in-memory state, simulating a restart)
    let ctx = test_ctx(tmp.path());

    // ensure_acceptance should read from file and succeed
    let result = tool_ensure_acceptance(&json!({"story_id": "2_story_file_only"}), &ctx);
    assert!(result.is_ok(), "should accept based on file data, got: {:?}", result);
    assert!(result.unwrap().contains("All gates pass"));
}
|
||||
|
||||
#[test]
fn ensure_acceptance_file_with_failures_still_blocks() {
    // File-persisted results containing a failing test must still block
    // acceptance — the file fallback must not weaken the gate.
    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    // FIX: restore `&current` (was mojibake `¤t` from an HTML-entity
    // mangling of `&curren`), which did not compile.
    fs::create_dir_all(&current).unwrap();

    let story_content = "---\nname: Fail\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"fail\",\"details\":\"error\"}],\"integration\":[]} -->\n";
    fs::write(current.join("3_story_fail.md"), story_content).unwrap();

    let ctx = test_ctx(tmp.path());
    let result = tool_ensure_acceptance(&json!({"story_id": "3_story_fail"}), &ctx);
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("blocked"));
}
|
||||
|
||||
#[test]
fn read_coverage_percent_from_json_parses_llvm_cov_format() {
    // A well-formed llvm-cov JSON report should yield the totals.lines.percent value.
    let tmp = tempfile::tempdir().unwrap();
    let coverage_dir = tmp.path().join(".story_kit/coverage");
    fs::create_dir_all(&coverage_dir).unwrap();

    let report = r#"{"data":[{"totals":{"lines":{"count":100,"covered":78,"percent":78.0}}}]}"#;
    fs::write(coverage_dir.join("server.json"), report).unwrap();

    assert_eq!(read_coverage_percent_from_json(tmp.path()), Some(78.0));
}
|
||||
|
||||
#[test]
fn read_coverage_percent_from_json_returns_none_when_absent() {
    // A fresh temp dir has no coverage report, so the helper must return None.
    let tmp = tempfile::tempdir().unwrap();
    assert!(read_coverage_percent_from_json(tmp.path()).is_none());
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user