1270 lines
45 KiB
Rust
1270 lines
45 KiB
Rust
|
|
use crate::http::context::AppContext;
|
||
|
|
use crate::http::workflow::{
|
||
|
|
add_criterion_to_file, check_criterion_in_file, create_bug_file, create_refactor_file,
|
||
|
|
create_spike_file, create_story_file, list_bug_files, list_refactor_files,
|
||
|
|
load_pipeline_state, load_upcoming_stories, update_story_in_file, validate_story_dirs,
|
||
|
|
};
|
||
|
|
use crate::agents::{close_bug_to_archive, feature_branch_has_unmerged_changes, move_story_to_archived};
|
||
|
|
use crate::slog_warn;
|
||
|
|
use crate::io::story_metadata::{parse_front_matter, parse_unchecked_todos};
|
||
|
|
use crate::workflow::{evaluate_acceptance_with_coverage, TestCaseResult, TestStatus};
|
||
|
|
use serde_json::{json, Value};
|
||
|
|
use std::collections::HashMap;
|
||
|
|
use std::fs;
|
||
|
|
|
||
|
|
pub(super) fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let name = args
|
||
|
|
.get("name")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: name")?;
|
||
|
|
let user_story = args.get("user_story").and_then(|v| v.as_str());
|
||
|
|
let acceptance_criteria: Option<Vec<String>> = args
|
||
|
|
.get("acceptance_criteria")
|
||
|
|
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||
|
|
// Spike 61: write the file only — the filesystem watcher detects the new
|
||
|
|
// .md file in work/1_backlog/ and auto-commits with a deterministic message.
|
||
|
|
let commit = false;
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let story_id = create_story_file(
|
||
|
|
&root,
|
||
|
|
name,
|
||
|
|
user_story,
|
||
|
|
acceptance_criteria.as_deref(),
|
||
|
|
commit,
|
||
|
|
)?;
|
||
|
|
|
||
|
|
Ok(format!("Created story: {story_id}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_validate_stories(ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let results = validate_story_dirs(&root)?;
|
||
|
|
serde_json::to_string_pretty(&json!(results
|
||
|
|
.iter()
|
||
|
|
.map(|r| json!({
|
||
|
|
"story_id": r.story_id,
|
||
|
|
"valid": r.valid,
|
||
|
|
"error": r.error,
|
||
|
|
}))
|
||
|
|
.collect::<Vec<_>>()))
|
||
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_list_upcoming(ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let stories = load_upcoming_stories(ctx)?;
|
||
|
|
serde_json::to_string_pretty(&json!(stories
|
||
|
|
.iter()
|
||
|
|
.map(|s| json!({
|
||
|
|
"story_id": s.story_id,
|
||
|
|
"name": s.name,
|
||
|
|
"error": s.error,
|
||
|
|
}))
|
||
|
|
.collect::<Vec<_>>()))
|
||
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
/// MCP tool: snapshot the whole pipeline as pretty-printed JSON.
///
/// Output shape: `{ "active": [...], "backlog": [...], "backlog_count": N }`.
/// Active items (stages current/qa/merge/done) carry story id, name, stage,
/// an optional agent assignment, and — only when set — blocked/retry_count/
/// merge_failure markers. Backlog items carry only id and name.
pub(super) fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, String> {
    let state = load_pipeline_state(ctx)?;

    // Maps one stage's stories to JSON rows, tagging each with its stage name.
    fn map_items(items: &[crate::http::workflow::UpcomingStory], stage: &str) -> Vec<Value> {
        items
            .iter()
            .map(|s| {
                let mut item = json!({
                    "story_id": s.story_id,
                    "name": s.name,
                    "stage": stage,
                    // `agent` is null when no agent is assigned to the story.
                    "agent": s.agent.as_ref().map(|a| json!({
                        "agent_name": a.agent_name,
                        "model": a.model,
                        "status": a.status,
                    })),
                });
                // Include blocked/retry_count when present so callers can
                // identify stories stuck in the pipeline.
                if let Some(true) = s.blocked {
                    item["blocked"] = json!(true);
                }
                if let Some(rc) = s.retry_count {
                    item["retry_count"] = json!(rc);
                }
                if let Some(ref mf) = s.merge_failure {
                    item["merge_failure"] = json!(mf);
                }
                item
            })
            .collect()
    }

    // "Active" = every stage past the backlog, flattened into one list.
    let mut active: Vec<Value> = Vec::new();
    active.extend(map_items(&state.current, "current"));
    active.extend(map_items(&state.qa, "qa"));
    active.extend(map_items(&state.merge, "merge"));
    active.extend(map_items(&state.done, "done"));

    let backlog: Vec<Value> = state
        .backlog
        .iter()
        .map(|s| json!({ "story_id": s.story_id, "name": s.name }))
        .collect();

    serde_json::to_string_pretty(&json!({
        "active": active,
        "backlog": backlog,
        "backlog_count": backlog.len(),
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
|
||
|
|
|
||
|
|
pub(super) fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let story_id = args
|
||
|
|
.get("story_id")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: story_id")?;
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let current_dir = root.join(".story_kit").join("work").join("2_current");
|
||
|
|
let filepath = current_dir.join(format!("{story_id}.md"));
|
||
|
|
|
||
|
|
if !filepath.exists() {
|
||
|
|
return Err(format!("Story file not found: {story_id}.md"));
|
||
|
|
}
|
||
|
|
|
||
|
|
let contents = fs::read_to_string(&filepath)
|
||
|
|
.map_err(|e| format!("Failed to read story file: {e}"))?;
|
||
|
|
|
||
|
|
let story_name = parse_front_matter(&contents)
|
||
|
|
.ok()
|
||
|
|
.and_then(|m| m.name);
|
||
|
|
let todos = parse_unchecked_todos(&contents);
|
||
|
|
|
||
|
|
serde_json::to_string_pretty(&json!({
|
||
|
|
"story_id": story_id,
|
||
|
|
"story_name": story_name,
|
||
|
|
"todos": todos,
|
||
|
|
}))
|
||
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
/// MCP tool: record unit/integration test results for a story.
///
/// Required arg: `story_id`. `unit` and `integration` are optional arrays of
/// `{name, status[, details]}` objects (absent/null means "no results of that
/// kind"). Results are stored in the shared workflow state, then persisted to
/// the story file on a best-effort basis.
pub(super) fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;

    let unit = parse_test_cases(args.get("unit"))?;
    let integration = parse_test_cases(args.get("integration"))?;

    // Single lock acquisition covers both the validated write and the
    // read-back used for file persistence below.
    let mut workflow = ctx
        .workflow
        .lock()
        .map_err(|e| format!("Lock error: {e}"))?;

    workflow.record_test_results_validated(story_id.to_string(), unit, integration)?;

    // Persist to story file (best-effort — file write errors are warnings, not failures).
    if let Ok(project_root) = ctx.state.get_project_root()
        && let Some(results) = workflow.results.get(story_id)
        && let Err(e) =
            crate::http::workflow::write_test_results_to_story_file(&project_root, story_id, results)
    {
        slog_warn!("[record_tests] Could not persist results to story file: {e}");
    }

    Ok("Test results recorded.".to_string())
}
|
||
|
|
|
||
|
|
/// MCP tool: dry-run the acceptance gates for a story without mutating state.
///
/// Result sources, in priority order: in-memory workflow results, then
/// results previously persisted in the story file, then an empty default
/// (which makes the gate fail with "No test results recorded").
/// Returns Ok on success, or Err listing every blocking reason.
pub(super) fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;

    let workflow = ctx
        .workflow
        .lock()
        .map_err(|e| format!("Lock error: {e}"))?;

    // Use in-memory results if present; otherwise fall back to file-persisted results.
    // `file_results` is declared in this outer scope so the reference taken from
    // it in the `else` arm lives as long as the borrow out of `workflow.results`.
    let file_results;
    let results = if let Some(r) = workflow.results.get(story_id) {
        r
    } else {
        let project_root = ctx.state.get_project_root().ok();
        file_results = project_root.as_deref().and_then(|root| {
            crate::http::workflow::read_test_results_from_story_file(root, story_id)
        });
        file_results.as_ref().map_or_else(
            || {
                // No results anywhere — use empty default for the acceptance check
                // (it will fail with "No test results recorded")
                static EMPTY: std::sync::OnceLock<crate::workflow::StoryTestResults> =
                    std::sync::OnceLock::new();
                EMPTY.get_or_init(Default::default)
            },
            |r| r,
        )
    };

    let coverage = workflow.coverage.get(story_id);
    let decision = evaluate_acceptance_with_coverage(results, coverage);

    if decision.can_accept {
        Ok("Story can be accepted. All gates pass.".to_string())
    } else {
        // Fold the optional warning into the reasons so callers see every
        // obstacle in a single message.
        let mut parts = decision.reasons;
        if let Some(w) = decision.warning {
            parts.push(w);
        }
        Err(format!("Acceptance blocked: {}", parts.join("; ")))
    }
}
|
||
|
|
|
||
|
|
pub(super) fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let story_id = args
|
||
|
|
.get("story_id")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: story_id")?;
|
||
|
|
|
||
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||
|
|
|
||
|
|
// Bug 226: Refuse to accept if the feature branch has unmerged code.
|
||
|
|
// The code must be squash-merged via merge_agent_work first.
|
||
|
|
if feature_branch_has_unmerged_changes(&project_root, story_id) {
|
||
|
|
return Err(format!(
|
||
|
|
"Cannot accept story '{story_id}': feature branch 'feature/story-{story_id}' \
|
||
|
|
has unmerged changes. Use merge_agent_work to squash-merge the code into \
|
||
|
|
master first."
|
||
|
|
));
|
||
|
|
}
|
||
|
|
|
||
|
|
move_story_to_archived(&project_root, story_id)?;
|
||
|
|
ctx.agents.remove_agents_for_story(story_id);
|
||
|
|
|
||
|
|
Ok(format!(
|
||
|
|
"Story '{story_id}' accepted, moved to done/, and committed to master."
|
||
|
|
))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_check_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let story_id = args
|
||
|
|
.get("story_id")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: story_id")?;
|
||
|
|
let criterion_index = args
|
||
|
|
.get("criterion_index")
|
||
|
|
.and_then(|v| v.as_u64())
|
||
|
|
.ok_or("Missing required argument: criterion_index")? as usize;
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
check_criterion_in_file(&root, story_id, criterion_index)?;
|
||
|
|
|
||
|
|
Ok(format!(
|
||
|
|
"Criterion {criterion_index} checked for story '{story_id}'. Committed to master."
|
||
|
|
))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_add_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let story_id = args
|
||
|
|
.get("story_id")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: story_id")?;
|
||
|
|
let criterion = args
|
||
|
|
.get("criterion")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: criterion")?;
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
add_criterion_to_file(&root, story_id, criterion)?;
|
||
|
|
|
||
|
|
Ok(format!(
|
||
|
|
"Added criterion to story '{story_id}': - [ ] {criterion}"
|
||
|
|
))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_update_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let story_id = args
|
||
|
|
.get("story_id")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: story_id")?;
|
||
|
|
let user_story = args.get("user_story").and_then(|v| v.as_str());
|
||
|
|
let description = args.get("description").and_then(|v| v.as_str());
|
||
|
|
|
||
|
|
// Collect front matter fields: explicit `agent` param + arbitrary `front_matter` object.
|
||
|
|
let mut front_matter: HashMap<String, String> = HashMap::new();
|
||
|
|
if let Some(agent) = args.get("agent").and_then(|v| v.as_str()) {
|
||
|
|
front_matter.insert("agent".to_string(), agent.to_string());
|
||
|
|
}
|
||
|
|
if let Some(obj) = args.get("front_matter").and_then(|v| v.as_object()) {
|
||
|
|
for (k, v) in obj {
|
||
|
|
let val = match v {
|
||
|
|
Value::String(s) => s.clone(),
|
||
|
|
other => other.to_string(),
|
||
|
|
};
|
||
|
|
front_matter.insert(k.clone(), val);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
let front_matter_opt = if front_matter.is_empty() { None } else { Some(&front_matter) };
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
update_story_in_file(&root, story_id, user_story, description, front_matter_opt)?;
|
||
|
|
|
||
|
|
Ok(format!("Updated story '{story_id}'."))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_create_spike(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let name = args
|
||
|
|
.get("name")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: name")?;
|
||
|
|
let description = args.get("description").and_then(|v| v.as_str());
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let spike_id = create_spike_file(&root, name, description)?;
|
||
|
|
|
||
|
|
Ok(format!("Created spike: {spike_id}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_create_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let name = args
|
||
|
|
.get("name")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: name")?;
|
||
|
|
let description = args
|
||
|
|
.get("description")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: description")?;
|
||
|
|
let steps_to_reproduce = args
|
||
|
|
.get("steps_to_reproduce")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: steps_to_reproduce")?;
|
||
|
|
let actual_result = args
|
||
|
|
.get("actual_result")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: actual_result")?;
|
||
|
|
let expected_result = args
|
||
|
|
.get("expected_result")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: expected_result")?;
|
||
|
|
let acceptance_criteria: Option<Vec<String>> = args
|
||
|
|
.get("acceptance_criteria")
|
||
|
|
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let bug_id = create_bug_file(
|
||
|
|
&root,
|
||
|
|
name,
|
||
|
|
description,
|
||
|
|
steps_to_reproduce,
|
||
|
|
actual_result,
|
||
|
|
expected_result,
|
||
|
|
acceptance_criteria.as_deref(),
|
||
|
|
)?;
|
||
|
|
|
||
|
|
Ok(format!("Created bug: {bug_id}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_list_bugs(ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let bugs = list_bug_files(&root)?;
|
||
|
|
serde_json::to_string_pretty(&json!(bugs
|
||
|
|
.iter()
|
||
|
|
.map(|(id, name)| json!({ "bug_id": id, "name": name }))
|
||
|
|
.collect::<Vec<_>>()))
|
||
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_close_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let bug_id = args
|
||
|
|
.get("bug_id")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: bug_id")?;
|
||
|
|
|
||
|
|
let root = ctx.agents.get_project_root(&ctx.state)?;
|
||
|
|
close_bug_to_archive(&root, bug_id)?;
|
||
|
|
ctx.agents.remove_agents_for_story(bug_id);
|
||
|
|
|
||
|
|
Ok(format!(
|
||
|
|
"Bug '{bug_id}' closed, moved to bugs/archive/, and committed to master."
|
||
|
|
))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_create_refactor(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let name = args
|
||
|
|
.get("name")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Missing required argument: name")?;
|
||
|
|
let description = args.get("description").and_then(|v| v.as_str());
|
||
|
|
let acceptance_criteria: Option<Vec<String>> = args
|
||
|
|
.get("acceptance_criteria")
|
||
|
|
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||
|
|
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let refactor_id = create_refactor_file(
|
||
|
|
&root,
|
||
|
|
name,
|
||
|
|
description,
|
||
|
|
acceptance_criteria.as_deref(),
|
||
|
|
)?;
|
||
|
|
|
||
|
|
Ok(format!("Created refactor: {refactor_id}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn tool_list_refactors(ctx: &AppContext) -> Result<String, String> {
|
||
|
|
let root = ctx.state.get_project_root()?;
|
||
|
|
let refactors = list_refactor_files(&root)?;
|
||
|
|
serde_json::to_string_pretty(&json!(refactors
|
||
|
|
.iter()
|
||
|
|
.map(|(id, name)| json!({ "refactor_id": id, "name": name }))
|
||
|
|
.collect::<Vec<_>>()))
|
||
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
||
|
|
}
|
||
|
|
|
||
|
|
pub(super) fn parse_test_cases(value: Option<&Value>) -> Result<Vec<TestCaseResult>, String> {
|
||
|
|
let arr = match value {
|
||
|
|
Some(Value::Array(a)) => a,
|
||
|
|
Some(Value::Null) | None => return Ok(Vec::new()),
|
||
|
|
_ => return Err("Expected array for test cases".to_string()),
|
||
|
|
};
|
||
|
|
|
||
|
|
arr.iter()
|
||
|
|
.map(|item| {
|
||
|
|
let name = item
|
||
|
|
.get("name")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Test case missing 'name'")?
|
||
|
|
.to_string();
|
||
|
|
let status_str = item
|
||
|
|
.get("status")
|
||
|
|
.and_then(|v| v.as_str())
|
||
|
|
.ok_or("Test case missing 'status'")?;
|
||
|
|
let status = match status_str {
|
||
|
|
"pass" => TestStatus::Pass,
|
||
|
|
"fail" => TestStatus::Fail,
|
||
|
|
other => return Err(format!("Invalid test status '{other}'. Use 'pass' or 'fail'.")),
|
||
|
|
};
|
||
|
|
let details = item.get("details").and_then(|v| v.as_str()).map(String::from);
|
||
|
|
Ok(TestCaseResult {
|
||
|
|
name,
|
||
|
|
status,
|
||
|
|
details,
|
||
|
|
})
|
||
|
|
})
|
||
|
|
.collect()
|
||
|
|
}
|
||
|
|
|
||
|
|
#[cfg(test)]
|
||
|
|
mod tests {
|
||
|
|
use super::*;
|
||
|
|
use crate::http::context::AppContext;
|
||
|
|
|
||
|
|
// Build a minimal AppContext rooted at `dir` for exercising the tool fns.
fn test_ctx(dir: &std::path::Path) -> AppContext {
    AppContext::new_test(dir.to_path_buf())
}
|
||
|
|
|
||
|
|
#[test]
// Absent input yields an empty result set.
fn parse_test_cases_empty() {
    let result = parse_test_cases(None).unwrap();
    assert!(result.is_empty());
}
|
||
|
|
|
||
|
|
#[test]
// Well-formed entries are parsed with status and optional details intact.
fn parse_test_cases_valid() {
    let input = json!([
        {"name": "test1", "status": "pass"},
        {"name": "test2", "status": "fail", "details": "assertion failed"}
    ]);
    let result = parse_test_cases(Some(&input)).unwrap();
    assert_eq!(result.len(), 2);
    assert_eq!(result[0].status, TestStatus::Pass);
    assert_eq!(result[1].status, TestStatus::Fail);
    assert_eq!(result[1].details, Some("assertion failed".to_string()));
}
|
||
|
|
|
||
|
|
#[test]
// A status other than "pass"/"fail" is rejected.
fn parse_test_cases_invalid_status() {
    let input = json!([{"name": "t", "status": "maybe"}]);
    assert!(parse_test_cases(Some(&input)).is_err());
}
|
||
|
|
|
||
|
|
#[test]
// JSON null is treated the same as an absent value: empty result, no error.
fn parse_test_cases_null_value_returns_empty() {
    let null_val = json!(null);
    let result = parse_test_cases(Some(&null_val)).unwrap();
    assert!(result.is_empty());
}
|
||
|
|
|
||
|
|
#[test]
// A non-array, non-null value is rejected with the "Expected array" message.
fn parse_test_cases_non_array_returns_error() {
    let obj = json!({"invalid": "input"});
    let result = parse_test_cases(Some(&obj));
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("Expected array"));
}
|
||
|
|
|
||
|
|
#[test]
// Entries without a 'name' field are rejected.
fn parse_test_cases_missing_name_returns_error() {
    let input = json!([{"status": "pass"}]);
    let result = parse_test_cases(Some(&input));
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("name"));
}
|
||
|
|
|
||
|
|
#[test]
// Entries without a 'status' field are rejected.
fn parse_test_cases_missing_status_returns_error() {
    let input = json!([{"name": "test1"}]);
    let result = parse_test_cases(Some(&input));
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("status"));
}
|
||
|
|
|
||
|
|
#[test]
// An empty project validates to an empty JSON array, not an error.
fn tool_validate_stories_empty_project() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_validate_stories(&ctx).unwrap();
    let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
    assert!(parsed.is_empty());
}
|
||
|
|
|
||
|
|
#[test]
// Round trip: a created story appears in the upcoming list with its name.
fn tool_create_story_and_list_upcoming() {
    let tmp = tempfile::tempdir().unwrap();
    // No git repo needed: spike 61 — create_story just writes the file;
    // the filesystem watcher handles the commit asynchronously.
    let ctx = test_ctx(tmp.path());

    let result = tool_create_story(
        &json!({"name": "Test Story", "acceptance_criteria": ["AC1", "AC2"]}),
        &ctx,
    )
    .unwrap();
    assert!(result.contains("Created story:"));

    // List should return it
    let list = tool_list_upcoming(&ctx).unwrap();
    let parsed: Vec<Value> = serde_json::from_str(&list).unwrap();
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0]["name"], "Test Story");
}
|
||
|
|
|
||
|
|
#[test]
// A name with no alphanumeric content is rejected by create_story_file.
fn tool_create_story_rejects_empty_name() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_create_story(&json!({"name": "!!!"}), &ctx);
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("alphanumeric"));
}
|
||
|
|
|
||
|
|
#[test]
// Omitting the required `name` argument produces the standard error.
fn tool_create_story_missing_name() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_create_story(&json!({}), &ctx);
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("Missing required argument"));
}
|
||
|
|
|
||
|
|
#[test]
// Seeds one story file per pipeline stage and checks the JSON shape:
// current/qa/merge/done land in `active`, backlog is reported separately.
fn tool_get_pipeline_status_returns_structured_response() {
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();

    for (stage, id, name) in &[
        ("1_backlog", "10_story_upcoming", "Upcoming Story"),
        ("2_current", "20_story_current", "Current Story"),
        ("3_qa", "30_story_qa", "QA Story"),
        ("4_merge", "40_story_merge", "Merge Story"),
        ("5_done", "50_story_done", "Done Story"),
    ] {
        let dir = root.join(".story_kit/work").join(stage);
        std::fs::create_dir_all(&dir).unwrap();
        std::fs::write(
            dir.join(format!("{id}.md")),
            format!("---\nname: \"{name}\"\n---\n"),
        )
        .unwrap();
    }

    let ctx = test_ctx(root);
    let result = tool_get_pipeline_status(&ctx).unwrap();
    let parsed: Value = serde_json::from_str(&result).unwrap();

    // Active stages include current, qa, merge, done
    let active = parsed["active"].as_array().unwrap();
    assert_eq!(active.len(), 4);

    let stages: Vec<&str> = active.iter().map(|i| i["stage"].as_str().unwrap()).collect();
    assert!(stages.contains(&"current"));
    assert!(stages.contains(&"qa"));
    assert!(stages.contains(&"merge"));
    assert!(stages.contains(&"done"));

    // Backlog
    let backlog = parsed["backlog"].as_array().unwrap();
    assert_eq!(backlog.len(), 1);
    assert_eq!(backlog[0]["story_id"], "10_story_upcoming");
    assert_eq!(parsed["backlog_count"], 1);
}
|
||
|
|
|
||
|
|
#[test]
// An injected running agent shows up on its story's pipeline entry.
// (Restores `&current` where the source extraction garbled it to `¤t`.)
fn tool_get_pipeline_status_includes_agent_assignment() {
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();

    let current = root.join(".story_kit/work/2_current");
    std::fs::create_dir_all(&current).unwrap();
    std::fs::write(
        current.join("20_story_active.md"),
        "---\nname: \"Active Story\"\n---\n",
    )
    .unwrap();

    let ctx = test_ctx(root);
    ctx.agents.inject_test_agent(
        "20_story_active",
        "coder-1",
        crate::agents::AgentStatus::Running,
    );

    let result = tool_get_pipeline_status(&ctx).unwrap();
    let parsed: Value = serde_json::from_str(&result).unwrap();

    let active = parsed["active"].as_array().unwrap();
    assert_eq!(active.len(), 1);
    let item = &active[0];
    assert_eq!(item["story_id"], "20_story_active");
    assert_eq!(item["stage"], "current");
    assert!(!item["agent"].is_null(), "agent should be present");
    assert_eq!(item["agent"]["agent_name"], "coder-1");
    assert_eq!(item["agent"]["status"], "running");
}
|
||
|
|
|
||
|
|
#[test]
// Requesting todos for a nonexistent story file errors with "not found".
fn tool_get_story_todos_missing_file() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_get_story_todos(&json!({"story_id": "99_nonexistent"}), &ctx);
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("not found"));
}
|
||
|
|
|
||
|
|
#[test]
// Only the unchecked `- [ ]` items are returned; checked `- [x]` are skipped.
// (Restores `&current_dir` where the source extraction garbled it to `¤t_dir`.)
fn tool_get_story_todos_returns_unchecked() {
    let tmp = tempfile::tempdir().unwrap();
    let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
    fs::create_dir_all(&current_dir).unwrap();
    fs::write(
        current_dir.join("1_test.md"),
        "---\nname: Test\n---\n## AC\n- [ ] First\n- [x] Done\n- [ ] Second\n",
    )
    .unwrap();

    let ctx = test_ctx(tmp.path());
    let result = tool_get_story_todos(&json!({"story_id": "1_test"}), &ctx).unwrap();
    let parsed: Value = serde_json::from_str(&result).unwrap();
    assert_eq!(parsed["todos"].as_array().unwrap().len(), 2);
    assert_eq!(parsed["story_name"], "Test");
}
|
||
|
|
|
||
|
|
#[test]
// All-passing recorded results make the acceptance gate pass.
fn tool_record_tests_and_ensure_acceptance() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());

    // Record passing tests
    let result = tool_record_tests(
        &json!({
            "story_id": "1_test",
            "unit": [{"name": "u1", "status": "pass"}],
            "integration": [{"name": "i1", "status": "pass"}]
        }),
        &ctx,
    )
    .unwrap();
    assert!(result.contains("recorded"));

    // Should be acceptable
    let result = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx).unwrap();
    assert!(result.contains("All gates pass"));
}
|
||
|
|
|
||
|
|
#[test]
// A failing unit test blocks acceptance with a "blocked" error message.
fn tool_ensure_acceptance_blocks_on_failures() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());

    tool_record_tests(
        &json!({
            "story_id": "1_test",
            "unit": [{"name": "u1", "status": "fail"}],
            "integration": []
        }),
        &ctx,
    )
    .unwrap();

    let result = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx);
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("blocked"));
}
|
||
|
|
|
||
|
|
// Initialize a throwaway git repo in `dir` with a test identity and one
// empty commit, so tools that commit have a valid repository to act on.
fn setup_git_repo_in(dir: &std::path::Path) {
    let invocations: [&[&str]; 4] = [
        &["init"],
        &["config", "user.email", "test@test.com"],
        &["config", "user.name", "Test"],
        &["commit", "--allow-empty", "-m", "init"],
    ];
    for argv in invocations {
        std::process::Command::new("git")
            .args(argv)
            .current_dir(dir)
            .output()
            .unwrap();
    }
}
|
||
|
|
|
||
|
|
#[test]
// create_bug is advertised in the MCP tools list, describes the real
// work/1_backlog/ path, and requires all five bug fields.
fn create_bug_in_tools_list() {
    use super::super::{handle_tools_list};
    let resp = handle_tools_list(Some(json!(1)));
    let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
    let tool = tools.iter().find(|t| t["name"] == "create_bug");
    assert!(tool.is_some(), "create_bug missing from tools list");
    let t = tool.unwrap();
    let desc = t["description"].as_str().unwrap();
    assert!(
        desc.contains("work/1_backlog/"),
        "create_bug description should reference work/1_backlog/, got: {desc}"
    );
    assert!(
        !desc.contains(".story_kit/bugs"),
        "create_bug description should not reference nonexistent .story_kit/bugs/, got: {desc}"
    );
    let required = t["inputSchema"]["required"].as_array().unwrap();
    let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
    assert!(req_names.contains(&"name"));
    assert!(req_names.contains(&"description"));
    assert!(req_names.contains(&"steps_to_reproduce"));
    assert!(req_names.contains(&"actual_result"));
    assert!(req_names.contains(&"expected_result"));
}
|
||
|
|
|
||
|
|
#[test]
// list_bugs is advertised and its description references the real bug path.
fn list_bugs_in_tools_list() {
    use super::super::{handle_tools_list};
    let resp = handle_tools_list(Some(json!(1)));
    let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
    let tool = tools.iter().find(|t| t["name"] == "list_bugs");
    assert!(tool.is_some(), "list_bugs missing from tools list");
    let t = tool.unwrap();
    let desc = t["description"].as_str().unwrap();
    assert!(
        desc.contains("work/1_backlog/"),
        "list_bugs description should reference work/1_backlog/, got: {desc}"
    );
    assert!(
        !desc.contains(".story_kit/bugs"),
        "list_bugs description should not reference nonexistent .story_kit/bugs/, got: {desc}"
    );
}
|
||
|
|
|
||
|
|
#[test]
// close_bug is advertised, references work/5_done/, and requires bug_id.
fn close_bug_in_tools_list() {
    use super::super::{handle_tools_list};
    let resp = handle_tools_list(Some(json!(1)));
    let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
    let tool = tools.iter().find(|t| t["name"] == "close_bug");
    assert!(tool.is_some(), "close_bug missing from tools list");
    let t = tool.unwrap();
    let desc = t["description"].as_str().unwrap();
    assert!(
        !desc.contains(".story_kit/bugs"),
        "close_bug description should not reference nonexistent .story_kit/bugs/, got: {desc}"
    );
    assert!(
        desc.contains("work/5_done/"),
        "close_bug description should reference work/5_done/, got: {desc}"
    );
    let required = t["inputSchema"]["required"].as_array().unwrap();
    let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
    assert!(req_names.contains(&"bug_id"));
}
|
||
|
|
|
||
|
|
#[test]
// Omitting `name` fails with an error that mentions the missing field.
fn tool_create_bug_missing_name() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_create_bug(
        &json!({
            "description": "d",
            "steps_to_reproduce": "s",
            "actual_result": "a",
            "expected_result": "e"
        }),
        &ctx,
    );
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("name"));
}
|
||
|
|
|
||
|
|
#[test]
// Omitting `description` fails with an error that mentions the missing field.
fn tool_create_bug_missing_description() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_create_bug(
        &json!({
            "name": "Bug",
            "steps_to_reproduce": "s",
            "actual_result": "a",
            "expected_result": "e"
        }),
        &ctx,
    );
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("description"));
}
|
||
|
|
|
||
|
|
#[test]
// A fully-specified bug gets a slugged id and its file lands in the backlog.
fn tool_create_bug_creates_file_and_commits() {
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    let ctx = test_ctx(tmp.path());

    let result = tool_create_bug(
        &json!({
            "name": "Login Crash",
            "description": "The app crashes on login.",
            "steps_to_reproduce": "1. Open app\n2. Click login",
            "actual_result": "500 error",
            "expected_result": "Successful login"
        }),
        &ctx,
    )
    .unwrap();

    assert!(result.contains("1_bug_login_crash"));
    let bug_file = tmp
        .path()
        .join(".story_kit/work/1_backlog/1_bug_login_crash.md");
    assert!(bug_file.exists());
}
|
||
|
|
|
||
|
|
#[test]
fn tool_list_bugs_empty() {
    // With no bug files on disk, list_bugs returns an empty JSON array.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let listed = tool_list_bugs(&ctx).unwrap();
    let parsed: Vec<Value> = serde_json::from_str(&listed).unwrap();
    assert!(parsed.is_empty());
}

#[test]
fn tool_list_bugs_returns_open_bugs() {
    // Two bug files in the backlog are surfaced in order, with the id taken
    // from the filename and the name parsed from the heading.
    let dir = tempfile::tempdir().unwrap();
    let backlog = dir.path().join(".story_kit/work/1_backlog");
    std::fs::create_dir_all(&backlog).unwrap();
    for (file, heading) in [
        ("1_bug_crash.md", "# Bug 1: App Crash\n"),
        ("2_bug_typo.md", "# Bug 2: Typo in Header\n"),
    ] {
        std::fs::write(backlog.join(file), heading).unwrap();
    }

    let ctx = test_ctx(dir.path());
    let listed = tool_list_bugs(&ctx).unwrap();
    let parsed: Vec<Value> = serde_json::from_str(&listed).unwrap();

    assert_eq!(parsed.len(), 2);
    assert_eq!(parsed[0]["bug_id"], "1_bug_crash");
    assert_eq!(parsed[0]["name"], "App Crash");
    assert_eq!(parsed[1]["bug_id"], "2_bug_typo");
    assert_eq!(parsed[1]["name"], "Typo in Header");
}

#[test]
fn tool_close_bug_missing_bug_id() {
    // close_bug without a bug_id must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_close_bug(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("bug_id"));
}

#[test]
fn tool_close_bug_moves_to_archive() {
    // close_bug must move a tracked bug file from 1_backlog/ to 5_done/.
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
    std::fs::create_dir_all(&backlog_dir).unwrap();
    let bug_file = backlog_dir.join("1_bug_crash.md");
    std::fs::write(&bug_file, "# Bug 1: Crash\n").unwrap();

    // Commit the bug file so it's tracked. Previously the exit status of the
    // git commands was ignored (`.output().unwrap()` only catches spawn
    // failures), so a broken setup would surface as a confusing failure later;
    // assert success explicitly.
    let add = std::process::Command::new("git")
        .args(["add", "."])
        .current_dir(tmp.path())
        .output()
        .unwrap();
    assert!(add.status.success(), "git add failed: {add:?}");
    let commit = std::process::Command::new("git")
        .args(["commit", "-m", "add bug"])
        .current_dir(tmp.path())
        .output()
        .unwrap();
    assert!(commit.status.success(), "git commit failed: {commit:?}");

    let ctx = test_ctx(tmp.path());
    let result = tool_close_bug(&json!({"bug_id": "1_bug_crash"}), &ctx).unwrap();
    // The reply names the closed bug, the source file is gone, and the
    // archived copy exists under 5_done/.
    assert!(result.contains("1_bug_crash"));
    assert!(!bug_file.exists());
    assert!(tmp.path().join(".story_kit/work/5_done/1_bug_crash.md").exists());
}

#[test]
fn create_spike_in_tools_list() {
    // The tools/list response must advertise create_spike with a description
    // and a schema that requires `name` but not `description`.
    use super::super::handle_tools_list;

    let resp = handle_tools_list(Some(json!(1)));
    let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
    let tool = tools.iter().find(|entry| entry["name"] == "create_spike");
    assert!(tool.is_some(), "create_spike missing from tools list");

    let t = tool.unwrap();
    assert!(t["description"].is_string());

    let required = t["inputSchema"]["required"].as_array().unwrap();
    let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
    assert!(req_names.contains(&"name"));
    // description is optional
    assert!(!req_names.contains(&"description"));
}

#[test]
fn tool_create_spike_missing_name() {
    // create_spike without a name must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_create_spike(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("name"));
}

#[test]
fn tool_create_spike_rejects_empty_name() {
    // A name with no alphanumeric characters cannot be slugified and is rejected.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_create_spike(&json!({"name": "!!!"}), &ctx).unwrap_err();
    assert!(err.contains("alphanumeric"));
}

#[test]
fn tool_create_spike_creates_file() {
    // Happy path: name + description yield a spike file in the backlog with
    // front matter carrying the quoted name and a body carrying the description.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let args = json!({"name": "Compare Encoders", "description": "Which encoder is fastest?"});
    let result = tool_create_spike(&args, &ctx).unwrap();
    assert!(result.contains("1_spike_compare_encoders"));

    let spike_file = dir
        .path()
        .join(".story_kit/work/1_backlog/1_spike_compare_encoders.md");
    assert!(spike_file.exists());
    let contents = std::fs::read_to_string(&spike_file).unwrap();
    assert!(contents.starts_with("---\nname: \"Compare Encoders\"\n---"));
    assert!(contents.contains("Which encoder is fastest?"));
}

#[test]
fn tool_create_spike_creates_file_without_description() {
    // Without a description the spike file is still created, with a TBD
    // placeholder under the Question section.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let result = tool_create_spike(&json!({"name": "My Spike"}), &ctx).unwrap();
    assert!(result.contains("1_spike_my_spike"));

    let spike_file = dir
        .path()
        .join(".story_kit/work/1_backlog/1_spike_my_spike.md");
    assert!(spike_file.exists());
    let contents = std::fs::read_to_string(&spike_file).unwrap();
    assert!(contents.starts_with("---\nname: \"My Spike\"\n---"));
    assert!(contents.contains("## Question\n\n- TBD\n"));
}

#[test]
fn tool_record_tests_missing_story_id() {
    // record_tests without a story_id must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let args = json!({"unit": [], "integration": []});
    let err = tool_record_tests(&args, &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}

#[test]
fn tool_record_tests_invalid_unit_type_returns_error() {
    // "unit" must be an array of test cases; a string payload is rejected.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let args = json!({
        "story_id": "1_test",
        "unit": "not_an_array",
        "integration": []
    });
    assert!(tool_record_tests(&args, &ctx).is_err());
}

#[test]
fn tool_ensure_acceptance_missing_story_id() {
    // ensure_acceptance without a story_id must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_ensure_acceptance(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}

#[test]
fn tool_validate_stories_with_valid_story() {
    // A story with well-formed front matter validates cleanly.
    let dir = tempfile::tempdir().unwrap();
    let current = dir.path().join(".story_kit").join("work").join("2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(
        current.join("1_test.md"),
        "---\nname: \"Valid Story\"\n---\n## AC\n- [ ] First\n",
    )
    .unwrap();

    let ctx = test_ctx(dir.path());
    let raw = tool_validate_stories(&ctx).unwrap();
    let parsed: Vec<Value> = serde_json::from_str(&raw).unwrap();
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0]["valid"], true);
}

#[test]
fn tool_validate_stories_with_invalid_front_matter() {
    // A story file lacking front matter entirely is reported as invalid.
    let dir = tempfile::tempdir().unwrap();
    let current = dir.path().join(".story_kit").join("work").join("2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("1_test.md"), "## No front matter at all\n").unwrap();

    let ctx = test_ctx(dir.path());
    let raw = tool_validate_stories(&ctx).unwrap();
    let parsed: Vec<Value> = serde_json::from_str(&raw).unwrap();
    assert!(!parsed.is_empty());
    assert_eq!(parsed[0]["valid"], false);
}

#[test]
fn record_tests_persists_to_story_file() {
    // Recorded results are written back into the story markdown: a
    // "Test Results" section, a machine-readable JSON marker, and the names.
    let dir = tempfile::tempdir().unwrap();
    let current = dir.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("1_story_persist.md"), "---\nname: Persist\n---\n# Story\n").unwrap();

    let ctx = test_ctx(dir.path());
    let args = json!({
        "story_id": "1_story_persist",
        "unit": [{"name": "u1", "status": "pass"}],
        "integration": []
    });
    tool_record_tests(&args, &ctx).unwrap();

    let contents = fs::read_to_string(current.join("1_story_persist.md")).unwrap();
    assert!(contents.contains("## Test Results"), "file should have Test Results section");
    assert!(contents.contains("story-kit-test-results:"), "file should have JSON marker");
    assert!(contents.contains("u1"), "file should contain test name");
}

#[test]
fn ensure_acceptance_reads_from_file_when_not_in_memory() {
    // Simulate a restart: the Test Results marker exists only on disk, and a
    // fresh context has no in-memory results. ensure_acceptance must fall
    // back to the file data and pass.
    let dir = tempfile::tempdir().unwrap();
    let current = dir.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();

    let story_content = "---\nname: Persist\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"pass\",\"details\":null}],\"integration\":[{\"name\":\"i1\",\"status\":\"pass\",\"details\":null}]} -->\n";
    fs::write(current.join("2_story_file_only.md"), story_content).unwrap();

    let ctx = test_ctx(dir.path());
    let result = tool_ensure_acceptance(&json!({"story_id": "2_story_file_only"}), &ctx);
    assert!(result.is_ok(), "should accept based on file data, got: {:?}", result);
    assert!(result.unwrap().contains("All gates pass"));
}

#[test]
fn ensure_acceptance_file_with_failures_still_blocks() {
    // File-persisted results containing a failing unit test must still block
    // acceptance after a restart.
    let dir = tempfile::tempdir().unwrap();
    let current = dir.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();

    let story_content = "---\nname: Fail\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"fail\",\"details\":\"error\"}],\"integration\":[]} -->\n";
    fs::write(current.join("3_story_fail.md"), story_content).unwrap();

    let ctx = test_ctx(dir.path());
    let err = tool_ensure_acceptance(&json!({"story_id": "3_story_fail"}), &ctx).unwrap_err();
    assert!(err.contains("blocked"));
}

#[test]
fn tool_accept_story_missing_story_id() {
    // accept_story without a story_id must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_accept_story(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}

#[test]
fn tool_accept_story_nonexistent_story_returns_error() {
    // No story file in current/ — accepting an unknown id should fail.
    let dir = tempfile::tempdir().unwrap();
    setup_git_repo_in(dir.path());
    let ctx = test_ctx(dir.path());

    let result = tool_accept_story(&json!({"story_id": "99_nonexistent"}), &ctx);
    assert!(result.is_err());
}

/// Bug 226: accept_story must refuse when the feature branch has unmerged code.
#[test]
fn tool_accept_story_refuses_when_feature_branch_has_unmerged_code() {
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());

    // Run a git command and fail the test loudly if it exits non-zero.
    // Previously each command used `.output().unwrap()`, which only catches
    // spawn failures and silently ignores a non-zero exit status, so a broken
    // setup would surface as a confusing assertion failure later.
    let git = |args: &[&str]| {
        let out = std::process::Command::new("git")
            .args(args)
            .current_dir(tmp.path())
            .output()
            .unwrap();
        assert!(out.status.success(), "git {args:?} failed: {out:?}");
    };

    // Create a feature branch with committed code changes, then return to the
    // base branch.
    // NOTE(review): assumes setup_git_repo_in leaves the default branch named
    // "master" — confirm.
    git(&["checkout", "-b", "feature/story-50_story_test"]);
    std::fs::write(tmp.path().join("feature.rs"), "fn main() {}").unwrap();
    git(&["add", "."]);
    git(&["commit", "-m", "add feature"]);
    git(&["checkout", "master"]);

    // Create story file in current/ so move_story_to_archived would work.
    let current_dir = tmp.path().join(".story_kit/work/2_current");
    std::fs::create_dir_all(&current_dir).unwrap();
    std::fs::write(
        current_dir.join("50_story_test.md"),
        "---\nname: Test\n---\n",
    )
    .unwrap();

    let ctx = test_ctx(tmp.path());
    let result = tool_accept_story(&json!({"story_id": "50_story_test"}), &ctx);
    assert!(result.is_err(), "should refuse when feature branch has unmerged code");
    let err = result.unwrap_err();
    assert!(
        err.contains("unmerged"),
        "error should mention unmerged changes: {err}"
    );
}

/// Bug 226: accept_story succeeds when no feature branch exists (e.g. manual stories).
#[test]
fn tool_accept_story_succeeds_when_no_feature_branch() {
    let dir = tempfile::tempdir().unwrap();
    setup_git_repo_in(dir.path());

    // Only a story file in current/ — no feature branch was ever created.
    let current = dir.path().join(".story_kit/work/2_current");
    std::fs::create_dir_all(&current).unwrap();
    std::fs::write(
        current.join("51_story_no_branch.md"),
        "---\nname: No Branch\n---\n",
    )
    .unwrap();

    let ctx = test_ctx(dir.path());
    let result = tool_accept_story(&json!({"story_id": "51_story_no_branch"}), &ctx);
    assert!(result.is_ok(), "should succeed when no feature branch: {result:?}");
}

#[test]
fn tool_check_criterion_missing_story_id() {
    // check_criterion without a story_id must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_check_criterion(&json!({"criterion_index": 0}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}

#[test]
fn tool_check_criterion_missing_criterion_index() {
    // check_criterion without a criterion_index must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());

    let err = tool_check_criterion(&json!({"story_id": "1_test"}), &ctx).unwrap_err();
    assert!(err.contains("criterion_index"));
}

#[test]
fn tool_check_criterion_marks_unchecked_item() {
    // Checking criterion 0 of a committed story succeeds and reports the index.
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
    fs::create_dir_all(&current_dir).unwrap();
    fs::write(
        current_dir.join("1_test.md"),
        "---\nname: Test\n---\n## AC\n- [ ] First criterion\n- [x] Already done\n",
    )
    .unwrap();

    // Commit the story so it's tracked. Previously the exit status of these
    // git commands was ignored (`.output().unwrap()` only catches spawn
    // failures); assert success so a broken setup fails here, not later.
    let add = std::process::Command::new("git")
        .args(["add", "."])
        .current_dir(tmp.path())
        .output()
        .unwrap();
    assert!(add.status.success(), "git add failed: {add:?}");
    let commit = std::process::Command::new("git")
        .args(["commit", "-m", "add story"])
        .current_dir(tmp.path())
        .output()
        .unwrap();
    assert!(commit.status.success(), "git commit failed: {commit:?}");

    let ctx = test_ctx(tmp.path());
    let result = tool_check_criterion(
        &json!({"story_id": "1_test", "criterion_index": 0}),
        &ctx,
    );
    assert!(result.is_ok(), "Expected ok: {result:?}");
    assert!(result.unwrap().contains("Criterion 0 checked"));
}

}
|