// huskies/server/src/http/mcp/story_tools.rs
use crate::agents::{
close_bug_to_archive, feature_branch_has_unmerged_changes, move_story_to_done,
};
use crate::http::context::AppContext;
use crate::http::workflow::{
add_criterion_to_file, check_criterion_in_file, create_bug_file, create_refactor_file,
create_spike_file, create_story_file, list_bug_files, list_refactor_files, load_pipeline_state,
load_upcoming_stories, update_story_in_file, validate_story_dirs,
};
use crate::io::story_metadata::{check_archived_deps, check_archived_deps_from_list, parse_front_matter, parse_unchecked_todos};
use crate::slog_warn;
use crate::workflow::{TestCaseResult, TestStatus, evaluate_acceptance_with_coverage};
use serde_json::{Value, json};
use std::collections::HashMap;
use std::fs;
pub(super) fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
let name = args
.get("name")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: name")?;
let user_story = args.get("user_story").and_then(|v| v.as_str());
let description = args.get("description").and_then(|v| v.as_str());
let acceptance_criteria: Option<Vec<String>> = args
.get("acceptance_criteria")
.and_then(|v| serde_json::from_value(v.clone()).ok());
let depends_on: Option<Vec<u32>> = args
.get("depends_on")
.and_then(|v| serde_json::from_value(v.clone()).ok());
// Spike 61: write the file only — the filesystem watcher detects the new
// .md file in work/1_backlog/ and auto-commits with a deterministic message.
let commit = false;
let root = ctx.state.get_project_root()?;
let story_id = create_story_file(
&root,
name,
user_story,
description,
acceptance_criteria.as_deref(),
depends_on.as_deref(),
commit,
)?;
// Bug 503: warn at creation time if any depends_on points at an already-archived story.
// Archived = satisfied semantics: the dep will resolve immediately on the next promotion
// tick, which is surprising if the archived story was abandoned rather than cleanly done.
let archived_deps = depends_on
.as_deref()
.map(|deps| check_archived_deps_from_list(&root, deps))
.unwrap_or_default();
if !archived_deps.is_empty() {
slog_warn!(
"[create-story] Story '{story_id}' depends_on {archived_deps:?} which \
are already in 6_archived. The dep will be treated as satisfied on the \
next promotion tick. If these deps were abandoned (not cleanly completed), \
consider removing the depends_on or keeping the story in backlog manually."
);
return Ok(format!(
"Created story: {story_id}\n\n\
WARNING: depends_on {archived_deps:?} point at stories already in \
6_archived. These deps are treated as satisfied (archived = satisfied \
semantics), so this story may be auto-promoted from backlog immediately. \
If the archived deps were abandoned rather than completed, remove the \
depends_on or move the story back to backlog manually after promotion."
));
}
Ok(format!("Created story: {story_id}"))
}
/// Purge a story from the in-memory CRDT by writing a tombstone op (story 521).
///
/// This is the eviction primitive for the four-state-machine drift problem
/// we hit on 2026-04-09 — when a story gets stuck in the running server's
/// in-memory CRDT and can't be cleared by sqlite deletes alone (because the
/// in-memory state outlives any pipeline_items / crdt_ops manipulation),
/// this tool writes a proper CRDT delete op via `crdt_state::evict_item`.
///
/// The tombstone op:
/// - Marks the in-memory CRDT item as `is_deleted = true` immediately
/// (so subsequent `read_all_items` / `read_item` calls skip it)
/// - Is persisted to `crdt_ops` so the eviction survives a server restart
/// - Drops the in-memory `CONTENT_STORE` entry for the story
///
/// This tool does NOT touch: running agents, worktrees, the `pipeline_items`
/// shadow table, `timers.json`, or filesystem shadows. Compose with
/// `stop_agent`, `remove_worktree`, etc. as needed for a full purge — or
/// see story 514 (delete_story full cleanup) for a future "do it all" tool.
pub(super) fn tool_purge_story(args: &Value, _ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
crate::crdt_state::evict_item(story_id)?;
Ok(format!(
"Evicted '{story_id}' from in-memory CRDT (tombstone op persisted to crdt_ops; CONTENT_STORE entry dropped)."
))
}
pub(super) fn tool_validate_stories(ctx: &AppContext) -> Result<String, String> {
let root = ctx.state.get_project_root()?;
let results = validate_story_dirs(&root)?;
serde_json::to_string_pretty(&json!(
results
.iter()
.map(|r| json!({
"story_id": r.story_id,
"valid": r.valid,
"error": r.error,
}))
.collect::<Vec<_>>()
))
.map_err(|e| format!("Serialization error: {e}"))
}
pub(super) fn tool_list_upcoming(ctx: &AppContext) -> Result<String, String> {
let stories = load_upcoming_stories(ctx)?;
serde_json::to_string_pretty(&json!(
stories
.iter()
.map(|s| json!({
"story_id": s.story_id,
"name": s.name,
"error": s.error,
}))
.collect::<Vec<_>>()
))
.map_err(|e| format!("Serialization error: {e}"))
}
/// Snapshot the whole pipeline: active stages (current/qa/merge/done) with
/// per-story agent and stuck-story diagnostics, plus the backlog.
pub(super) fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, String> {
    // Convert one stage's stories into JSON rows tagged with the stage name.
    fn stage_rows(items: &[crate::http::workflow::UpcomingStory], stage: &str) -> Vec<Value> {
        let mut rows = Vec::with_capacity(items.len());
        for s in items {
            let mut row = json!({
                "story_id": s.story_id,
                "name": s.name,
                "stage": stage,
                "agent": s.agent.as_ref().map(|a| json!({
                    "agent_name": a.agent_name,
                    "model": a.model,
                    "status": a.status,
                })),
            });
            // Stuck-story diagnostics are only emitted when they carry signal,
            // so callers can spot blocked/retrying/merge-failed stories.
            if s.blocked == Some(true) {
                row["blocked"] = json!(true);
            }
            if let Some(rc) = s.retry_count {
                row["retry_count"] = json!(rc);
            }
            if let Some(ref mf) = s.merge_failure {
                row["merge_failure"] = json!(mf);
            }
            rows.push(row);
        }
        rows
    }
    let state = load_pipeline_state(ctx)?;
    let mut active = stage_rows(&state.current, "current");
    active.extend(stage_rows(&state.qa, "qa"));
    active.extend(stage_rows(&state.merge, "merge"));
    active.extend(stage_rows(&state.done, "done"));
    let backlog: Vec<Value> = state
        .backlog
        .iter()
        .map(|s| json!({ "story_id": s.story_id, "name": s.name }))
        .collect();
    serde_json::to_string_pretty(&json!({
        "active": active,
        "backlog": backlog,
        "backlog_count": backlog.len(),
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
pub(super) fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
let root = ctx.state.get_project_root()?;
// Read from DB content store, falling back to filesystem.
let contents = crate::http::workflow::read_story_content(&root, story_id)
.map_err(|_| format!("Story file not found: {story_id}.md"))?;
let story_name = parse_front_matter(&contents).ok().and_then(|m| m.name);
let todos = parse_unchecked_todos(&contents);
serde_json::to_string_pretty(&json!({
"story_id": story_id,
"story_name": story_name,
"todos": todos,
}))
.map_err(|e| format!("Serialization error: {e}"))
}
pub(super) fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
let unit = parse_test_cases(args.get("unit"))?;
let integration = parse_test_cases(args.get("integration"))?;
let mut workflow = ctx
.workflow
.lock()
.map_err(|e| format!("Lock error: {e}"))?;
workflow.record_test_results_validated(story_id.to_string(), unit, integration)?;
// Persist to story file (best-effort — file write errors are warnings, not failures).
if let Ok(project_root) = ctx.state.get_project_root()
&& let Some(results) = workflow.results.get(story_id)
&& let Err(e) = crate::http::workflow::write_test_results_to_story_file(
&project_root,
story_id,
results,
)
{
slog_warn!("[record_tests] Could not persist results to story file: {e}");
}
Ok("Test results recorded.".to_string())
}
/// Check whether a story passes all acceptance gates without mutating anything.
///
/// Returns Ok with a success message when the story can be accepted, or Err
/// listing every blocking reason (plus any soft warning) joined with "; ".
pub(super) fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let workflow = ctx
        .workflow
        .lock()
        .map_err(|e| format!("Lock error: {e}"))?;
    // Use in-memory results if present; otherwise fall back to file-persisted results.
    // `file_results` is declared in the outer scope so the reference taken in
    // the else-branch lives as long as `results` does.
    let file_results;
    let results = if let Some(r) = workflow.results.get(story_id) {
        r
    } else {
        let project_root = ctx.state.get_project_root().ok();
        file_results = project_root.as_deref().and_then(|root| {
            crate::http::workflow::read_test_results_from_story_file(root, story_id)
        });
        file_results.as_ref().map_or_else(
            || {
                // No results anywhere — borrow a process-wide empty default so
                // the acceptance check still runs (and fails with
                // "No test results recorded"). The static gives us a &'static
                // that satisfies the borrow without cloning.
                static EMPTY: std::sync::OnceLock<crate::workflow::StoryTestResults> =
                    std::sync::OnceLock::new();
                EMPTY.get_or_init(Default::default)
            },
            |r| r,
        )
    };
    let coverage = workflow.coverage.get(story_id);
    let decision = evaluate_acceptance_with_coverage(results, coverage);
    if decision.can_accept {
        Ok("Story can be accepted. All gates pass.".to_string())
    } else {
        // Fold hard failures and any soft warning into one error message.
        let mut parts = decision.reasons;
        if let Some(w) = decision.warning {
            parts.push(w);
        }
        Err(format!("Acceptance blocked: {}", parts.join("; ")))
    }
}
pub(super) fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
let project_root = ctx.agents.get_project_root(&ctx.state)?;
// Bug 226: Refuse to accept if the feature branch has unmerged code.
// The code must be squash-merged via merge_agent_work first.
if feature_branch_has_unmerged_changes(&project_root, story_id) {
return Err(format!(
"Cannot accept story '{story_id}': feature branch 'feature/story-{story_id}' \
has unmerged changes. Use merge_agent_work to squash-merge the code into \
master first."
));
}
move_story_to_done(&project_root, story_id)?;
ctx.agents.remove_agents_for_story(story_id);
Ok(format!(
"Story '{story_id}' accepted, moved to done/, and committed to master."
))
}
/// Mark one acceptance criterion as checked ("- [x]") in the story file.
///
/// `criterion_index` is the zero-based position of the criterion within the
/// story's acceptance-criteria list.
pub(super) fn tool_check_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let criterion_index = args
        .get("criterion_index")
        .and_then(|v| v.as_u64())
        .ok_or("Missing required argument: criterion_index")?;
    // `as usize` would silently truncate an out-of-range u64 on 32-bit
    // targets; the index comes from caller-controlled JSON, so fail loudly.
    let criterion_index = usize::try_from(criterion_index)
        .map_err(|_| format!("criterion_index out of range: {criterion_index}"))?;
    let root = ctx.state.get_project_root()?;
    check_criterion_in_file(&root, story_id, criterion_index)?;
    Ok(format!(
        "Criterion {criterion_index} checked for story '{story_id}'. Committed to master."
    ))
}
pub(super) fn tool_add_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
let criterion = args
.get("criterion")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: criterion")?;
let root = ctx.state.get_project_root()?;
add_criterion_to_file(&root, story_id, criterion)?;
Ok(format!(
"Added criterion to story '{story_id}': - [ ] {criterion}"
))
}
/// Update an existing story's prose sections and/or front matter.
///
/// Bug 503: after the update, `depends_on` is re-checked against 6_archived
/// and a warning is logged/echoed, since archived deps count as satisfied
/// and can trigger an immediate auto-promotion out of backlog.
pub(super) fn tool_update_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(Value::as_str)
        .ok_or("Missing required argument: story_id")?;
    let user_story = args.get("user_story").and_then(Value::as_str);
    let description = args.get("description").and_then(Value::as_str);
    // Merge the explicit `agent` shortcut with the free-form `front_matter`
    // object. Raw serde_json::Value is kept so booleans, numbers, and arrays
    // stay native and update_story_in_file can emit them as unquoted YAML.
    let mut front_matter: HashMap<String, Value> = HashMap::new();
    if let Some(agent) = args.get("agent").and_then(Value::as_str) {
        front_matter.insert("agent".to_string(), Value::String(agent.to_string()));
    }
    if let Some(obj) = args.get("front_matter").and_then(Value::as_object) {
        front_matter.extend(obj.iter().map(|(k, v)| (k.clone(), v.clone())));
    }
    let root = ctx.state.get_project_root()?;
    update_story_in_file(
        &root,
        story_id,
        user_story,
        description,
        (!front_matter.is_empty()).then_some(&front_matter),
    )?;
    // Bug 503: resolve the story's current stage (default: backlog) and check
    // its deps against the archive.
    let stage = crate::pipeline_state::read_typed(story_id)
        .ok()
        .flatten()
        .map_or_else(
            || "1_backlog".to_string(),
            |i| i.stage.dir_name().to_string(),
        );
    let archived_deps = check_archived_deps(&root, &stage, story_id);
    if archived_deps.is_empty() {
        return Ok(format!("Updated story '{story_id}'."));
    }
    slog_warn!(
        "[update-story] Story '{story_id}' depends_on {archived_deps:?} which \
         are already in 6_archived. The dep will be treated as satisfied on the \
         next promotion tick. If these deps were abandoned (not cleanly completed), \
         consider removing the depends_on or keeping the story in backlog manually."
    );
    Ok(format!(
        "Updated story '{story_id}'.\n\n\
         WARNING: depends_on {archived_deps:?} point at stories already in \
         6_archived. These deps are treated as satisfied (archived = satisfied \
         semantics), so this story may be auto-promoted from backlog immediately. \
         If the archived deps were abandoned rather than completed, remove the \
         depends_on or move the story back to backlog manually after promotion."
    ))
}
pub(super) fn tool_create_spike(args: &Value, ctx: &AppContext) -> Result<String, String> {
let name = args
.get("name")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: name")?;
let description = args.get("description").and_then(|v| v.as_str());
let root = ctx.state.get_project_root()?;
let spike_id = create_spike_file(&root, name, description)?;
Ok(format!("Created spike: {spike_id}"))
}
pub(super) fn tool_create_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
let name = args
.get("name")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: name")?;
let description = args
.get("description")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: description")?;
let steps_to_reproduce = args
.get("steps_to_reproduce")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: steps_to_reproduce")?;
let actual_result = args
.get("actual_result")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: actual_result")?;
let expected_result = args
.get("expected_result")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: expected_result")?;
let acceptance_criteria: Option<Vec<String>> = args
.get("acceptance_criteria")
.and_then(|v| serde_json::from_value(v.clone()).ok());
let root = ctx.state.get_project_root()?;
let bug_id = create_bug_file(
&root,
name,
description,
steps_to_reproduce,
actual_result,
expected_result,
acceptance_criteria.as_deref(),
)?;
Ok(format!("Created bug: {bug_id}"))
}
pub(super) fn tool_list_bugs(ctx: &AppContext) -> Result<String, String> {
let root = ctx.state.get_project_root()?;
let bugs = list_bug_files(&root)?;
serde_json::to_string_pretty(&json!(
bugs.iter()
.map(|(id, name)| json!({ "bug_id": id, "name": name }))
.collect::<Vec<_>>()
))
.map_err(|e| format!("Serialization error: {e}"))
}
pub(super) fn tool_close_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
let bug_id = args
.get("bug_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: bug_id")?;
let root = ctx.agents.get_project_root(&ctx.state)?;
close_bug_to_archive(&root, bug_id)?;
ctx.agents.remove_agents_for_story(bug_id);
Ok(format!(
"Bug '{bug_id}' closed, moved to bugs/archive/, and committed to master."
))
}
pub(super) fn tool_unblock_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
let root = ctx.state.get_project_root()?;
// Extract the numeric prefix (e.g. "42" from "42_story_foo")
let story_number = story_id
.split('_')
.next()
.filter(|s| !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()))
.ok_or_else(|| format!("Invalid story_id format: '{story_id}'. Expected a numeric prefix (e.g. '42_story_foo')."))?;
Ok(crate::chat::commands::unblock::unblock_by_number(&root, story_number))
}
/// Best-effort teardown of a story across every place it can live (bug 514).
///
/// Steps run in dependency order: cancel retry timers, stop running agents,
/// drop agent-pool entries, remove the git worktree, write a CRDT tombstone,
/// delete DB content/shadow rows, and remove the filesystem shadow file from
/// whichever work/N_stage/ directory holds it.
///
/// Returns Err only when (a) the story was found nowhere at all, or (b) an
/// agent-stop or filesystem delete failed — those are reported as a
/// "partially deleted" error. Every other step logs a warning and continues.
pub(super) async fn tool_delete_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let project_root = ctx.agents.get_project_root(&ctx.state)?;
    // Failures from the steps that matter accumulate here and turn the final
    // result into a "partially deleted" error.
    let mut failed_steps: Vec<String> = Vec::new();
    // 0. Cancel any pending rate-limit retry timers for this story (bug 514).
    // Must happen before stopping agents so the tick loop cannot re-spawn
    // an agent after we tear everything else down.
    let timer_removed = ctx.timer_store.remove(story_id);
    if timer_removed {
        slog_warn!("[delete_story] Cancelled pending timer for '{story_id}'");
    } else {
        slog_warn!("[delete_story] No pending timer found for '{story_id}'");
    }
    // 1. Stop any running agents for this story (best-effort, but a failed
    //    stop IS recorded in failed_steps — a stuck agent must be surfaced).
    if let Ok(agents) = ctx.agents.list_agents() {
        for agent in agents.iter().filter(|a| a.story_id == story_id) {
            match ctx
                .agents
                .stop_agent(&project_root, story_id, &agent.agent_name)
                .await
            {
                Ok(()) => {
                    slog_warn!(
                        "[delete_story] Stopped agent '{}' for '{story_id}'",
                        agent.agent_name
                    );
                }
                Err(e) => {
                    slog_warn!(
                        "[delete_story] Failed to stop agent '{}' for '{story_id}': {e}",
                        agent.agent_name
                    );
                    failed_steps.push(format!("stop_agent({}): {e}", agent.agent_name));
                }
            }
        }
    }
    // 2. Remove agent pool entries.
    let removed_count = ctx.agents.remove_agents_for_story(story_id);
    slog_warn!("[delete_story] Removed {removed_count} agent pool entries for '{story_id}'");
    // 3. Remove worktree (best-effort; failure is logged, not recorded).
    if let Ok(config) = crate::config::ProjectConfig::load(&project_root) {
        match crate::worktree::remove_worktree_by_story_id(&project_root, story_id, &config).await
        {
            Ok(()) => slog_warn!("[delete_story] Removed worktree for '{story_id}'"),
            Err(e) => slog_warn!("[delete_story] Worktree removal for '{story_id}': {e}"),
        }
    }
    // 4. Write a CRDT tombstone op so the story is evicted from the in-memory
    //    state machine and the deletion is persisted to crdt_ops (survives
    //    restart). Best-effort: legacy filesystem-only stories may not have a
    //    CRDT entry, so a "not found" error is expected and non-fatal.
    match crate::crdt_state::evict_item(story_id) {
        Ok(()) => {
            slog_warn!(
                "[delete_story] Evicted '{story_id}' from CRDT (tombstone persisted to crdt_ops)"
            );
        }
        Err(e) => {
            slog_warn!("[delete_story] CRDT eviction for '{story_id}': {e}");
        }
    }
    // 5. Delete from database content store and shadow table. Record whether
    //    the story was known to the DB *before* deleting, for the final
    //    not-found check below.
    let found_in_db = crate::db::read_content(story_id).is_some()
        || crate::pipeline_state::read_typed(story_id).ok().flatten().is_some();
    crate::db::delete_item(story_id);
    slog_warn!("[delete_story] Deleted '{story_id}' from content store / shadow table");
    // 6. Remove the filesystem shadow file from work/N_stage/. A story lives
    //    in at most one stage directory, so stop at the first hit.
    let sk = project_root.join(".huskies").join("work");
    let stage_dirs = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    let mut deleted_from_fs = false;
    for stage in &stage_dirs {
        let path = sk.join(stage).join(format!("{story_id}.md"));
        if path.exists() {
            match fs::remove_file(&path) {
                Ok(()) => {
                    slog_warn!(
                        "[delete_story] Deleted filesystem shadow '{story_id}' from work/{stage}/"
                    );
                    deleted_from_fs = true;
                }
                Err(e) => {
                    slog_warn!("[delete_story] Failed to delete filesystem shadow '{story_id}' from work/{stage}/: {e}");
                    failed_steps.push(format!("delete_filesystem({stage}): {e}"));
                }
            }
            break;
        }
    }
    // If no trace of the story existed anywhere, tell the caller rather than
    // reporting a successful no-op delete.
    if !found_in_db && !deleted_from_fs && !timer_removed {
        return Err(format!(
            "Story '{story_id}' not found in any pipeline stage."
        ));
    }
    if !failed_steps.is_empty() {
        return Err(format!(
            "Story '{story_id}' partially deleted. Failed steps: {}.",
            failed_steps.join("; ")
        ));
    }
    Ok(format!("Story '{story_id}' deleted from pipeline."))
}
pub(super) fn tool_create_refactor(args: &Value, ctx: &AppContext) -> Result<String, String> {
let name = args
.get("name")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: name")?;
let description = args.get("description").and_then(|v| v.as_str());
let acceptance_criteria: Option<Vec<String>> = args
.get("acceptance_criteria")
.and_then(|v| serde_json::from_value(v.clone()).ok());
let root = ctx.state.get_project_root()?;
let refactor_id =
create_refactor_file(&root, name, description, acceptance_criteria.as_deref())?;
Ok(format!("Created refactor: {refactor_id}"))
}
pub(super) fn tool_list_refactors(ctx: &AppContext) -> Result<String, String> {
let root = ctx.state.get_project_root()?;
let refactors = list_refactor_files(&root)?;
serde_json::to_string_pretty(&json!(
refactors
.iter()
.map(|(id, name)| json!({ "refactor_id": id, "name": name }))
.collect::<Vec<_>>()
))
.map_err(|e| format!("Serialization error: {e}"))
}
/// Parse an optional JSON array of `{name, status, details?}` objects into
/// test-case results.
///
/// `None` and JSON null both mean "no cases"; any other non-array value is
/// an error, as is a missing `name`/`status` or a status other than
/// "pass"/"fail".
pub(super) fn parse_test_cases(value: Option<&Value>) -> Result<Vec<TestCaseResult>, String> {
    let items = match value {
        None | Some(Value::Null) => return Ok(Vec::new()),
        Some(Value::Array(items)) => items,
        Some(_) => return Err("Expected array for test cases".to_string()),
    };
    let mut cases = Vec::with_capacity(items.len());
    for item in items {
        let name = item
            .get("name")
            .and_then(Value::as_str)
            .ok_or("Test case missing 'name'")?;
        let status = match item.get("status").and_then(Value::as_str) {
            Some("pass") => TestStatus::Pass,
            Some("fail") => TestStatus::Fail,
            Some(other) => {
                return Err(format!(
                    "Invalid test status '{other}'. Use 'pass' or 'fail'."
                ));
            }
            None => return Err("Test case missing 'status'".to_string()),
        };
        cases.push(TestCaseResult {
            name: name.to_string(),
            status,
            details: item.get("details").and_then(Value::as_str).map(String::from),
        });
    }
    Ok(cases)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::http::test_helpers::test_ctx;
#[test]
fn parse_test_cases_empty() {
    // No value at all means "no test cases", not an error.
    assert!(parse_test_cases(None).unwrap().is_empty());
}
#[test]
fn parse_test_cases_valid() {
    // A well-formed array round-trips statuses and optional details.
    let input = json!([
        {"name": "test1", "status": "pass"},
        {"name": "test2", "status": "fail", "details": "assertion failed"}
    ]);
    let cases = parse_test_cases(Some(&input)).unwrap();
    assert_eq!(cases.len(), 2);
    assert_eq!(cases[0].status, TestStatus::Pass);
    assert_eq!(cases[1].status, TestStatus::Fail);
    assert_eq!(cases[1].details, Some("assertion failed".to_string()));
}
#[test]
fn parse_test_cases_invalid_status() {
    // Anything other than "pass"/"fail" is rejected.
    let input = json!([{"name": "t", "status": "maybe"}]);
    assert!(parse_test_cases(Some(&input)).is_err());
}
#[test]
fn parse_test_cases_null_value_returns_empty() {
    // Explicit JSON null behaves like an absent argument.
    assert!(parse_test_cases(Some(&json!(null))).unwrap().is_empty());
}
#[test]
fn parse_test_cases_non_array_returns_error() {
    // Objects (and other non-array values) are a type error.
    let err = parse_test_cases(Some(&json!({"invalid": "input"}))).unwrap_err();
    assert!(err.contains("Expected array"));
}
#[test]
fn parse_test_cases_missing_name_returns_error() {
    let err = parse_test_cases(Some(&json!([{"status": "pass"}]))).unwrap_err();
    assert!(err.contains("name"));
}
#[test]
fn parse_test_cases_missing_status_returns_error() {
    let err = parse_test_cases(Some(&json!([{"name": "test1"}]))).unwrap_err();
    assert!(err.contains("status"));
}
#[test]
fn tool_validate_stories_empty_project() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let out = tool_validate_stories(&ctx).unwrap();
    // The CRDT is process-global and other tests may have populated it,
    // so only assert that the output parses as a JSON array.
    serde_json::from_str::<Vec<Value>>(&out).unwrap();
}
#[test]
fn tool_create_story_and_list_upcoming() {
    let tmp = tempfile::tempdir().unwrap();
    // Spike 61: no git repo required — create_story only writes the file;
    // the filesystem watcher commits it asynchronously.
    let ctx = test_ctx(tmp.path());
    let args = json!({"name": "Test Story", "acceptance_criteria": ["AC1", "AC2"]});
    let created = tool_create_story(&args, &ctx).unwrap();
    assert!(created.contains("Created story:"));
    // The upcoming list is global, so look for our story rather than
    // asserting an exact count.
    let parsed: Vec<Value> = serde_json::from_str(&tool_list_upcoming(&ctx).unwrap()).unwrap();
    assert!(
        parsed.iter().any(|s| s["name"] == "Test Story"),
        "expected 'Test Story' in upcoming list: {parsed:?}"
    );
}
#[test]
fn tool_create_story_rejects_empty_name() {
    // A name with no alphanumeric characters is refused.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_create_story(&json!({"name": "!!!"}), &ctx).unwrap_err();
    assert!(err.contains("alphanumeric"));
}
#[test]
fn tool_create_story_missing_name() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_create_story(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("Missing required argument"));
}
// Regression test for bug 509: description was silently dropped.
#[test]
fn tool_create_story_description_is_written_to_file() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let args = json!({
        "name": "Story With Description",
        "description": "This is the background context."
    });
    let result = tool_create_story(&args, &ctx).unwrap();
    assert!(result.contains("Created story:"));
    // Recover the generated id from the "Created story: <id>" message, then
    // verify both the section header and the body text landed in the content.
    let story_id = result.trim_start_matches("Created story: ").trim().to_string();
    let content = crate::db::read_content(&story_id).expect("story content should exist");
    assert!(
        content.contains("## Description"),
        "Description section missing from story: {content}"
    );
    assert!(
        content.contains("This is the background context."),
        "Description text missing from story: {content}"
    );
}
#[test]
fn tool_get_pipeline_status_returns_structured_response() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    // Seed one story per pipeline stage straight into the content store.
    let fixtures = [
        ("1_backlog", "9910_story_upcoming", "Upcoming Story"),
        ("2_current", "9920_story_current", "Current Story"),
        ("3_qa", "9930_story_qa", "QA Story"),
        ("4_merge", "9940_story_merge", "Merge Story"),
        ("5_done", "9950_story_done", "Done Story"),
    ];
    for (stage, id, name) in &fixtures {
        crate::db::write_item_with_content(
            id,
            stage,
            &format!("---\nname: \"{name}\"\n---\n"),
        );
    }
    let ctx = test_ctx(tmp.path());
    let parsed: Value = serde_json::from_str(&tool_get_pipeline_status(&ctx).unwrap()).unwrap();
    // Every non-backlog stage should appear among the active items.
    let active = parsed["active"].as_array().unwrap();
    let stages: Vec<&str> = active
        .iter()
        .map(|i| i["stage"].as_str().unwrap())
        .collect();
    for expected in ["current", "qa", "merge", "done"] {
        assert!(stages.contains(&expected));
    }
    // The backlog story must show up in the backlog section.
    let backlog = parsed["backlog"].as_array().unwrap();
    assert!(
        backlog.iter().any(|b| b["story_id"] == "9910_story_upcoming"),
        "expected 9910_story_upcoming in backlog: {backlog:?}"
    );
}
#[test]
fn tool_get_pipeline_status_includes_agent_assignment() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    crate::db::write_item_with_content(
        "9921_story_active",
        "2_current",
        "---\nname: \"Active Story\"\n---\n",
    );
    let ctx = test_ctx(tmp.path());
    // Attach a fake running agent and verify it is surfaced on the item.
    ctx.agents.inject_test_agent(
        "9921_story_active",
        "coder-1",
        crate::agents::AgentStatus::Running,
    );
    let parsed: Value = serde_json::from_str(&tool_get_pipeline_status(&ctx).unwrap()).unwrap();
    let active = parsed["active"].as_array().unwrap();
    let item = active
        .iter()
        .find(|i| i["story_id"] == "9921_story_active")
        .expect("expected 9921_story_active in active items");
    assert_eq!(item["stage"], "current");
    assert!(!item["agent"].is_null(), "agent should be present");
    assert_eq!(item["agent"]["agent_name"], "coder-1");
    assert_eq!(item["agent"]["status"], "running");
}
#[test]
fn tool_get_story_todos_missing_file() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    // An unknown story id surfaces a "not found" error.
    let err = tool_get_story_todos(&json!({"story_id": "99_nonexistent"}), &ctx).unwrap_err();
    assert!(err.contains("not found"));
}
#[test]
fn tool_get_story_todos_returns_unchecked() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    crate::db::write_item_with_content(
        "9901_test",
        "2_current",
        "---\nname: Test\n---\n## AC\n- [ ] First\n- [x] Done\n- [ ] Second\n",
    );
    let ctx = test_ctx(tmp.path());
    let raw = tool_get_story_todos(&json!({"story_id": "9901_test"}), &ctx).unwrap();
    let parsed: Value = serde_json::from_str(&raw).unwrap();
    // Only the two unchecked boxes count; the "[x]" item is excluded.
    assert_eq!(parsed["todos"].as_array().unwrap().len(), 2);
    assert_eq!(parsed["story_name"], "Test");
}
#[test]
fn tool_record_tests_and_ensure_acceptance() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    // All-green unit + integration results should unlock acceptance.
    let recorded = tool_record_tests(
        &json!({
            "story_id": "1_test",
            "unit": [{"name": "u1", "status": "pass"}],
            "integration": [{"name": "i1", "status": "pass"}]
        }),
        &ctx,
    )
    .unwrap();
    assert!(recorded.contains("recorded"));
    let accepted = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx).unwrap();
    assert!(accepted.contains("All gates pass"));
}
#[test]
fn tool_ensure_acceptance_blocks_on_failures() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    // A single failing unit test must block acceptance.
    tool_record_tests(
        &json!({
            "story_id": "1_test",
            "unit": [{"name": "u1", "status": "fail"}],
            "integration": []
        }),
        &ctx,
    )
    .unwrap();
    let err = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx).unwrap_err();
    assert!(err.contains("blocked"));
}
// Initialise a throwaway git repo in `dir` with a test identity and an
// empty root commit, so tools that commit have something to commit onto.
fn setup_git_repo_in(dir: &std::path::Path) {
    let git = |args: &[&str]| {
        std::process::Command::new("git")
            .args(args)
            .current_dir(dir)
            .output()
            .unwrap();
    };
    git(&["init"]);
    git(&["config", "user.email", "test@test.com"]);
    git(&["config", "user.name", "Test"]);
    git(&["commit", "--allow-empty", "-m", "init"]);
}
#[test]
fn create_bug_in_tools_list() {
use super::super::handle_tools_list;
let resp = handle_tools_list(Some(json!(1)));
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
let tool = tools.iter().find(|t| t["name"] == "create_bug");
assert!(tool.is_some(), "create_bug missing from tools list");
let t = tool.unwrap();
let desc = t["description"].as_str().unwrap();
assert!(
desc.contains("work/1_backlog/"),
"create_bug description should reference work/1_backlog/, got: {desc}"
);
assert!(
!desc.contains(".huskies/bugs"),
"create_bug description should not reference nonexistent .huskies/bugs/, got: {desc}"
);
let required = t["inputSchema"]["required"].as_array().unwrap();
let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
assert!(req_names.contains(&"name"));
assert!(req_names.contains(&"description"));
assert!(req_names.contains(&"steps_to_reproduce"));
assert!(req_names.contains(&"actual_result"));
assert!(req_names.contains(&"expected_result"));
}
#[test]
fn list_bugs_in_tools_list() {
use super::super::handle_tools_list;
let resp = handle_tools_list(Some(json!(1)));
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
let tool = tools.iter().find(|t| t["name"] == "list_bugs");
assert!(tool.is_some(), "list_bugs missing from tools list");
let t = tool.unwrap();
let desc = t["description"].as_str().unwrap();
assert!(
desc.contains("work/1_backlog/"),
"list_bugs description should reference work/1_backlog/, got: {desc}"
);
assert!(
!desc.contains(".huskies/bugs"),
"list_bugs description should not reference nonexistent .huskies/bugs/, got: {desc}"
);
}
#[test]
fn close_bug_in_tools_list() {
    use super::super::handle_tools_list;
    // Fetch the advertised tool list from the MCP handler.
    let resp = handle_tools_list(Some(json!(1)));
    // Bind the owned result so `tools` can borrow it — avoids the previous
    // needless clone of the entire tools array.
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let tool = tools.iter().find(|t| t["name"] == "close_bug");
    assert!(tool.is_some(), "close_bug missing from tools list");
    let t = tool.unwrap();
    let desc = t["description"].as_str().unwrap();
    // close_bug targets the done stage; the legacy path must not appear.
    assert!(
        !desc.contains(".huskies/bugs"),
        "close_bug description should not reference nonexistent .huskies/bugs/, got: {desc}"
    );
    assert!(
        desc.contains("work/5_done/"),
        "close_bug description should reference work/5_done/, got: {desc}"
    );
    // The only required argument is the bug ID.
    let required = t["inputSchema"]["required"].as_array().unwrap();
    let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
    assert!(req_names.contains(&"bug_id"));
}
#[test]
fn tool_create_bug_missing_name() {
    // A create_bug call without `name` must be rejected, and the error
    // must identify the missing argument.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let args = json!({
        "description": "d",
        "steps_to_reproduce": "s",
        "actual_result": "a",
        "expected_result": "e"
    });
    let err = tool_create_bug(&args, &ctx).unwrap_err();
    assert!(err.contains("name"));
}
#[test]
fn tool_create_bug_missing_description() {
    // Everything except `description` is supplied; the error must name it.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let args = json!({
        "name": "Bug",
        "steps_to_reproduce": "s",
        "actual_result": "a",
        "expected_result": "e"
    });
    let err = tool_create_bug(&args, &ctx).unwrap_err();
    assert!(err.contains("description"));
}
#[test]
fn tool_create_bug_creates_file_and_commits() {
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    let ctx = test_ctx(tmp.path());
    // A fully-specified bug report should create the bug and echo its ID.
    let args = json!({
        "name": "Login Crash",
        "description": "The app crashes on login.",
        "steps_to_reproduce": "1. Open app\n2. Click login",
        "actual_result": "500 error",
        "expected_result": "Successful login"
    });
    let result = tool_create_bug(&args, &ctx).unwrap();
    assert!(result.contains("_bug_login_crash"), "result should contain bug ID: {result}");
    // The success message has the shape "Created bug: <id>"; recover the ID.
    let bug_id = result.trim_start_matches("Created bug: ").trim();
    // The bug body must have been persisted to the CRDT content store.
    assert!(
        crate::db::read_content(bug_id).is_some(),
        "expected bug content in CRDT for {bug_id}"
    );
}
#[test]
fn tool_list_bugs_no_crash_on_empty_root() {
    // list_bugs sources its data from the global CRDT rather than the
    // filesystem; with a bare temp dir it must still return parseable JSON.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let listing = tool_list_bugs(&ctx).unwrap();
    // Other tests share the global CRDT, so the array may be non-empty;
    // we only require that it parses as a JSON array.
    let _parsed: Vec<Value> = serde_json::from_str(&listing).unwrap();
}
#[test]
fn tool_list_bugs_returns_open_bugs() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    // Seed two open bugs directly into the backlog stage of the store.
    crate::db::write_item_with_content(
        "9902_bug_crash",
        "1_backlog",
        "---\nname: \"App Crash\"\n---\n# Bug 9902: App Crash\n",
    );
    crate::db::write_item_with_content(
        "9903_bug_typo",
        "1_backlog",
        "---\nname: \"Typo in Header\"\n---\n# Bug 9903: Typo in Header\n",
    );
    let ctx = test_ctx(tmp.path());
    // Both seeded bugs must surface with their IDs and display names.
    let parsed: Vec<Value> = serde_json::from_str(&tool_list_bugs(&ctx).unwrap()).unwrap();
    let has = |id: &str, name: &str| parsed.iter().any(|b| b["bug_id"] == id && b["name"] == name);
    assert!(
        has("9902_bug_crash", "App Crash"),
        "expected 9902_bug_crash in bugs list: {parsed:?}"
    );
    assert!(
        has("9903_bug_typo", "Typo in Header"),
        "expected 9903_bug_typo in bugs list: {parsed:?}"
    );
}
#[test]
fn tool_close_bug_missing_bug_id() {
    // An empty argument object must fail and name the missing bug_id.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_close_bug(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("bug_id"));
}
/// close_bug on a tracked bug must succeed and the bug's content must
/// remain readable from the CRDT store after the close.
#[test]
fn tool_close_bug_moves_to_archive() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo_in(tmp.path());
// Shadow the bug on disk in the backlog stage…
let backlog_dir = tmp.path().join(".huskies/work/1_backlog");
std::fs::create_dir_all(&backlog_dir).unwrap();
let bug_file = backlog_dir.join("9901_bug_crash.md");
let content = "# Bug 9901: Crash\n";
std::fs::write(&bug_file, content).unwrap();
// …and mirror the same bytes into the CRDT content store.
crate::db::ensure_content_store();
crate::db::write_content("9901_bug_crash", content);
// Stage the file so it's tracked
std::process::Command::new("git")
.args(["add", "."])
.current_dir(tmp.path())
.output()
.unwrap();
// Commit so the file is clean in git before close_bug runs — presumably
// close_bug_to_archive moves the tracked file; verify against that helper.
std::process::Command::new("git")
.args(["commit", "-m", "add bug"])
.current_dir(tmp.path())
.output()
.unwrap();
let ctx = test_ctx(tmp.path());
let result = tool_close_bug(&json!({"bug_id": "9901_bug_crash"}), &ctx).unwrap();
// The success message echoes the bug ID…
assert!(result.contains("9901_bug_crash"));
// …and the content survives the archive move.
assert!(
crate::db::read_content("9901_bug_crash").is_some(),
"content store should have the bug after close"
);
}
#[test]
fn create_spike_in_tools_list() {
    use super::super::handle_tools_list;
    // Fetch the advertised tool list from the MCP handler.
    let resp = handle_tools_list(Some(json!(1)));
    // Bind the owned result so `tools` can borrow it — avoids the previous
    // needless clone of the entire tools array.
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let tool = tools.iter().find(|t| t["name"] == "create_spike");
    assert!(tool.is_some(), "create_spike missing from tools list");
    let t = tool.unwrap();
    assert!(t["description"].is_string());
    // Only `name` is mandatory; description stays optional.
    let required = t["inputSchema"]["required"].as_array().unwrap();
    let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
    assert!(req_names.contains(&"name"));
    assert!(!req_names.contains(&"description"));
}
#[test]
fn tool_create_spike_missing_name() {
    // No arguments at all: must fail and name the missing `name` field.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_create_spike(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("name"));
}
#[test]
fn tool_create_spike_rejects_empty_name() {
    // A name with no alphanumeric characters must be rejected; the error
    // is expected to mention the alphanumeric requirement.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_create_spike(&json!({"name": "!!!"}), &ctx).unwrap_err();
    assert!(err.contains("alphanumeric"));
}
#[test]
fn tool_create_spike_creates_file() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let args = json!({"name": "Compare Encoders", "description": "Which encoder is fastest?"});
    let result = tool_create_spike(&args, &ctx).unwrap();
    assert!(result.contains("_spike_compare_encoders"), "result should contain spike ID: {result}");
    // The success message has the shape "Created spike: <id>"; recover the ID.
    let spike_id = result.trim_start_matches("Created spike: ").trim();
    // The CRDT copy must carry quoted-name front matter plus the description.
    let contents = crate::db::read_content(spike_id)
        .expect("expected spike content in CRDT");
    assert!(contents.starts_with("---\nname: \"Compare Encoders\"\n---"));
    assert!(contents.contains("Which encoder is fastest?"));
}
#[test]
fn tool_create_spike_creates_file_without_description() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_create_spike(&json!({"name": "My Spike"}), &ctx).unwrap();
    assert!(result.contains("_spike_my_spike"), "result should contain spike ID: {result}");
    // The success message has the shape "Created spike: <id>"; recover the ID.
    let spike_id = result.trim_start_matches("Created spike: ").trim();
    // With no description, the template falls back to a TBD question stub.
    let contents = crate::db::read_content(spike_id)
        .expect("expected spike content in CRDT");
    assert!(contents.starts_with("---\nname: \"My Spike\"\n---"));
    assert!(contents.contains("## Question\n\n- TBD\n"));
}
#[test]
fn tool_record_tests_missing_story_id() {
    // Result arrays alone are not enough — story_id is mandatory.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_record_tests(&json!({"unit": [], "integration": []}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}
#[test]
fn tool_record_tests_invalid_unit_type_returns_error() {
    // `unit` must be an array; a bare string is a type error, not a panic.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let args = json!({
        "story_id": "1_test",
        "unit": "not_an_array",
        "integration": []
    });
    assert!(tool_record_tests(&args, &ctx).is_err());
}
#[test]
fn tool_ensure_acceptance_missing_story_id() {
    // No story_id at all: the gate must refuse and name the argument.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_ensure_acceptance(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}
#[test]
fn tool_validate_stories_with_valid_story() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    // Well-formed front matter plus an AC checklist should validate cleanly.
    crate::db::write_item_with_content(
        "9907_test",
        "2_current",
        "---\nname: \"Valid Story\"\n---\n## AC\n- [ ] First\n",
    );
    let ctx = test_ctx(tmp.path());
    let parsed: Vec<Value> =
        serde_json::from_str(&tool_validate_stories(&ctx).unwrap()).unwrap();
    let item = parsed
        .iter()
        .find(|v| v["story_id"] == "9907_test")
        .expect("expected 9907_test in validation results");
    assert_eq!(item["valid"], true);
}
#[test]
fn tool_validate_stories_with_invalid_front_matter() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    // A story without any front matter block must be reported as invalid.
    crate::db::write_item_with_content(
        "9908_test",
        "2_current",
        "## No front matter at all\n",
    );
    let ctx = test_ctx(tmp.path());
    let parsed: Vec<Value> =
        serde_json::from_str(&tool_validate_stories(&ctx).unwrap()).unwrap();
    let item = parsed
        .iter()
        .find(|v| v["story_id"] == "9908_test")
        .expect("expected 9908_test in validation results");
    assert_eq!(item["valid"], false);
}
#[test]
fn record_tests_persists_to_story_file() {
    let tmp = tempfile::tempdir().unwrap();
    crate::db::ensure_content_store();
    crate::db::write_item_with_content(
        "9906_story_persist",
        "2_current",
        "---\nname: Persist\n---\n# Story\n",
    );
    let ctx = test_ctx(tmp.path());
    // Record one passing unit test against the seeded story.
    let payload = json!({
        "story_id": "9906_story_persist",
        "unit": [{"name": "u1", "status": "pass"}],
        "integration": []
    });
    tool_record_tests(&payload, &ctx).unwrap();
    // Recording must write the human-readable section, the machine-readable
    // marker, and the individual test names back into the story content.
    let contents = crate::db::read_content("9906_story_persist")
        .expect("story content should exist in CRDT");
    assert!(
        contents.contains("## Test Results"),
        "content should have Test Results section"
    );
    assert!(
        contents.contains("huskies-test-results:"),
        "content should have JSON marker"
    );
    assert!(contents.contains("u1"), "content should contain test name");
}
#[test]
fn ensure_acceptance_reads_from_file_when_not_in_memory() {
    let tmp = tempfile::tempdir().unwrap();
    // Pre-populate the content store with a story that already carries a
    // fully-passing Test Results marker (one unit, one integration test).
    let story_content = "---\nname: Persist\n---\n# Story\n\n## Test Results\n\n<!-- huskies-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"pass\",\"details\":null}],\"integration\":[{\"name\":\"i1\",\"status\":\"pass\",\"details\":null}]} -->\n";
    crate::db::ensure_content_store();
    crate::db::write_item_with_content("9905_story_file_only", "2_current", story_content);
    let ctx = test_ctx(tmp.path());
    // Nothing was recorded in this process; the gate must fall back to the
    // stored content and accept.
    let result = tool_ensure_acceptance(&json!({"story_id": "9905_story_file_only"}), &ctx);
    assert!(
        result.is_ok(),
        "should accept based on content store data, got: {:?}",
        result
    );
    assert!(result.unwrap().contains("All gates pass"));
}
#[test]
fn ensure_acceptance_file_with_failures_still_blocks() {
    let tmp = tempfile::tempdir().unwrap();
    // Only a filesystem shadow exists here (no CRDT seeding), and the
    // recorded unit test is a failure — the gate must block.
    let current = tmp.path().join(".huskies/work/2_current");
    fs::create_dir_all(&current).unwrap();
    let story_content = "---\nname: Fail\n---\n# Story\n\n## Test Results\n\n<!-- huskies-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"fail\",\"details\":\"error\"}],\"integration\":[]} -->\n";
    fs::write(current.join("3_story_fail.md"), story_content).unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_ensure_acceptance(&json!({"story_id": "3_story_fail"}), &ctx).unwrap_err();
    assert!(err.contains("blocked"));
}
#[tokio::test]
async fn tool_delete_story_missing_story_id() {
    // Omitting story_id entirely must produce an error naming the argument.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_delete_story(&json!({}), &ctx).await.unwrap_err();
    assert!(err.contains("story_id"));
}
#[tokio::test]
async fn tool_delete_story_not_found_returns_error() {
    // Deleting an ID that exists in no stage directory must fail loudly.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_delete_story(&json!({"story_id": "99_nonexistent"}), &ctx)
        .await
        .unwrap_err();
    assert!(err.contains("not found in any pipeline stage"));
}
#[tokio::test]
async fn tool_delete_story_deletes_file_from_backlog() {
    let tmp = tempfile::tempdir().unwrap();
    // Shadow a story into the backlog stage on disk.
    let backlog = tmp.path().join(".huskies/work/1_backlog");
    fs::create_dir_all(&backlog).unwrap();
    let story_file = backlog.join("10_story_cleanup.md");
    fs::write(&story_file, "---\nname: Cleanup\n---\n").unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_delete_story(&json!({"story_id": "10_story_cleanup"}), &ctx).await;
    assert!(result.is_ok(), "expected ok: {result:?}");
    // The on-disk shadow must be removed along with the logical story.
    assert!(!story_file.exists(), "story file should be deleted");
}
#[tokio::test]
async fn tool_delete_story_deletes_file_from_current() {
    let tmp = tempfile::tempdir().unwrap();
    // Shadow a story into the current stage on disk.
    let current = tmp.path().join(".huskies/work/2_current");
    fs::create_dir_all(&current).unwrap();
    let story_file = current.join("11_story_active.md");
    fs::write(&story_file, "---\nname: Active\n---\n").unwrap();
    let ctx = test_ctx(tmp.path());
    let result = tool_delete_story(&json!({"story_id": "11_story_active"}), &ctx).await;
    assert!(result.is_ok(), "expected ok: {result:?}");
    // The on-disk shadow must be removed along with the logical story.
    assert!(!story_file.exists(), "story file should be deleted");
}
#[test]
fn tool_accept_story_missing_story_id() {
    // Calling accept_story with no arguments must fail and name story_id.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_accept_story(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}
#[test]
fn tool_accept_story_nonexistent_story_returns_error() {
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    let ctx = test_ctx(tmp.path());
    // No story file was placed in current/ — acceptance must fail.
    assert!(tool_accept_story(&json!({"story_id": "99_nonexistent"}), &ctx).is_err());
}
/// Bug 226: accept_story must refuse when the feature branch has unmerged code.
#[test]
fn tool_accept_story_refuses_when_feature_branch_has_unmerged_code() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo_in(tmp.path());
// Create a feature branch with code changes.
// NOTE(review): the branch name follows a feature/story-<story_id>
// convention — presumably what feature_branch_has_unmerged_changes keys
// on; verify against that helper.
std::process::Command::new("git")
.args(["checkout", "-b", "feature/story-50_story_test"])
.current_dir(tmp.path())
.output()
.unwrap();
std::fs::write(tmp.path().join("feature.rs"), "fn main() {}").unwrap();
std::process::Command::new("git")
.args(["add", "."])
.current_dir(tmp.path())
.output()
.unwrap();
std::process::Command::new("git")
.args(["commit", "-m", "add feature"])
.current_dir(tmp.path())
.output()
.unwrap();
// Switch back to master so the feature commit is left unmerged when
// accept_story runs its check.
std::process::Command::new("git")
.args(["checkout", "master"])
.current_dir(tmp.path())
.output()
.unwrap();
// Create story file in current/ so move_story_to_done would work.
let current_dir = tmp.path().join(".huskies/work/2_current");
std::fs::create_dir_all(&current_dir).unwrap();
std::fs::write(
current_dir.join("50_story_test.md"),
"---\nname: Test\n---\n",
)
.unwrap();
let ctx = test_ctx(tmp.path());
let result = tool_accept_story(&json!({"story_id": "50_story_test"}), &ctx);
assert!(
result.is_err(),
"should refuse when feature branch has unmerged code"
);
// The error message must explain *why* acceptance was refused.
let err = result.unwrap_err();
assert!(
err.contains("unmerged"),
"error should mention unmerged changes: {err}"
);
}
/// Bug 226: accept_story succeeds when no feature branch exists (e.g. manual stories).
#[test]
fn tool_accept_story_succeeds_when_no_feature_branch() {
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    // Place a story in current/ without ever creating a feature branch,
    // mirroring a manually-authored story, and mirror it into the CRDT.
    let content = "---\nname: No Branch\n---\n";
    let current_dir = tmp.path().join(".huskies/work/2_current");
    std::fs::create_dir_all(&current_dir).unwrap();
    std::fs::write(current_dir.join("51_story_no_branch.md"), content).unwrap();
    crate::db::ensure_content_store();
    crate::db::write_content("51_story_no_branch", content);
    let ctx = test_ctx(tmp.path());
    let result = tool_accept_story(&json!({"story_id": "51_story_no_branch"}), &ctx);
    assert!(
        result.is_ok(),
        "should succeed when no feature branch: {result:?}"
    );
}
// ── tool_update_story non-string front matter tests ───────────────────────
/// Shadow a story into `.huskies/work/2_current/` on disk and mirror the
/// same bytes into the CRDT content store so the update tools can find it.
fn setup_story_for_update(dir: &std::path::Path, story_id: &str, content: &str) {
    let stage_dir = dir.join(".huskies/work/2_current");
    fs::create_dir_all(&stage_dir).unwrap();
    fs::write(stage_dir.join(format!("{story_id}.md")), content).unwrap();
    crate::db::ensure_content_store();
    crate::db::write_content(story_id, content);
}
#[test]
fn tool_update_story_front_matter_json_bool_written_unquoted() {
    let tmp = tempfile::tempdir().unwrap();
    setup_story_for_update(
        tmp.path(),
        "504_bool_test",
        "---\nname: Bool Test\n---\n\nNo sections.\n",
    );
    let ctx = test_ctx(tmp.path());
    // A JSON boolean in front_matter must round-trip as YAML `false`,
    // never as the string "false".
    let args = json!({"story_id": "504_bool_test", "front_matter": {"blocked": false}});
    let result = tool_update_story(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");
    let content = crate::db::read_content("504_bool_test").unwrap();
    assert!(content.contains("blocked: false"), "bool should be unquoted: {content}");
    assert!(!content.contains("blocked: \"false\""), "bool must not be quoted: {content}");
}
#[test]
fn tool_update_story_front_matter_json_number_written_unquoted() {
    let tmp = tempfile::tempdir().unwrap();
    setup_story_for_update(
        tmp.path(),
        "504_num_test",
        "---\nname: Num Test\n---\n\nNo sections.\n",
    );
    let ctx = test_ctx(tmp.path());
    // A JSON number in front_matter must round-trip as YAML `3`,
    // never as the string "3".
    let args = json!({"story_id": "504_num_test", "front_matter": {"retry_count": 3}});
    let result = tool_update_story(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");
    let content = crate::db::read_content("504_num_test").unwrap();
    assert!(content.contains("retry_count: 3"), "number should be unquoted: {content}");
    assert!(!content.contains("retry_count: \"3\""), "number must not be quoted: {content}");
}
#[test]
fn tool_update_story_front_matter_json_array_written_as_yaml_sequence() {
    let tmp = tempfile::tempdir().unwrap();
    setup_story_for_update(
        tmp.path(),
        "504_arr_test",
        "---\nname: Array Test\n---\n\nNo sections.\n",
    );
    let ctx = test_ctx(tmp.path());
    // A JSON array in front_matter must round-trip as an inline YAML sequence.
    let args = json!({"story_id": "504_arr_test", "front_matter": {"depends_on": [490, 491]}});
    let result = tool_update_story(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");
    let content = crate::db::read_content("504_arr_test").unwrap();
    // YAML inline sequences use spaces after commas
    assert!(content.contains("depends_on: [490, 491]"), "array should be unquoted YAML: {content}");
    assert!(!content.contains("depends_on: \""), "array must not be quoted: {content}");
    // Round-trip: the emitted YAML must parse back into the typed dep list.
    let meta = crate::io::story_metadata::parse_front_matter(&content)
        .expect("front matter should parse");
    assert_eq!(meta.depends_on, Some(vec![490, 491]));
}
#[test]
fn tool_check_criterion_missing_story_id() {
    // An index without a story_id must fail and name the missing argument.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_check_criterion(&json!({"criterion_index": 0}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}
#[test]
fn tool_check_criterion_missing_criterion_index() {
    // A story_id without an index must fail and name the missing argument.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    let err = tool_check_criterion(&json!({"story_id": "1_test"}), &ctx).unwrap_err();
    assert!(err.contains("criterion_index"));
}
#[test]
fn tool_check_criterion_marks_unchecked_item() {
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    crate::db::ensure_content_store();
    // Seed a story with one open and one already-completed AC item.
    crate::db::write_item_with_content(
        "9904_test",
        "2_current",
        "---\nname: Test\n---\n## AC\n- [ ] First criterion\n- [x] Already done\n",
    );
    let ctx = test_ctx(tmp.path());
    // Checking index 0 (the open item) must succeed and report which
    // criterion was ticked.
    let args = json!({"story_id": "9904_test", "criterion_index": 0});
    let result = tool_check_criterion(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");
    assert!(result.unwrap().contains("Criterion 0 checked"));
}
/// Regression test for bug 514: deleting a story must cancel its pending
/// rate-limit retry timer so the tick loop cannot re-spawn an agent.
///
/// Repro (2026-04-09): `delete_story 478_…` returned success and removed
/// the filesystem shadow, but the timer entry in `.huskies/timers.json`
/// survived. Five minutes later the tick loop fired and re-spawned
/// `coder-1` on the deleted story.
#[tokio::test]
async fn delete_story_cancels_pending_timer() {
use chrono::Utc;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
// Create a story file in the backlog.
let backlog = root.join(".huskies/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(
backlog.join("478_story_rate_limit_repro.md"),
"---\nname: \"Rate Limit Repro\"\n---\n",
)
.unwrap();
let ctx = test_ctx(root);
// Schedule a rate-limit retry timer for the story (simulates the
// auto-scheduler that fires after a rate-limit event).
// Timers are keyed by story ID — presumably how delete_story locates
// the entry to cancel; verify against the TimerStore implementation.
let future_time = Utc::now() + chrono::Duration::minutes(5);
ctx.timer_store
.add("478_story_rate_limit_repro".to_string(), future_time)
.unwrap();
// Sanity: timer is present before deletion.
assert_eq!(ctx.timer_store.list().len(), 1);
// Delete the story.
let result = tool_delete_story(
&json!({"story_id": "478_story_rate_limit_repro"}),
&ctx,
)
.await;
assert!(result.is_ok(), "delete_story failed: {result:?}");
// Timer must be gone — fast-forwarding past the scheduled time should
// return no entries.
assert!(
ctx.timer_store.list().is_empty(),
"timer was not cancelled by delete_story"
);
let far_future = Utc::now() + chrono::Duration::hours(1);
let due = ctx.timer_store.take_due(far_future);
assert!(
due.is_empty(),
"take_due returned a timer for the deleted story: {due:?}"
);
// Filesystem shadow must also be gone.
assert!(
!backlog
.join("478_story_rate_limit_repro.md")
.exists(),
"filesystem shadow was not removed"
);
}
}