huskies: merge 788
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,232 @@
|
||||
//! Story creation and CRDT eviction tools.
|
||||
|
||||
use crate::http::context::AppContext;
|
||||
use crate::http::workflow::create_story_file;
|
||||
use crate::io::story_metadata::check_archived_deps_from_list;
|
||||
use crate::slog_warn;
|
||||
use serde_json::Value;
|
||||
|
||||
pub(crate) fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let name = args
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: name")?;
|
||||
let user_story = args.get("user_story").and_then(|v| v.as_str());
|
||||
let description = args.get("description").and_then(|v| v.as_str());
|
||||
let acceptance_criteria: Vec<String> = args
|
||||
.get("acceptance_criteria")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
||||
.ok_or("Missing required argument: acceptance_criteria")?;
|
||||
if acceptance_criteria.is_empty() {
|
||||
return Err("acceptance_criteria must contain at least one entry".to_string());
|
||||
}
|
||||
const JUNK_AC: &[&str] = &["", "todo", "tbd", "fixme", "xxx", "???"];
|
||||
let all_junk = acceptance_criteria
|
||||
.iter()
|
||||
.all(|ac| JUNK_AC.contains(&ac.trim().to_lowercase().as_str()));
|
||||
if all_junk {
|
||||
return Err(
|
||||
"acceptance_criteria must contain at least one real entry (not just TODO/TBD/FIXME/XXX/???)."
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
let depends_on: Option<Vec<u32>> = args
|
||||
.get("depends_on")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
// Spike 61: write the file only — the filesystem watcher detects the new
|
||||
// .md file in work/1_backlog/ and auto-commits with a deterministic message.
|
||||
let commit = false;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let story_id = create_story_file(
|
||||
&root,
|
||||
name,
|
||||
user_story,
|
||||
description,
|
||||
Some(&acceptance_criteria),
|
||||
depends_on.as_deref(),
|
||||
commit,
|
||||
)?;
|
||||
|
||||
// Bug 503: warn at creation time if any depends_on points at an already-archived story.
|
||||
// Archived = satisfied semantics: the dep will resolve immediately on the next promotion
|
||||
// tick, which is surprising if the archived story was abandoned rather than cleanly done.
|
||||
let archived_deps = depends_on
|
||||
.as_deref()
|
||||
.map(|deps| check_archived_deps_from_list(&root, deps))
|
||||
.unwrap_or_default();
|
||||
if !archived_deps.is_empty() {
|
||||
slog_warn!(
|
||||
"[create-story] Story '{story_id}' depends_on {archived_deps:?} which \
|
||||
are already in 6_archived. The dep will be treated as satisfied on the \
|
||||
next promotion tick. If these deps were abandoned (not cleanly completed), \
|
||||
consider removing the depends_on or keeping the story in backlog manually."
|
||||
);
|
||||
return Ok(format!(
|
||||
"Created story: {story_id}\n\n\
|
||||
WARNING: depends_on {archived_deps:?} point at stories already in \
|
||||
6_archived. These deps are treated as satisfied (archived = satisfied \
|
||||
semantics), so this story may be auto-promoted from backlog immediately. \
|
||||
If the archived deps were abandoned rather than completed, remove the \
|
||||
depends_on or move the story back to backlog manually after promotion."
|
||||
));
|
||||
}
|
||||
|
||||
Ok(format!("Created story: {story_id}"))
|
||||
}
|
||||
|
||||
/// Purge a story from the in-memory CRDT by writing a tombstone op (story 521).
|
||||
///
|
||||
/// This is the eviction primitive for the four-state-machine drift problem
|
||||
/// we hit on 2026-04-09 — when a story gets stuck in the running server's
|
||||
/// in-memory CRDT and can't be cleared by sqlite deletes alone (because the
|
||||
/// in-memory state outlives any pipeline_items / crdt_ops manipulation),
|
||||
/// this tool writes a proper CRDT delete op via `crdt_state::evict_item`.
|
||||
///
|
||||
/// The tombstone op:
|
||||
/// - Marks the in-memory CRDT item as `is_deleted = true` immediately
|
||||
/// (so subsequent `read_all_items` / `read_item` calls skip it)
|
||||
/// - Is persisted to `crdt_ops` so the eviction survives a server restart
|
||||
/// - Drops the in-memory `CONTENT_STORE` entry for the story
|
||||
///
|
||||
/// This tool does NOT touch: running agents, worktrees, the `pipeline_items`
|
||||
/// shadow table, `timers.json`, or filesystem shadows. Compose with
|
||||
/// `stop_agent`, `remove_worktree`, etc. as needed for a full purge — or
|
||||
/// see story 514 (delete_story full cleanup) for a future "do it all" tool.
|
||||
pub(crate) fn tool_purge_story(args: &Value, _ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
crate::crdt_state::evict_item(story_id)?;
|
||||
|
||||
Ok(format!(
|
||||
"Evicted '{story_id}' from in-memory CRDT (tombstone op persisted to crdt_ops; CONTENT_STORE entry dropped)."
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for story creation argument validation and file output.
    //! Each test runs against a fresh tempdir-backed `AppContext`.

    use super::*;
    use crate::http::test_helpers::test_ctx;
    use serde_json::json;

    // Omitting `name` entirely must fail with a "Missing required argument" error.
    #[test]
    fn tool_create_story_missing_name() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(&json!({}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("Missing required argument"));
    }

    // A name with no alphanumeric characters is rejected downstream
    // (by create_story_file's slug validation — error mentions "alphanumeric").
    #[test]
    fn tool_create_story_rejects_empty_name() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(
            &json!({"name": "!!!", "acceptance_criteria": ["AC1"]}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("alphanumeric"));
    }

    // acceptance_criteria is required: absent → error naming the argument.
    #[test]
    fn tool_create_story_rejects_missing_acceptance_criteria() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(&json!({"name": "My Story"}), &ctx);
        assert!(result.is_err());
        assert!(
            result.unwrap_err().contains("acceptance_criteria"),
            "error should mention acceptance_criteria"
        );
    }

    // An empty acceptance_criteria array is rejected (at least one entry required).
    #[test]
    fn tool_create_story_rejects_empty_acceptance_criteria() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(
            &json!({"name": "My Story", "acceptance_criteria": []}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(
            result.unwrap_err().contains("acceptance_criteria"),
            "error should mention acceptance_criteria"
        );
    }

    // Happy path: one real acceptance criterion is sufficient.
    #[test]
    fn tool_create_story_accepts_single_criterion() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(
            &json!({"name": "Single Criterion Story", "acceptance_criteria": ["It works"]}),
            &ctx,
        );
        assert!(result.is_ok(), "expected ok: {result:?}");
        assert!(result.unwrap().contains("Created story:"));
    }

    // All-placeholder ACs (TODO/TBD/…) are rejected by the JUNK_AC filter.
    #[test]
    fn tool_create_story_rejects_all_junk_acceptance_criteria() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(
            &json!({"name": "Junk Story", "acceptance_criteria": ["TODO", "TBD"]}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(
            result.unwrap_err().contains("real entry"),
            "error should mention real entry"
        );
    }

    // A single real AC alongside placeholders is enough — junk filter only
    // rejects when *every* entry is junk.
    #[test]
    fn tool_create_story_accepts_mixed_junk_and_real_acceptance_criteria() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_story(
            &json!({"name": "Mixed Story", "acceptance_criteria": ["TODO", "Real AC"]}),
            &ctx,
        );
        assert!(result.is_ok(), "expected ok for mixed AC: {result:?}");
        assert!(result.unwrap().contains("Created story:"));
    }

    // End-to-end content check: the optional description must land in the
    // story file as a "## Description" section with the provided text.
    #[test]
    fn tool_create_story_description_is_written_to_file() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());

        let result = tool_create_story(
            &json!({
                "name": "Story With Description",
                "description": "This is the background context.",
                "acceptance_criteria": ["Described well"]
            }),
            &ctx,
        )
        .unwrap();
        assert!(result.contains("Created story:"));

        // Recover the generated story id from the success message, then read
        // the stored content back through the db layer.
        let story_id = result
            .trim_start_matches("Created story: ")
            .trim()
            .to_string();
        let content = crate::db::read_content(&story_id).expect("story content should exist");
        assert!(
            content.contains("## Description"),
            "Description section missing from story: {content}"
        );
        assert!(
            content.contains("This is the background context."),
            "Description text missing from story: {content}"
        );
    }
}
|
||||
@@ -0,0 +1,385 @@
|
||||
//! Story acceptance and full-cleanup deletion tools.
|
||||
|
||||
use crate::agents::{feature_branch_has_unmerged_changes, move_story_to_done};
|
||||
use crate::http::context::AppContext;
|
||||
use crate::slog_warn;
|
||||
use serde_json::Value;
|
||||
use std::fs;
|
||||
|
||||
pub(crate) fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
// Bug 226: Refuse to accept if the feature branch has unmerged code.
|
||||
// The code must be squash-merged via merge_agent_work first.
|
||||
if feature_branch_has_unmerged_changes(&project_root, story_id) {
|
||||
return Err(format!(
|
||||
"Cannot accept story '{story_id}': feature branch 'feature/story-{story_id}' \
|
||||
has unmerged changes. Use merge_agent_work to squash-merge the code into \
|
||||
master first."
|
||||
));
|
||||
}
|
||||
|
||||
move_story_to_done(story_id)?;
|
||||
ctx.services.agents.remove_agents_for_story(story_id);
|
||||
|
||||
Ok(format!(
|
||||
"Story '{story_id}' accepted, moved to done/, and committed to master."
|
||||
))
|
||||
}
|
||||
|
||||
/// Full-cleanup story deletion across every subsystem (story 514).
///
/// Teardown is strictly ordered and mostly best-effort: timers are cancelled
/// first so the tick loop cannot re-spawn an agent mid-teardown, then agents
/// are stopped, pool entries and the worktree removed, a CRDT tombstone
/// written, db rows deleted, and finally the filesystem shadow file removed.
/// Non-fatal step failures are accumulated in `failed_steps` and reported as
/// a "partially deleted" error at the end.
///
/// # Errors
/// - missing `story_id` argument
/// - story not found anywhere (no db row, no shadow file, no pending timer)
/// - one or more cleanup steps failed (partial deletion)
pub(crate) async fn tool_delete_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;

    let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
    // Collects human-readable descriptions of failed best-effort steps.
    let mut failed_steps: Vec<String> = Vec::new();

    // 0. Cancel any pending rate-limit retry timers for this story (bug 514).
    // Must happen before stopping agents so the tick loop cannot re-spawn
    // an agent after we tear everything else down.
    let timer_removed = ctx.timer_store.remove(story_id);
    if timer_removed {
        slog_warn!("[delete_story] Cancelled pending timer for '{story_id}'");
    } else {
        slog_warn!("[delete_story] No pending timer found for '{story_id}'");
    }

    // 1. Stop any running agents for this story (best-effort).
    //    Failures are logged and recorded but do not abort the teardown.
    if let Ok(agents) = ctx.services.agents.list_agents() {
        for agent in agents.iter().filter(|a| a.story_id == story_id) {
            match ctx
                .services
                .agents
                .stop_agent(&project_root, story_id, &agent.agent_name)
                .await
            {
                Ok(()) => {
                    slog_warn!(
                        "[delete_story] Stopped agent '{}' for '{story_id}'",
                        agent.agent_name
                    );
                }
                Err(e) => {
                    slog_warn!(
                        "[delete_story] Failed to stop agent '{}' for '{story_id}': {e}",
                        agent.agent_name
                    );
                    failed_steps.push(format!("stop_agent({}): {e}", agent.agent_name));
                }
            }
        }
    }

    // 2. Remove agent pool entries.
    let removed_count = ctx.services.agents.remove_agents_for_story(story_id);
    slog_warn!("[delete_story] Removed {removed_count} agent pool entries for '{story_id}'");

    // 3. Remove worktree (best-effort). Skipped entirely when the project
    //    config cannot be loaded; removal errors are logged but not recorded
    //    as failed steps.
    if let Ok(config) = crate::config::ProjectConfig::load(&project_root) {
        match crate::worktree::remove_worktree_by_story_id(&project_root, story_id, &config).await {
            Ok(()) => slog_warn!("[delete_story] Removed worktree for '{story_id}'"),
            Err(e) => slog_warn!("[delete_story] Worktree removal for '{story_id}': {e}"),
        }
    }

    // 4. Write a CRDT tombstone op so the story is evicted from the in-memory
    // state machine and the deletion is persisted to crdt_ops (survives
    // restart). Best-effort: legacy filesystem-only stories may not have a
    // CRDT entry, so a "not found" error is expected and non-fatal.
    match crate::crdt_state::evict_item(story_id) {
        Ok(()) => {
            slog_warn!(
                "[delete_story] Evicted '{story_id}' from CRDT (tombstone persisted to crdt_ops)"
            );
        }
        Err(e) => {
            slog_warn!("[delete_story] CRDT eviction for '{story_id}': {e}");
        }
    }

    // 5. Delete from database content store and shadow table.
    //    `found_in_db` is captured *before* deletion so the final
    //    "not found anywhere" check can use it.
    let found_in_db = crate::db::read_content(story_id).is_some()
        || crate::pipeline_state::read_typed(story_id)
            .ok()
            .flatten()
            .is_some();
    crate::db::delete_item(story_id);
    slog_warn!("[delete_story] Deleted '{story_id}' from content store / shadow table");

    // 6. Remove the filesystem shadow file from work/N_stage/.
    //    A story lives in at most one stage dir, so we stop at the first hit.
    let sk = project_root.join(".huskies").join("work");
    let stage_dirs = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    let mut deleted_from_fs = false;
    for stage in &stage_dirs {
        let path = sk.join(stage).join(format!("{story_id}.md"));
        if path.exists() {
            match fs::remove_file(&path) {
                Ok(()) => {
                    slog_warn!(
                        "[delete_story] Deleted filesystem shadow '{story_id}' from work/{stage}/"
                    );
                    deleted_from_fs = true;
                }
                Err(e) => {
                    slog_warn!(
                        "[delete_story] Failed to delete filesystem shadow '{story_id}' from work/{stage}/: {e}"
                    );
                    failed_steps.push(format!("delete_filesystem({stage}): {e}"));
                }
            }
            break;
        }
    }

    // The story existed nowhere (no db row, no shadow file, not even a
    // pending timer) — report a clean "not found" instead of a silent ok.
    if !found_in_db && !deleted_from_fs && !timer_removed {
        return Err(format!(
            "Story '{story_id}' not found in any pipeline stage."
        ));
    }

    // Some best-effort steps failed: surface them so the caller can retry
    // or finish the cleanup manually.
    if !failed_steps.is_empty() {
        return Err(format!(
            "Story '{story_id}' partially deleted. Failed steps: {}.",
            failed_steps.join("; ")
        ));
    }

    Ok(format!("Story '{story_id}' deleted from pipeline."))
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Tests for story acceptance and full-cleanup deletion.
    //! Git-dependent tests build a throwaway repo via `setup_git_repo_in`.

    use super::*;
    use crate::http::test_helpers::test_ctx;
    use serde_json::json;

    // Initialize a git repo with identity config and one empty commit so
    // branch operations (checkout, merge checks) work inside `dir`.
    fn setup_git_repo_in(dir: &std::path::Path) {
        std::process::Command::new("git")
            .args(["init"])
            .current_dir(dir)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(dir)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(dir)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(dir)
            .output()
            .unwrap();
    }

    // Deleting without a story_id argument must fail and name the argument.
    #[tokio::test]
    async fn tool_delete_story_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_delete_story(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    // A story with no db row, no shadow file, and no timer is "not found".
    #[tokio::test]
    async fn tool_delete_story_not_found_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_delete_story(&json!({"story_id": "99_nonexistent"}), &ctx).await;
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .contains("not found in any pipeline stage")
        );
    }

    // Filesystem shadow in 1_backlog/ is found and removed by step 6.
    #[tokio::test]
    async fn tool_delete_story_deletes_file_from_backlog() {
        let tmp = tempfile::tempdir().unwrap();
        let backlog = tmp.path().join(".huskies/work/1_backlog");
        fs::create_dir_all(&backlog).unwrap();
        let story_file = backlog.join("10_story_cleanup.md");
        fs::write(&story_file, "---\nname: Cleanup\n---\n").unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_delete_story(&json!({"story_id": "10_story_cleanup"}), &ctx).await;
        assert!(result.is_ok(), "expected ok: {result:?}");
        assert!(!story_file.exists(), "story file should be deleted");
    }

    // Same as above, but for a story in 2_current/ — the stage scan must
    // find it in later stage dirs too.
    #[tokio::test]
    async fn tool_delete_story_deletes_file_from_current() {
        let tmp = tempfile::tempdir().unwrap();
        let current = tmp.path().join(".huskies/work/2_current");
        fs::create_dir_all(&current).unwrap();
        let story_file = current.join("11_story_active.md");
        fs::write(&story_file, "---\nname: Active\n---\n").unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_delete_story(&json!({"story_id": "11_story_active"}), &ctx).await;
        assert!(result.is_ok(), "expected ok: {result:?}");
        assert!(!story_file.exists(), "story file should be deleted");
    }

    // Accepting without a story_id argument must fail and name the argument.
    #[test]
    fn tool_accept_story_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_accept_story(&json!({}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    // With a git repo but no story file in current/, move_story_to_done fails.
    #[test]
    fn tool_accept_story_nonexistent_story_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        let ctx = test_ctx(tmp.path());
        // No story file in current/ — should fail
        let result = tool_accept_story(&json!({"story_id": "99_nonexistent"}), &ctx);
        assert!(result.is_err());
    }

    // Bug 226 guard: a feature branch with commits not on master blocks
    // acceptance even though the story file itself is in place.
    #[test]
    fn tool_accept_story_refuses_when_feature_branch_has_unmerged_code() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());

        // Create a feature branch with code changes.
        std::process::Command::new("git")
            .args(["checkout", "-b", "feature/story-50_story_test"])
            .current_dir(tmp.path())
            .output()
            .unwrap();
        std::fs::write(tmp.path().join("feature.rs"), "fn main() {}").unwrap();
        std::process::Command::new("git")
            .args(["add", "."])
            .current_dir(tmp.path())
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["commit", "-m", "add feature"])
            .current_dir(tmp.path())
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["checkout", "master"])
            .current_dir(tmp.path())
            .output()
            .unwrap();

        // Create story file in current/ so move_story_to_done would work.
        let current_dir = tmp.path().join(".huskies/work/2_current");
        std::fs::create_dir_all(&current_dir).unwrap();
        std::fs::write(
            current_dir.join("50_story_test.md"),
            "---\nname: Test\n---\n",
        )
        .unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_accept_story(&json!({"story_id": "50_story_test"}), &ctx);
        assert!(
            result.is_err(),
            "should refuse when feature branch has unmerged code"
        );
        let err = result.unwrap_err();
        assert!(
            err.contains("unmerged"),
            "error should mention unmerged changes: {err}"
        );
    }

    // Happy path: no feature branch exists for the story, so the unmerged
    // check passes and acceptance completes.
    #[test]
    fn tool_accept_story_succeeds_when_no_feature_branch() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());

        // Create story file in current/ (no feature branch).
        let current_dir = tmp.path().join(".huskies/work/2_current");
        std::fs::create_dir_all(&current_dir).unwrap();
        let content = "---\nname: No Branch\n---\n";
        std::fs::write(current_dir.join("51_story_no_branch.md"), content).unwrap();
        crate::db::ensure_content_store();
        crate::db::write_item_with_content("51_story_no_branch", "2_current", content);

        let ctx = test_ctx(tmp.path());
        let result = tool_accept_story(&json!({"story_id": "51_story_no_branch"}), &ctx);
        assert!(
            result.is_ok(),
            "should succeed when no feature branch: {result:?}"
        );
    }

    // Step 0 regression test: deleting a story must also cancel its pending
    // rate-limit retry timer so the tick loop cannot resurrect it.
    #[tokio::test]
    async fn delete_story_cancels_pending_timer() {
        use chrono::Utc;

        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();

        // Create a story file in the backlog.
        let backlog = root.join(".huskies/work/1_backlog");
        fs::create_dir_all(&backlog).unwrap();
        fs::write(
            backlog.join("478_story_rate_limit_repro.md"),
            "---\nname: \"Rate Limit Repro\"\n---\n",
        )
        .unwrap();

        let ctx = test_ctx(root);

        // Schedule a rate-limit retry timer for the story (simulates the
        // auto-scheduler that fires after a rate-limit event).
        let future_time = Utc::now() + chrono::Duration::minutes(5);
        ctx.timer_store
            .add("478_story_rate_limit_repro".to_string(), future_time)
            .unwrap();

        // Sanity: timer is present before deletion.
        assert_eq!(ctx.timer_store.list().len(), 1);

        // Delete the story.
        let result =
            tool_delete_story(&json!({"story_id": "478_story_rate_limit_repro"}), &ctx).await;
        assert!(result.is_ok(), "delete_story failed: {result:?}");

        // Timer must be gone — fast-forwarding past the scheduled time should
        // return no entries.
        assert!(
            ctx.timer_store.list().is_empty(),
            "timer was not cancelled by delete_story"
        );
        let far_future = Utc::now() + chrono::Duration::hours(1);
        let due = ctx.timer_store.take_due(far_future);
        assert!(
            due.is_empty(),
            "take_due returned a timer for the deleted story: {due:?}"
        );

        // Filesystem shadow must also be gone.
        assert!(
            !backlog.join("478_story_rate_limit_repro.md").exists(),
            "filesystem shadow was not removed"
        );
    }
}
|
||||
@@ -0,0 +1,11 @@
|
||||
//! Story creation, listing, update, and lifecycle MCP tools.
|
||||
|
||||
mod create;
|
||||
mod delete;
|
||||
mod query;
|
||||
mod update;
|
||||
|
||||
pub(crate) use create::{tool_create_story, tool_purge_story};
|
||||
pub(crate) use delete::{tool_accept_story, tool_delete_story};
|
||||
pub(crate) use query::{tool_get_pipeline_status, tool_list_upcoming, tool_validate_stories};
|
||||
pub(crate) use update::{tool_unblock_story, tool_update_story};
|
||||
@@ -0,0 +1,241 @@
|
||||
//! Read-only story query tools: validation, listing, and pipeline status.
|
||||
|
||||
use crate::http::context::AppContext;
|
||||
use crate::http::workflow::{load_pipeline_state, load_upcoming_stories, validate_story_dirs};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
pub(crate) fn tool_validate_stories(ctx: &AppContext) -> Result<String, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let results = validate_story_dirs(&root)?;
|
||||
serde_json::to_string_pretty(&json!(
|
||||
results
|
||||
.iter()
|
||||
.map(|r| json!({
|
||||
"story_id": r.story_id,
|
||||
"valid": r.valid,
|
||||
"error": r.error,
|
||||
}))
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(crate) fn tool_list_upcoming(ctx: &AppContext) -> Result<String, String> {
|
||||
let stories = load_upcoming_stories(ctx)?;
|
||||
serde_json::to_string_pretty(&json!(
|
||||
stories
|
||||
.iter()
|
||||
.map(|s| json!({
|
||||
"story_id": s.story_id,
|
||||
"name": s.name,
|
||||
"error": s.error,
|
||||
}))
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(crate) fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, String> {
|
||||
let state = load_pipeline_state(ctx)?;
|
||||
let running_merges = ctx.services.agents.list_running_merges()?;
|
||||
|
||||
fn map_items(items: &[crate::http::workflow::UpcomingStory], stage: &str) -> Vec<Value> {
|
||||
items
|
||||
.iter()
|
||||
.map(|s| {
|
||||
let mut item = json!({
|
||||
"story_id": s.story_id,
|
||||
"name": s.name,
|
||||
"stage": stage,
|
||||
"agent": s.agent.as_ref().map(|a| json!({
|
||||
"agent_name": a.agent_name,
|
||||
"model": a.model,
|
||||
"status": a.status,
|
||||
})),
|
||||
});
|
||||
// Include blocked/retry_count when present so callers can
|
||||
// identify stories stuck in the pipeline.
|
||||
if let Some(true) = s.blocked {
|
||||
item["blocked"] = json!(true);
|
||||
}
|
||||
if let Some(rc) = s.retry_count {
|
||||
item["retry_count"] = json!(rc);
|
||||
}
|
||||
if let Some(ref mf) = s.merge_failure {
|
||||
item["merge_failure"] = json!(mf);
|
||||
}
|
||||
item
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
let mut active: Vec<Value> = Vec::new();
|
||||
active.extend(map_items(&state.current, "current"));
|
||||
active.extend(map_items(&state.qa, "qa"));
|
||||
active.extend(map_items(&state.merge, "merge"));
|
||||
active.extend(map_items(&state.done, "done"));
|
||||
|
||||
let backlog: Vec<Value> = state
|
||||
.backlog
|
||||
.iter()
|
||||
.map(|s| json!({ "story_id": s.story_id, "name": s.name }))
|
||||
.collect();
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"active": active,
|
||||
"backlog": backlog,
|
||||
"backlog_count": backlog.len(),
|
||||
"deterministic_merges_in_flight": running_merges,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Tests for the read-only query tools. NOTE: the CRDT/content store is
    //! process-global, so tests use distinctive ids and filter results rather
    //! than asserting exact counts.

    use super::*;
    use crate::http::test_helpers::test_ctx;
    use serde_json::json;

    // An empty project must still produce a well-formed JSON array.
    #[test]
    fn tool_validate_stories_empty_project() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_validate_stories(&ctx).unwrap();
        // CRDT is global; other tests may have inserted items.
        // Just verify it parses without error.
        let _parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
    }

    // Round trip: create a story, then find it in the upcoming list.
    #[test]
    fn tool_create_story_and_list_upcoming() {
        let tmp = tempfile::tempdir().unwrap();
        // No git repo needed: spike 61 — create_story just writes the file;
        // the filesystem watcher handles the commit asynchronously.
        let ctx = test_ctx(tmp.path());

        let result = super::super::tool_create_story(
            &json!({"name": "Test Story", "acceptance_criteria": ["AC1", "AC2"]}),
            &ctx,
        )
        .unwrap();
        assert!(result.contains("Created story:"));

        // List should return it (CRDT is global, so filter for our story)
        let list = tool_list_upcoming(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&list).unwrap();
        assert!(
            parsed.iter().any(|s| s["name"] == "Test Story"),
            "expected 'Test Story' in upcoming list: {parsed:?}"
        );
    }

    // Seed one story per stage, then check the status JSON partitions them
    // into active (current/qa/merge/done) and backlog.
    #[test]
    fn tool_get_pipeline_status_returns_structured_response() {
        let tmp = tempfile::tempdir().unwrap();

        crate::db::ensure_content_store();
        for (stage, id, name) in &[
            ("1_backlog", "9910_story_upcoming", "Upcoming Story"),
            ("2_current", "9920_story_current", "Current Story"),
            ("3_qa", "9930_story_qa", "QA Story"),
            ("4_merge", "9940_story_merge", "Merge Story"),
            ("5_done", "9950_story_done", "Done Story"),
        ] {
            crate::db::write_item_with_content(id, stage, &format!("---\nname: \"{name}\"\n---\n"));
        }

        let ctx = test_ctx(tmp.path());
        let result = tool_get_pipeline_status(&ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();

        // Active stages include current, qa, merge, done
        let active = parsed["active"].as_array().unwrap();
        let stages: Vec<&str> = active
            .iter()
            .map(|i| i["stage"].as_str().unwrap())
            .collect();
        assert!(stages.contains(&"current"));
        assert!(stages.contains(&"qa"));
        assert!(stages.contains(&"merge"));
        assert!(stages.contains(&"done"));

        // Backlog should contain our item
        let backlog = parsed["backlog"].as_array().unwrap();
        assert!(
            backlog
                .iter()
                .any(|b| b["story_id"] == "9910_story_upcoming"),
            "expected 9910_story_upcoming in backlog: {backlog:?}"
        );
    }

    // An injected running agent must surface under the story's "agent" key.
    #[test]
    fn tool_get_pipeline_status_includes_agent_assignment() {
        let tmp = tempfile::tempdir().unwrap();

        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9921_story_active",
            "2_current",
            "---\nname: \"Active Story\"\n---\n",
        );

        let ctx = test_ctx(tmp.path());
        ctx.services.agents.inject_test_agent(
            "9921_story_active",
            "coder-1",
            crate::agents::AgentStatus::Running,
        );

        let result = tool_get_pipeline_status(&ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();

        let active = parsed["active"].as_array().unwrap();
        let item = active
            .iter()
            .find(|i| i["story_id"] == "9921_story_active")
            .expect("expected 9921_story_active in active items");
        assert_eq!(item["stage"], "current");
        assert!(!item["agent"].is_null(), "agent should be present");
        assert_eq!(item["agent"]["agent_name"], "coder-1");
        assert_eq!(item["agent"]["status"], "running");
    }

    // Well-formed front matter + AC section must validate as valid=true.
    #[test]
    fn tool_validate_stories_with_valid_story() {
        let tmp = tempfile::tempdir().unwrap();

        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "9907_test",
            "2_current",
            "---\nname: \"Valid Story\"\n---\n## AC\n- [ ] First\n",
        );

        let ctx = test_ctx(tmp.path());
        let result = tool_validate_stories(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
        let item = parsed
            .iter()
            .find(|v| v["story_id"] == "9907_test")
            .expect("expected 9907_test in validation results");
        assert_eq!(item["valid"], true);
    }

    // Content without YAML front matter must validate as valid=false.
    #[test]
    fn tool_validate_stories_with_invalid_front_matter() {
        let tmp = tempfile::tempdir().unwrap();

        crate::db::ensure_content_store();
        crate::db::write_item_with_content("9908_test", "2_current", "## No front matter at all\n");

        let ctx = test_ctx(tmp.path());
        let result = tool_validate_stories(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
        let item = parsed
            .iter()
            .find(|v| v["story_id"] == "9908_test")
            .expect("expected 9908_test in validation results");
        assert_eq!(item["valid"], false);
    }
}
|
||||
@@ -0,0 +1,193 @@
|
||||
//! Story field update and unblock tools.
|
||||
|
||||
use crate::http::context::AppContext;
|
||||
use crate::http::workflow::update_story_in_file;
|
||||
use crate::io::story_metadata::check_archived_deps;
|
||||
use crate::slog_warn;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub(crate) fn tool_update_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let user_story = args.get("user_story").and_then(|v| v.as_str());
|
||||
let description = args.get("description").and_then(|v| v.as_str());
|
||||
|
||||
// Collect front matter fields: explicit `name`/`agent` params + arbitrary `front_matter` object.
|
||||
// Values are passed as serde_json::Value so native booleans, numbers, and arrays are
|
||||
// preserved and encoded correctly as unquoted YAML by update_story_in_file.
|
||||
let mut front_matter: HashMap<String, Value> = HashMap::new();
|
||||
if let Some(name) = args.get("name").and_then(|v| v.as_str()) {
|
||||
front_matter.insert("name".to_string(), Value::String(name.to_string()));
|
||||
}
|
||||
if let Some(agent) = args.get("agent").and_then(|v| v.as_str()) {
|
||||
front_matter.insert("agent".to_string(), Value::String(agent.to_string()));
|
||||
}
|
||||
if let Some(obj) = args.get("front_matter").and_then(|v| v.as_object()) {
|
||||
for (k, v) in obj {
|
||||
front_matter.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
let front_matter_opt = if front_matter.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(&front_matter)
|
||||
};
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
update_story_in_file(&root, story_id, user_story, description, front_matter_opt)?;
|
||||
|
||||
// Bug 503: warn if any depends_on in the (now updated) story points at an archived story.
|
||||
let stage = crate::pipeline_state::read_typed(story_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|i| i.stage.dir_name().to_string())
|
||||
.unwrap_or_else(|| "1_backlog".to_string());
|
||||
let archived_deps = check_archived_deps(&root, &stage, story_id);
|
||||
if !archived_deps.is_empty() {
|
||||
slog_warn!(
|
||||
"[update-story] Story '{story_id}' depends_on {archived_deps:?} which \
|
||||
are already in 6_archived. The dep will be treated as satisfied on the \
|
||||
next promotion tick. If these deps were abandoned (not cleanly completed), \
|
||||
consider removing the depends_on or keeping the story in backlog manually."
|
||||
);
|
||||
return Ok(format!(
|
||||
"Updated story '{story_id}'.\n\n\
|
||||
WARNING: depends_on {archived_deps:?} point at stories already in \
|
||||
6_archived. These deps are treated as satisfied (archived = satisfied \
|
||||
semantics), so this story may be auto-promoted from backlog immediately. \
|
||||
If the archived deps were abandoned rather than completed, remove the \
|
||||
depends_on or move the story back to backlog manually after promotion."
|
||||
));
|
||||
}
|
||||
|
||||
Ok(format!("Updated story '{story_id}'."))
|
||||
}
|
||||
|
||||
pub(crate) fn tool_unblock_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
|
||||
// Extract the numeric prefix (e.g. "42" from "42" or from legacy "42_story_foo").
|
||||
let story_number = story_id
|
||||
.split('_')
|
||||
.next()
|
||||
.filter(|s| !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()))
|
||||
.ok_or_else(|| {
|
||||
format!("Invalid story_id format: '{story_id}'. Expected a numeric ID (e.g. '42').")
|
||||
})?;
|
||||
|
||||
Ok(crate::chat::commands::unblock::unblock_by_number(
|
||||
&root,
|
||||
story_number,
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::http::test_helpers::test_ctx;
|
||||
use serde_json::json;
|
||||
use std::fs;
|
||||
|
||||
fn setup_story_for_update(dir: &std::path::Path, story_id: &str, content: &str) {
|
||||
let current = dir.join(".huskies/work/2_current");
|
||||
fs::create_dir_all(¤t).unwrap();
|
||||
fs::write(current.join(format!("{story_id}.md")), content).unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_content(story_id, content);
|
||||
}
|
||||
|
||||
#[test]
fn tool_update_story_front_matter_json_bool_written_unquoted() {
    let tmp = tempfile::tempdir().unwrap();
    setup_story_for_update(
        tmp.path(),
        "504_bool_test",
        "---\nname: Bool Test\n---\n\nNo sections.\n",
    );
    let ctx = test_ctx(tmp.path());

    // A JSON boolean in front_matter must survive as a bare YAML bool.
    let args = json!({"story_id": "504_bool_test", "front_matter": {"blocked": false}});
    let result = tool_update_story(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");

    let content = crate::db::read_content("504_bool_test").unwrap();
    let unquoted = content.contains("blocked: false");
    let quoted = content.contains("blocked: \"false\"");
    assert!(unquoted, "bool should be unquoted: {content}");
    assert!(!quoted, "bool must not be quoted: {content}");
}
|
||||
|
||||
#[test]
fn tool_update_story_front_matter_json_number_written_unquoted() {
    let tmp = tempfile::tempdir().unwrap();
    setup_story_for_update(
        tmp.path(),
        "504_num_test",
        "---\nname: Num Test\n---\n\nNo sections.\n",
    );
    let ctx = test_ctx(tmp.path());

    // A JSON number in front_matter must survive as a bare YAML scalar.
    let args = json!({"story_id": "504_num_test", "front_matter": {"retry_count": 3}});
    let result = tool_update_story(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");

    let content = crate::db::read_content("504_num_test").unwrap();
    let unquoted = content.contains("retry_count: 3");
    let quoted = content.contains("retry_count: \"3\"");
    assert!(unquoted, "number should be unquoted: {content}");
    assert!(!quoted, "number must not be quoted: {content}");
}
|
||||
|
||||
#[test]
fn tool_update_story_front_matter_json_array_written_as_yaml_sequence() {
    let tmp = tempfile::tempdir().unwrap();
    setup_story_for_update(
        tmp.path(),
        "504_arr_test",
        "---\nname: Array Test\n---\n\nNo sections.\n",
    );
    let ctx = test_ctx(tmp.path());

    // A JSON array in front_matter must come out as an inline YAML sequence.
    let args = json!({"story_id": "504_arr_test", "front_matter": {"depends_on": [490, 491]}});
    let result = tool_update_story(&args, &ctx);
    assert!(result.is_ok(), "Expected ok: {result:?}");

    let content = crate::db::read_content("504_arr_test").unwrap();
    // YAML inline sequences use spaces after commas
    let as_sequence = content.contains("depends_on: [490, 491]");
    let quoted = content.contains("depends_on: \"");
    assert!(as_sequence, "array should be unquoted YAML: {content}");
    assert!(!quoted, "array must not be quoted: {content}");

    // The YAML must be parseable as a vec
    let meta = crate::io::story_metadata::parse_front_matter(&content)
        .expect("front matter should parse");
    assert_eq!(meta.depends_on, Some(vec![490, 491]));
}
|
||||
}
|
||||
Reference in New Issue
Block a user