Spike 61: filesystem watcher and UI simplification

Add notify-based filesystem watcher for .story_kit/work/ that
auto-commits changes with deterministic messages and broadcasts
events over WebSocket. Push full pipeline state (Upcoming, Current,
QA, To Merge) to frontend on connect and after every watcher event.

Strip dead UI: remove ReviewPanel, GatePanel, TodoPanel,
UpcomingPanel and all associated REST polling. Replace with 4
generic StagePanel components driven by WebSocket. Simplify
AgentPanel to roster-only.

Delete all 11 workflow HTTP endpoints and 16 request/response types
from the server. Clean dead code from workflow module. MCP tools
call Rust functions directly and need none of the HTTP layer.

Net: ~4,100 lines deleted, ~400 added.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Dave
2026-02-20 19:39:19 +00:00
parent 65b104edc5
commit 810608d3d8
29 changed files with 1041 additions and 4526 deletions

View File

@@ -753,42 +753,6 @@ pub struct MergeReport {
pub story_archived: bool,
}
/// Stage one or more file paths and create a deterministic commit in the given git root.
///
/// Pass deleted paths too so git stages their removal alongside any new files.
pub fn git_stage_and_commit(
    git_root: &Path,
    paths: &[&Path],
    message: &str,
) -> Result<(), String> {
    // Run a prepared git command; on failure surface stderr in the error text.
    let run = |mut cmd: Command, label: &str| -> Result<(), String> {
        let out = cmd.output().map_err(|e| format!("{label}: {e}"))?;
        if out.status.success() {
            Ok(())
        } else {
            Err(format!(
                "{label} failed: {}",
                String::from_utf8_lossy(&out.stderr)
            ))
        }
    };

    // `git add` stages both new content and deletions for the given paths.
    let mut add = Command::new("git");
    add.arg("add").current_dir(git_root);
    for p in paths {
        add.arg(p.to_string_lossy().as_ref());
    }
    run(add, "git add")?;

    // Commit with the caller-supplied (deterministic) message.
    let mut commit = Command::new("git");
    commit.args(["commit", "-m", message]).current_dir(git_root);
    run(commit, "git commit")
}
/// Determine the work item type from its ID (new naming: `{N}_{type}_{slug}`).
/// Returns "bug", "spike", or "story".
#[allow(dead_code)]
@@ -850,12 +814,7 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
source_dir.display()
);
let msg = format!("story-kit: start {story_id}");
git_stage_and_commit(
project_root,
&[current_path.as_path(), source_path.as_path()],
&msg,
)
Ok(())
}
/// Move a story from `work/2_current/` to `work/5_archived/` and auto-commit.
@@ -899,12 +858,7 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
};
eprintln!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_archived/");
let msg = format!("story-kit: accept story {story_id}");
git_stage_and_commit(
project_root,
&[archived_path.as_path(), source_path.as_path()],
&msg,
)
Ok(())
}
/// Move a story/bug from `work/2_current/` to `work/4_merge/` and auto-commit.
@@ -935,12 +889,7 @@ pub fn move_story_to_merge(project_root: &Path, story_id: &str) -> Result<(), St
eprintln!("[lifecycle] Moved '{story_id}' from work/2_current/ to work/4_merge/");
let msg = format!("story-kit: queue {story_id} for merge");
git_stage_and_commit(
project_root,
&[merge_path.as_path(), current_path.as_path()],
&msg,
)
Ok(())
}
/// Move a story/bug from `work/2_current/` to `work/3_qa/` and auto-commit.
@@ -971,12 +920,7 @@ pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), Strin
eprintln!("[lifecycle] Moved '{story_id}' from work/2_current/ to work/3_qa/");
let msg = format!("story-kit: queue {story_id} for QA");
git_stage_and_commit(
project_root,
&[qa_path.as_path(), current_path.as_path()],
&msg,
)
Ok(())
}
/// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_archived/` and auto-commit.
@@ -1015,12 +959,7 @@ pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), Str
"[lifecycle] Closed bug '{bug_id}' → work/5_archived/"
);
let msg = format!("story-kit: close bug {bug_id}");
git_stage_and_commit(
project_root,
&[archive_path.as_path(), source_path.as_path()],
&msg,
)
Ok(())
}
// ── Acceptance-gate helpers ───────────────────────────────────────────────────
@@ -1634,6 +1573,7 @@ mod tests {
}
// ── move_story_to_current tests ────────────────────────────────────────────
// No git repo needed: the watcher handles commits asynchronously.
fn init_git_repo(repo: &std::path::Path) {
Command::new("git")
@@ -1659,179 +1599,86 @@ mod tests {
}
#[test]
fn move_story_to_current_moves_file_and_commits() {
fn move_story_to_current_moves_file() {
use std::fs;
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let upcoming = repo.join(".story_kit/work/1_upcoming");
let current_dir = repo.join(".story_kit/work/2_current");
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let upcoming = root.join(".story_kit/work/1_upcoming");
let current = root.join(".story_kit/work/2_current");
fs::create_dir_all(&upcoming).unwrap();
fs::create_dir_all(&current_dir).unwrap();
fs::create_dir_all(&current).unwrap();
fs::write(upcoming.join("10_story_foo.md"), "test").unwrap();
let story_file = upcoming.join("10_story_my_story.md");
fs::write(&story_file, "---\nname: Test\ntest_plan: pending\n---\n").unwrap();
move_story_to_current(root, "10_story_foo").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add story"])
.current_dir(repo)
.output()
.unwrap();
move_story_to_current(repo, "10_story_my_story").unwrap();
assert!(!story_file.exists(), "upcoming file should be gone");
assert!(
current_dir.join("10_story_my_story.md").exists(),
"current/ file should exist"
);
assert!(!upcoming.join("10_story_foo.md").exists());
assert!(current.join("10_story_foo.md").exists());
}
#[test]
fn move_story_to_current_is_idempotent_when_already_current() {
use std::fs;
use tempfile::tempdir;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("11_story_foo.md"), "test").unwrap();
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let current_dir = repo.join(".story_kit/work/2_current");
fs::create_dir_all(&current_dir).unwrap();
fs::write(
current_dir.join("11_story_my_story.md"),
"---\nname: Test\ntest_plan: pending\n---\n",
)
.unwrap();
// Should succeed without error even though there's nothing to move
move_story_to_current(repo, "11_story_my_story").unwrap();
assert!(current_dir.join("11_story_my_story.md").exists());
move_story_to_current(root, "11_story_foo").unwrap();
assert!(current.join("11_story_foo.md").exists());
}
#[test]
fn move_story_to_current_noop_when_not_in_upcoming() {
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
// Story doesn't exist anywhere — should return Ok (lenient)
let result = move_story_to_current(repo, "99_missing");
assert!(result.is_ok(), "should return Ok when story is not found");
let tmp = tempfile::tempdir().unwrap();
assert!(move_story_to_current(tmp.path(), "99_missing").is_ok());
}
#[test]
fn move_bug_to_current_moves_from_bugs_dir() {
fn move_bug_to_current_moves_from_upcoming() {
use std::fs;
use tempfile::tempdir;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let upcoming = root.join(".story_kit/work/1_upcoming");
let current = root.join(".story_kit/work/2_current");
fs::create_dir_all(&upcoming).unwrap();
fs::create_dir_all(&current).unwrap();
fs::write(upcoming.join("1_bug_test.md"), "# Bug 1\n").unwrap();
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
move_story_to_current(root, "1_bug_test").unwrap();
let upcoming_dir = repo.join(".story_kit/work/1_upcoming");
let current_dir = repo.join(".story_kit/work/2_current");
fs::create_dir_all(&upcoming_dir).unwrap();
fs::create_dir_all(&current_dir).unwrap();
let bug_file = upcoming_dir.join("1_bug_test.md");
fs::write(&bug_file, "# Bug 1\n").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add bug"])
.current_dir(repo)
.output()
.unwrap();
move_story_to_current(repo, "1_bug_test").unwrap();
assert!(!bug_file.exists(), "upcoming/ file should be gone");
assert!(
current_dir.join("1_bug_test.md").exists(),
"current/ file should exist"
);
assert!(!upcoming.join("1_bug_test.md").exists());
assert!(current.join("1_bug_test.md").exists());
}
#[test]
fn close_bug_moves_from_current_to_archive() {
use std::fs;
use tempfile::tempdir;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("2_bug_test.md"), "# Bug 2\n").unwrap();
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
close_bug_to_archive(root, "2_bug_test").unwrap();
let current_dir = repo.join(".story_kit/work/2_current");
fs::create_dir_all(&current_dir).unwrap();
let bug_in_current = current_dir.join("2_bug_test.md");
fs::write(&bug_in_current, "# Bug 2\n").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add bug to current"])
.current_dir(repo)
.output()
.unwrap();
close_bug_to_archive(repo, "2_bug_test").unwrap();
let archive_path = repo.join(".story_kit/work/5_archived/2_bug_test.md");
assert!(!bug_in_current.exists(), "current/ file should be gone");
assert!(archive_path.exists(), "archive file should exist");
assert!(!current.join("2_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/2_bug_test.md").exists());
}
#[test]
fn close_bug_moves_from_bugs_dir_when_not_started() {
fn close_bug_moves_from_upcoming_when_not_started() {
use std::fs;
use tempfile::tempdir;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let upcoming = root.join(".story_kit/work/1_upcoming");
fs::create_dir_all(&upcoming).unwrap();
fs::write(upcoming.join("3_bug_test.md"), "# Bug 3\n").unwrap();
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
close_bug_to_archive(root, "3_bug_test").unwrap();
let upcoming_dir = repo.join(".story_kit/work/1_upcoming");
fs::create_dir_all(&upcoming_dir).unwrap();
let bug_file = upcoming_dir.join("3_bug_test.md");
fs::write(&bug_file, "# Bug 3\n").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add bug"])
.current_dir(repo)
.output()
.unwrap();
close_bug_to_archive(repo, "3_bug_test").unwrap();
let archive_path = repo.join(".story_kit/work/5_archived/3_bug_test.md");
assert!(!bug_file.exists(), "upcoming/ file should be gone");
assert!(archive_path.exists(), "archive file should exist");
assert!(!upcoming.join("3_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/3_bug_test.md").exists());
}
#[test]
@@ -1842,216 +1689,102 @@ mod tests {
assert_eq!(item_type_from_id("1_story_simple"), "story");
}
// ── git_stage_and_commit tests ─────────────────────────────────────────────
#[test]
fn git_stage_and_commit_creates_commit() {
use std::fs;
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let file = repo.join("hello.txt");
fs::write(&file, "hello").unwrap();
git_stage_and_commit(repo, &[file.as_path()], "story-kit: test commit").unwrap();
// Verify the commit exists
let output = Command::new("git")
.args(["log", "--oneline", "-1"])
.current_dir(repo)
.output()
.unwrap();
let log = String::from_utf8_lossy(&output.stdout);
assert!(log.contains("story-kit: test commit"), "commit should appear in log: {log}");
}
// ── move_story_to_merge tests ──────────────────────────────────────────────
#[test]
fn move_story_to_merge_moves_file_and_commits() {
fn move_story_to_merge_moves_file() {
use std::fs;
use tempfile::tempdir;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("20_story_foo.md"), "test").unwrap();
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
move_story_to_merge(root, "20_story_foo").unwrap();
let current_dir = repo.join(".story_kit/work/2_current");
fs::create_dir_all(&current_dir).unwrap();
let story_file = current_dir.join("20_story_my_story.md");
fs::write(&story_file, "---\nname: Test\ntest_plan: approved\n---\n").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add story"])
.current_dir(repo)
.output()
.unwrap();
move_story_to_merge(repo, "20_story_my_story").unwrap();
let merge_path = repo.join(".story_kit/work/4_merge/20_story_my_story.md");
assert!(!story_file.exists(), "2_current file should be gone");
assert!(merge_path.exists(), "4_merge file should exist");
assert!(!current.join("20_story_foo.md").exists());
assert!(root.join(".story_kit/work/4_merge/20_story_foo.md").exists());
}
#[test]
fn move_story_to_merge_idempotent_when_already_in_merge() {
use std::fs;
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let merge_dir = repo.join(".story_kit/work/4_merge");
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let merge_dir = root.join(".story_kit/work/4_merge");
fs::create_dir_all(&merge_dir).unwrap();
fs::write(
merge_dir.join("21_story_test.md"),
"---\nname: Test\ntest_plan: approved\n---\n",
)
.unwrap();
fs::write(merge_dir.join("21_story_test.md"), "test").unwrap();
// Should succeed without error even though there's nothing to move
move_story_to_merge(repo, "21_story_test").unwrap();
move_story_to_merge(root, "21_story_test").unwrap();
assert!(merge_dir.join("21_story_test.md").exists());
}
#[test]
fn move_story_to_merge_errors_when_not_in_current() {
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let result = move_story_to_merge(repo, "99_nonexistent");
assert!(result.is_err());
let tmp = tempfile::tempdir().unwrap();
let result = move_story_to_merge(tmp.path(), "99_nonexistent");
assert!(result.unwrap_err().contains("not found in work/2_current/"));
}
// ── move_story_to_qa tests ────────────────────────────────────────────────
#[test]
fn move_story_to_qa_moves_file_and_commits() {
fn move_story_to_qa_moves_file() {
use std::fs;
use tempfile::tempdir;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("30_story_qa.md"), "test").unwrap();
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
move_story_to_qa(root, "30_story_qa").unwrap();
let current_dir = repo.join(".story_kit/work/2_current");
fs::create_dir_all(&current_dir).unwrap();
let story_file = current_dir.join("30_story_qa_test.md");
fs::write(&story_file, "---\nname: QA Test\ntest_plan: approved\n---\n").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add story"])
.current_dir(repo)
.output()
.unwrap();
move_story_to_qa(repo, "30_story_qa_test").unwrap();
let qa_path = repo.join(".story_kit/work/3_qa/30_story_qa_test.md");
assert!(!story_file.exists(), "2_current file should be gone");
assert!(qa_path.exists(), "3_qa file should exist");
assert!(!current.join("30_story_qa.md").exists());
assert!(root.join(".story_kit/work/3_qa/30_story_qa.md").exists());
}
#[test]
fn move_story_to_qa_idempotent_when_already_in_qa() {
use std::fs;
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let qa_dir = repo.join(".story_kit/work/3_qa");
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let qa_dir = root.join(".story_kit/work/3_qa");
fs::create_dir_all(&qa_dir).unwrap();
fs::write(
qa_dir.join("31_story_test.md"),
"---\nname: Test\ntest_plan: approved\n---\n",
)
.unwrap();
fs::write(qa_dir.join("31_story_test.md"), "test").unwrap();
// Should succeed without error even though there's nothing to move
move_story_to_qa(repo, "31_story_test").unwrap();
move_story_to_qa(root, "31_story_test").unwrap();
assert!(qa_dir.join("31_story_test.md").exists());
}
#[test]
fn move_story_to_qa_errors_when_not_in_current() {
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let result = move_story_to_qa(repo, "99_nonexistent");
assert!(result.is_err());
let tmp = tempfile::tempdir().unwrap();
let result = move_story_to_qa(tmp.path(), "99_nonexistent");
assert!(result.unwrap_err().contains("not found in work/2_current/"));
}
// ── move_story_to_archived with 4_merge source ────────────────────────────
// ── move_story_to_archived tests ──────────────────────────────────────────
#[test]
fn move_story_to_archived_finds_in_merge_dir() {
use std::fs;
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let merge_dir = repo.join(".story_kit/work/4_merge");
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let merge_dir = root.join(".story_kit/work/4_merge");
fs::create_dir_all(&merge_dir).unwrap();
let story_file = merge_dir.join("22_story_test.md");
fs::write(&story_file, "---\nname: Test\ntest_plan: approved\n---\n").unwrap();
fs::write(merge_dir.join("22_story_test.md"), "test").unwrap();
Command::new("git")
.args(["add", "."])
.current_dir(repo)
.output()
.unwrap();
Command::new("git")
.args(["commit", "-m", "add story in merge"])
.current_dir(repo)
.output()
.unwrap();
move_story_to_archived(root, "22_story_test").unwrap();
move_story_to_archived(repo, "22_story_test").unwrap();
let archived = repo.join(".story_kit/work/5_archived/22_story_test.md");
assert!(!story_file.exists(), "4_merge file should be gone");
assert!(archived.exists(), "5_archived file should exist");
assert!(!merge_dir.join("22_story_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/22_story_test.md").exists());
}
#[test]
fn move_story_to_archived_error_when_not_in_current_or_merge() {
use tempfile::tempdir;
let tmp = tempdir().unwrap();
let repo = tmp.path();
init_git_repo(repo);
let result = move_story_to_archived(repo, "99_nonexistent");
assert!(result.is_err());
let msg = result.unwrap_err();
assert!(msg.contains("4_merge"), "error should mention 4_merge: {msg}");
let tmp = tempfile::tempdir().unwrap();
let result = move_story_to_archived(tmp.path(), "99_nonexistent");
assert!(result.unwrap_err().contains("4_merge"));
}
// ── merge_agent_work tests ────────────────────────────────────────────────

View File

@@ -1,9 +1,11 @@
use crate::agents::AgentPool;
use crate::io::watcher::WatcherEvent;
use crate::state::SessionState;
use crate::store::JsonFileStore;
use crate::workflow::WorkflowState;
use poem::http::StatusCode;
use std::sync::Arc;
use tokio::sync::broadcast;
#[derive(Clone)]
pub struct AppContext {
@@ -11,6 +13,9 @@ pub struct AppContext {
pub store: Arc<JsonFileStore>,
pub workflow: Arc<std::sync::Mutex<WorkflowState>>,
pub agents: Arc<AgentPool>,
/// Broadcast channel for filesystem watcher events. WebSocket handlers
/// subscribe to this to push lifecycle notifications to connected clients.
pub watcher_tx: broadcast::Sender<WatcherEvent>,
}
#[cfg(test)]
@@ -19,11 +24,13 @@ impl AppContext {
let state = SessionState::default();
*state.project_root.lock().unwrap() = Some(project_root.clone());
let store_path = project_root.join(".story_kit_store.json");
let (watcher_tx, _) = broadcast::channel(64);
Self {
state: Arc::new(state),
store: Arc::new(JsonFileStore::new(store_path).unwrap()),
workflow: Arc::new(std::sync::Mutex::new(WorkflowState::default())),
agents: Arc::new(AgentPool::new(3001)),
watcher_tx,
}
}
}

View File

@@ -848,8 +848,9 @@ fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
let acceptance_criteria: Option<Vec<String>> = args
.get("acceptance_criteria")
.and_then(|v| serde_json::from_value(v.clone()).ok());
// MCP tool always auto-commits the new story file to master.
let commit = true;
// Spike 61: write the file only — the filesystem watcher detects the new
// .md file in work/1_upcoming/ and auto-commits with a deterministic message.
let commit = false;
let root = ctx.state.get_project_root()?;
let story_id = create_story_file(
@@ -1607,30 +1608,10 @@ mod tests {
#[test]
fn tool_create_story_and_list_upcoming() {
let tmp = tempfile::tempdir().unwrap();
// The MCP tool always commits, so we need a real git repo.
std::process::Command::new("git")
.args(["init"])
.current_dir(tmp.path())
.output()
.unwrap();
std::process::Command::new("git")
.args(["config", "user.email", "test@test.com"])
.current_dir(tmp.path())
.output()
.unwrap();
std::process::Command::new("git")
.args(["config", "user.name", "Test"])
.current_dir(tmp.path())
.output()
.unwrap();
std::process::Command::new("git")
.args(["commit", "--allow-empty", "-m", "init"])
.current_dir(tmp.path())
.output()
.unwrap();
// No git repo needed: spike 61 — create_story just writes the file;
// the filesystem watcher handles the commit asynchronously.
let ctx = test_ctx(tmp.path());
// Create a story (always auto-commits in MCP handler)
let result = tool_create_story(
&json!({"name": "Test Story", "acceptance_criteria": ["AC1", "AC2"]}),
&ctx,

View File

@@ -26,7 +26,6 @@ use poem_openapi::OpenApiService;
use project::ProjectApi;
use settings::SettingsApi;
use std::sync::Arc;
use workflow::WorkflowApi;
pub fn build_routes(ctx: AppContext) -> impl poem::Endpoint {
let ctx_arc = std::sync::Arc::new(ctx);
@@ -58,7 +57,6 @@ type ApiTuple = (
AnthropicApi,
IoApi,
ChatApi,
WorkflowApi,
AgentsApi,
SettingsApi,
);
@@ -73,7 +71,6 @@ pub fn build_openapi_service(ctx: Arc<AppContext>) -> (ApiService, ApiService) {
AnthropicApi::new(ctx.clone()),
IoApi { ctx: ctx.clone() },
ChatApi { ctx: ctx.clone() },
WorkflowApi { ctx: ctx.clone() },
AgentsApi { ctx: ctx.clone() },
SettingsApi { ctx: ctx.clone() },
);
@@ -87,7 +84,6 @@ pub fn build_openapi_service(ctx: Arc<AppContext>) -> (ApiService, ApiService) {
AnthropicApi::new(ctx.clone()),
IoApi { ctx: ctx.clone() },
ChatApi { ctx: ctx.clone() },
WorkflowApi { ctx: ctx.clone() },
AgentsApi { ctx: ctx.clone() },
SettingsApi { ctx },
);

View File

@@ -1,158 +1,55 @@
use crate::agents::git_stage_and_commit;
use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::io::story_metadata::{StoryMetadata, parse_front_matter, parse_unchecked_todos};
use crate::workflow::{
CoverageReport, StoryTestResults, TestCaseResult, TestStatus,
evaluate_acceptance_with_coverage, parse_coverage_json, summarize_results,
};
use poem_openapi::{Object, OpenApi, Tags, payload::Json};
use serde::Deserialize;
use std::collections::BTreeSet;
use crate::http::context::AppContext;
use crate::io::story_metadata::parse_front_matter;
use serde::Serialize;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::Arc;
#[derive(Tags)]
enum WorkflowTags {
Workflow,
}
/// One test case as submitted by a client: a name plus a status string.
#[derive(Deserialize, Object)]
struct TestCasePayload {
    pub name: String,
    // Free-form status string; converted to a typed value via `to_test_case`
    // in the record endpoint, where conversion failures become 400 responses.
    pub status: String,
    pub details: Option<String>,
}

/// Request body for `/workflow/tests/record`: unit and integration test
/// results for a single story.
#[derive(Deserialize, Object)]
struct RecordTestsPayload {
    pub story_id: String,
    pub unit: Vec<TestCasePayload>,
    pub integration: Vec<TestCasePayload>,
}

/// Request body for `/workflow/acceptance`: which story to evaluate.
#[derive(Deserialize, Object)]
struct AcceptanceRequest {
    pub story_id: String,
}
/// Aggregate counts over a story's recorded test results.
#[derive(Object)]
struct TestRunSummaryResponse {
    pub total: usize,
    pub passed: usize,
    pub failed: usize,
}

/// Coverage figures for a story, as recorded or collected.
#[derive(Object)]
struct CoverageReportResponse {
    pub current_percent: f64,
    pub threshold_percent: f64,
    // Optional: not every story has a prior baseline reading.
    pub baseline_percent: Option<f64>,
}

/// Response for `/workflow/acceptance`: the accept/block decision plus the
/// supporting evidence (reasons, run summary, missing categories, coverage).
#[derive(Object)]
struct AcceptanceResponse {
    pub can_accept: bool,
    pub reasons: Vec<String>,
    pub warning: Option<String>,
    pub summary: TestRunSummaryResponse,
    // Test categories ("unit" / "integration") with no recorded results.
    pub missing_categories: Vec<String>,
    pub coverage_report: Option<CoverageReportResponse>,
}

/// One entry in the review queue: the same decision data as
/// `AcceptanceResponse`, tagged with the story id it belongs to.
#[derive(Object)]
struct ReviewStory {
    pub story_id: String,
    pub can_accept: bool,
    pub reasons: Vec<String>,
    pub warning: Option<String>,
    pub summary: TestRunSummaryResponse,
    pub missing_categories: Vec<String>,
    pub coverage_report: Option<CoverageReportResponse>,
}
/// Request body for `/workflow/coverage/record`: manually record a coverage
/// percentage (and optional threshold) for a story.
#[derive(Deserialize, Object)]
struct RecordCoveragePayload {
    pub story_id: String,
    pub current_percent: f64,
    pub threshold_percent: Option<f64>,
}

/// Request body for `/workflow/coverage/collect`: run coverage collection
/// for a story and record the parsed result.
#[derive(Deserialize, Object)]
struct CollectCoverageRequest {
    pub story_id: String,
    pub threshold_percent: Option<f64>,
}

/// Response wrapper for the review-queue endpoints.
#[derive(Object)]
struct ReviewListResponse {
    pub stories: Vec<ReviewStory>,
}

/// Unchecked TODO items extracted from one story file
/// (presumably via `parse_unchecked_todos` — builder endpoint not visible here).
#[derive(Object)]
struct StoryTodosResponse {
    pub story_id: String,
    pub story_name: Option<String>,
    pub todos: Vec<String>,
    // NOTE(review): looks like this carries a per-story read/parse failure
    // instead of failing the whole response — confirm against the endpoint.
    pub error: Option<String>,
}

/// Response wrapper for the TODO-list endpoint.
#[derive(Object)]
struct TodoListResponse {
    pub stories: Vec<StoryTodosResponse>,
}
#[derive(Object)]
#[derive(Clone, Debug, Serialize)]
pub struct UpcomingStory {
pub story_id: String,
pub name: Option<String>,
pub error: Option<String>,
}
#[derive(Object)]
struct UpcomingStoriesResponse {
pub stories: Vec<UpcomingStory>,
}
#[derive(Deserialize, Object)]
struct CreateStoryPayload {
pub name: String,
pub user_story: Option<String>,
pub acceptance_criteria: Option<Vec<String>>,
/// If true, git-add and git-commit the new story file to the current branch.
pub commit: Option<bool>,
}
#[derive(Object)]
struct CreateStoryResponse {
pub story_id: String,
}
#[derive(Object)]
pub struct StoryValidationResult {
pub story_id: String,
pub valid: bool,
pub error: Option<String>,
}
#[derive(Object)]
struct ValidateStoriesResponse {
pub stories: Vec<StoryValidationResult>,
/// Full pipeline state across all stages.
///
/// Each field mirrors one `.story_kit/work/` stage directory; the fields are
/// filled in pipeline order by `load_pipeline_state`.
#[derive(Clone, Debug, Serialize)]
pub struct PipelineState {
    // work/1_upcoming/
    pub upcoming: Vec<UpcomingStory>,
    // work/2_current/
    pub current: Vec<UpcomingStory>,
    // work/3_qa/
    pub qa: Vec<UpcomingStory>,
    // work/4_merge/
    pub merge: Vec<UpcomingStory>,
}
pub fn load_upcoming_stories(ctx: &AppContext) -> Result<Vec<UpcomingStory>, String> {
let root = ctx.state.get_project_root()?;
let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming");
/// Load the full pipeline state (all 4 active stages).
pub fn load_pipeline_state(ctx: &AppContext) -> Result<PipelineState, String> {
    // Read each stage directory in pipeline order; any read error aborts the
    // whole load via `?`.
    let upcoming = load_stage_items(ctx, "1_upcoming")?;
    let current = load_stage_items(ctx, "2_current")?;
    let qa = load_stage_items(ctx, "3_qa")?;
    let merge = load_stage_items(ctx, "4_merge")?;
    Ok(PipelineState {
        upcoming,
        current,
        qa,
        merge,
    })
}
if !upcoming_dir.exists() {
/// Load work items from any pipeline stage directory.
fn load_stage_items(ctx: &AppContext, stage_dir: &str) -> Result<Vec<UpcomingStory>, String> {
let root = ctx.state.get_project_root()?;
let dir = root.join(".story_kit").join("work").join(stage_dir);
if !dir.exists() {
return Ok(Vec::new());
}
let mut stories = Vec::new();
for entry in fs::read_dir(&upcoming_dir)
.map_err(|e| format!("Failed to read upcoming stories directory: {e}"))?
for entry in fs::read_dir(&dir)
.map_err(|e| format!("Failed to read {stage_dir} directory: {e}"))?
{
let entry = entry.map_err(|e| format!("Failed to read upcoming story entry: {e}"))?;
let entry = entry.map_err(|e| format!("Failed to read {stage_dir} entry: {e}"))?;
let path = entry.path();
if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
continue;
@@ -175,449 +72,8 @@ pub fn load_upcoming_stories(ctx: &AppContext) -> Result<Vec<UpcomingStory>, Str
Ok(stories)
}
fn load_current_story_metadata(ctx: &AppContext) -> Result<Vec<(String, StoryMetadata)>, String> {
let root = ctx.state.get_project_root()?;
let current_dir = root.join(".story_kit").join("work").join("2_current");
if !current_dir.exists() {
return Ok(Vec::new());
}
let mut stories = Vec::new();
for entry in fs::read_dir(&current_dir)
.map_err(|e| format!("Failed to read current stories directory: {e}"))?
{
let entry = entry.map_err(|e| format!("Failed to read current story entry: {e}"))?;
let path = entry.path();
if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
continue;
}
let story_id = path
.file_stem()
.and_then(|stem| stem.to_str())
.ok_or_else(|| "Invalid story file name.".to_string())?
.to_string();
let contents = fs::read_to_string(&path)
.map_err(|e| format!("Failed to read story file {}: {e}", path.display()))?;
let metadata = parse_front_matter(&contents)
.map_err(|e| format!("Failed to parse front matter for {story_id}: {e:?}"))?;
stories.push((story_id, metadata));
}
Ok(stories)
}
/// Build the `ReviewStory` view for one story from its recorded test results
/// and optional coverage report.
///
/// Acceptance requires both the acceptance decision to pass AND at least one
/// recorded result in each test category (unit, integration); a missing
/// category both blocks acceptance and adds an explanatory reason.
fn to_review_story(
    story_id: &str,
    results: &StoryTestResults,
    coverage: Option<&CoverageReport>,
) -> ReviewStory {
    let decision = evaluate_acceptance_with_coverage(results, coverage);
    let summary = summarize_results(results);
    let mut missing_categories = Vec::new();
    // Start from the decision's own reasons, then append missing-category ones.
    let mut reasons = decision.reasons;
    if results.unit.is_empty() {
        missing_categories.push("unit".to_string());
        reasons.push("Missing unit test results.".to_string());
    }
    if results.integration.is_empty() {
        missing_categories.push("integration".to_string());
        reasons.push("Missing integration test results.".to_string());
    }
    let can_accept = decision.can_accept && missing_categories.is_empty();
    // Flatten the coverage report into the response shape when present.
    let coverage_report = coverage.map(|c| CoverageReportResponse {
        current_percent: c.current_percent,
        threshold_percent: c.threshold_percent,
        baseline_percent: c.baseline_percent,
    });
    ReviewStory {
        story_id: story_id.to_string(),
        can_accept,
        reasons,
        warning: decision.warning,
        summary: TestRunSummaryResponse {
            total: summary.total,
            passed: summary.passed,
            failed: summary.failed,
        },
        missing_categories,
        coverage_report,
    }
}
pub struct WorkflowApi {
pub ctx: Arc<AppContext>,
}
#[OpenApi(tag = "WorkflowTags::Workflow")]
impl WorkflowApi {
/// Record test results for a story (unit + integration).
#[oai(path = "/workflow/tests/record", method = "post")]
async fn record_tests(&self, payload: Json<RecordTestsPayload>) -> OpenApiResult<Json<bool>> {
let unit = payload
.0
.unit
.into_iter()
.map(to_test_case)
.collect::<Result<Vec<_>, String>>()
.map_err(bad_request)?;
let integration = payload
.0
.integration
.into_iter()
.map(to_test_case)
.collect::<Result<Vec<_>, String>>()
.map_err(bad_request)?;
let mut workflow = self
.ctx
.workflow
.lock()
.map_err(|e| bad_request(e.to_string()))?;
workflow
.record_test_results_validated(payload.0.story_id, unit, integration)
.map_err(bad_request)?;
Ok(Json(true))
}
/// Evaluate acceptance readiness for a story.
#[oai(path = "/workflow/acceptance", method = "post")]
async fn acceptance(
&self,
payload: Json<AcceptanceRequest>,
) -> OpenApiResult<Json<AcceptanceResponse>> {
let (results, coverage) = {
let workflow = self
.ctx
.workflow
.lock()
.map_err(|e| bad_request(e.to_string()))?;
let results = workflow
.results
.get(&payload.0.story_id)
.cloned()
.unwrap_or_default();
let coverage = workflow.coverage.get(&payload.0.story_id).cloned();
(results, coverage)
};
let decision =
evaluate_acceptance_with_coverage(&results, coverage.as_ref());
let summary = summarize_results(&results);
let mut missing_categories = Vec::new();
let mut reasons = decision.reasons;
if results.unit.is_empty() {
missing_categories.push("unit".to_string());
reasons.push("Missing unit test results.".to_string());
}
if results.integration.is_empty() {
missing_categories.push("integration".to_string());
reasons.push("Missing integration test results.".to_string());
}
let can_accept = decision.can_accept && missing_categories.is_empty();
let coverage_report = coverage.map(|c| CoverageReportResponse {
current_percent: c.current_percent,
threshold_percent: c.threshold_percent,
baseline_percent: c.baseline_percent,
});
Ok(Json(AcceptanceResponse {
can_accept,
reasons,
warning: decision.warning,
summary: TestRunSummaryResponse {
total: summary.total,
passed: summary.passed,
failed: summary.failed,
},
missing_categories,
coverage_report,
}))
}
/// List stories that are ready for human review.
#[oai(path = "/workflow/review", method = "get")]
async fn review_queue(&self) -> OpenApiResult<Json<ReviewListResponse>> {
    let guard = self
        .ctx
        .workflow
        .lock()
        .map_err(|e| bad_request(e.to_string()))?;
    // Build a review entry for every story with recorded results, keeping
    // only those that currently pass every acceptance gate.
    let mut stories = Vec::new();
    for (story_id, results) in guard.results.iter() {
        let entry = to_review_story(story_id, results, guard.coverage.get(story_id));
        if entry.can_accept {
            stories.push(entry);
        }
    }
    drop(guard);
    Ok(Json(ReviewListResponse { stories }))
}
/// List stories in the review queue, including blocked items and current stories.
#[oai(path = "/workflow/review/all", method = "get")]
async fn review_queue_all(&self) -> OpenApiResult<Json<ReviewListResponse>> {
    let current_stories =
        load_current_story_metadata(self.ctx.as_ref()).map_err(bad_request)?;
    let mut guard = self
        .ctx
        .workflow
        .lock()
        .map_err(|e| bad_request(e.to_string()))?;
    // Refresh metadata for the stories currently in flight before listing.
    if !current_stories.is_empty() {
        guard.load_story_metadata(current_stories);
    }
    // Union of every story id we know about (results + metadata), sorted
    // for stable output.
    let story_ids: BTreeSet<String> = guard
        .results
        .keys()
        .chain(guard.stories.keys())
        .cloned()
        .collect();
    let stories: Vec<_> = story_ids
        .into_iter()
        .map(|story_id| {
            let results = guard.results.get(&story_id).cloned().unwrap_or_default();
            to_review_story(&story_id, &results, guard.coverage.get(&story_id))
        })
        .collect();
    Ok(Json(ReviewListResponse { stories }))
}
/// Record coverage data for a story.
#[oai(path = "/workflow/coverage/record", method = "post")]
async fn record_coverage(
    &self,
    payload: Json<RecordCoveragePayload>,
) -> OpenApiResult<Json<bool>> {
    let body = payload.0;
    // Acquire the workflow lock just long enough to store the datapoint.
    self.ctx
        .workflow
        .lock()
        .map_err(|e| bad_request(e.to_string()))?
        .record_coverage(body.story_id, body.current_percent, body.threshold_percent);
    Ok(Json(true))
}
/// Run coverage collection: execute test:coverage, parse output, record result.
#[oai(path = "/workflow/coverage/collect", method = "post")]
async fn collect_coverage(
    &self,
    payload: Json<CollectCoverageRequest>,
) -> OpenApiResult<Json<CoverageReportResponse>> {
    let root = self
        .ctx
        .state
        .get_project_root()
        .map_err(bad_request)?;
    let frontend_dir = root.join("frontend");

    // `pnpm run test:coverage` is a blocking subprocess; keep it off the
    // async executor.
    let run_dir = frontend_dir.clone();
    let output = tokio::task::spawn_blocking(move || {
        std::process::Command::new("pnpm")
            .args(["run", "test:coverage"])
            .current_dir(&run_dir)
            .output()
    })
    .await
    .map_err(|e| bad_request(format!("Task join error: {e}")))?
    .map_err(|e| bad_request(format!("Failed to run coverage command: {e}")))?;

    if !output.status.success() {
        // Surface only the last few non-empty lines of the combined
        // stdout+stderr so the client gets a readable failure summary.
        let stdout = String::from_utf8_lossy(&output.stdout);
        let stderr = String::from_utf8_lossy(&output.stderr);
        let mut lines: Vec<&str> = stdout
            .lines()
            .chain(stderr.lines())
            .filter(|l| !l.trim().is_empty())
            .collect();
        let keep_from = lines.len().saturating_sub(5);
        let tail = lines.split_off(keep_from);
        let summary = if tail.is_empty() {
            "Unknown error. Check server logs for details.".to_string()
        } else {
            tail.join("\n")
        };
        return Err(bad_request(format!("Coverage command failed:\n{summary}")));
    }

    // The coverage run writes a JSON summary under frontend/coverage/.
    let summary_path = frontend_dir
        .join("coverage")
        .join("coverage-summary.json");
    let json_str = fs::read_to_string(&summary_path)
        .map_err(|e| bad_request(format!("Failed to read coverage summary: {e}")))?;
    let current_percent = parse_coverage_json(&json_str).map_err(bad_request)?;

    // Record the datapoint, then read back the stored report (which carries
    // the baseline computed by the workflow state).
    let coverage_report = {
        let mut workflow = self
            .ctx
            .workflow
            .lock()
            .map_err(|e| bad_request(e.to_string()))?;
        workflow.record_coverage(
            payload.0.story_id.clone(),
            current_percent,
            payload.0.threshold_percent,
        );
        workflow
            .coverage
            .get(&payload.0.story_id)
            .cloned()
            .expect("just inserted")
    };
    Ok(Json(CoverageReportResponse {
        current_percent: coverage_report.current_percent,
        threshold_percent: coverage_report.threshold_percent,
        baseline_percent: coverage_report.baseline_percent,
    }))
}
/// List unchecked acceptance criteria (TODOs) for all current stories.
#[oai(path = "/workflow/todos", method = "get")]
async fn story_todos(&self) -> OpenApiResult<Json<TodoListResponse>> {
    let root = self.ctx.state.get_project_root().map_err(bad_request)?;
    let current_dir = root.join(".story_kit").join("work").join("2_current");
    let mut stories = Vec::new();
    // No current-stage directory simply means no stories in flight.
    if !current_dir.exists() {
        return Ok(Json(TodoListResponse { stories }));
    }
    // Sort directory entries by filename for deterministic output.
    let mut entries: Vec<_> = fs::read_dir(&current_dir)
        .map_err(|e| bad_request(format!("Failed to read current stories: {e}")))?
        .filter_map(Result::ok)
        .collect();
    entries.sort_by_key(|e| e.file_name());
    for entry in entries {
        let path = entry.path();
        // Only markdown files are story files.
        if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
            continue;
        }
        let contents = fs::read_to_string(&path)
            .map_err(|e| bad_request(format!("Failed to read {}: {e}", path.display())))?;
        // Broken front matter is reported per-story rather than failing
        // the whole listing.
        let (story_name, error) = match parse_front_matter(&contents) {
            Ok(meta) => (meta.name, None),
            Err(err) => (None, Some(err.to_string())),
        };
        stories.push(StoryTodosResponse {
            story_id: path
                .file_stem()
                .and_then(|stem| stem.to_str())
                .unwrap_or_default()
                .to_string(),
            story_name,
            todos: parse_unchecked_todos(&contents),
            error,
        });
    }
    Ok(Json(TodoListResponse { stories }))
}
/// List upcoming stories from .story_kit/stories/upcoming/.
#[oai(path = "/workflow/upcoming", method = "get")]
async fn list_upcoming_stories(&self) -> OpenApiResult<Json<UpcomingStoriesResponse>> {
let stories = load_upcoming_stories(self.ctx.as_ref()).map_err(bad_request)?;
Ok(Json(UpcomingStoriesResponse { stories }))
}
/// Validate front matter on all current and upcoming story files.
#[oai(path = "/workflow/stories/validate", method = "get")]
async fn validate_stories(&self) -> OpenApiResult<Json<ValidateStoriesResponse>> {
    let root = self.ctx.state.get_project_root().map_err(bad_request)?;
    validate_story_dirs(&root)
        .map(|stories| Json(ValidateStoriesResponse { stories }))
        .map_err(bad_request)
}
/// Create a new story file with correct front matter in upcoming/.
#[oai(path = "/workflow/stories/create", method = "post")]
async fn create_story(
    &self,
    payload: Json<CreateStoryPayload>,
) -> OpenApiResult<Json<CreateStoryResponse>> {
    let root = self.ctx.state.get_project_root().map_err(bad_request)?;
    let body = payload.0;
    // `commit` defaults to false when the client omits it.
    let story_id = create_story_file(
        &root,
        &body.name,
        body.user_story.as_deref(),
        body.acceptance_criteria.as_deref(),
        body.commit.unwrap_or(false),
    )
    .map_err(bad_request)?;
    Ok(Json(CreateStoryResponse { story_id }))
}
/// Ensure a story can be accepted; returns an error when gates fail.
#[oai(path = "/workflow/acceptance/ensure", method = "post")]
async fn ensure_acceptance(
    &self,
    payload: Json<AcceptanceRequest>,
) -> OpenApiResult<Json<bool>> {
    let response = self.acceptance(payload).await?.0;
    if response.can_accept {
        return Ok(Json(true));
    }
    // Fold every blocking reason plus any warning into one error message.
    let mut parts = Vec::new();
    if !response.reasons.is_empty() {
        parts.push(response.reasons.join("; "));
    }
    parts.extend(response.warning);
    let message = if parts.is_empty() {
        "Acceptance is blocked.".to_string()
    } else {
        format!("Acceptance is blocked: {}", parts.join("; "))
    };
    Err(bad_request(message))
}
/// Load every story in the `1_upcoming` pipeline stage.
///
/// Thin wrapper over [`load_stage_items`] so HTTP handlers and MCP tools
/// share one loading path.
pub fn load_upcoming_stories(ctx: &AppContext) -> Result<Vec<UpcomingStory>, String> {
    load_stage_items(ctx, "1_upcoming")
}
/// Shared create-story logic used by both the OpenApi and MCP handlers.
@@ -686,19 +142,12 @@ pub fn create_story_file(
fs::write(&filepath, &content)
.map_err(|e| format!("Failed to write story file: {e}"))?;
if commit {
git_commit_story_file(root, &filepath, &story_id)?;
}
// Watcher handles the git commit asynchronously.
let _ = commit; // kept for API compat, ignored
Ok(story_id)
}
/// Git-add and git-commit a newly created story file using a deterministic message.
fn git_commit_story_file(root: &Path, filepath: &Path, story_id: &str) -> Result<(), String> {
let msg = format!("story-kit: create story {story_id}");
git_stage_and_commit(root, &[filepath], &msg)
}
// ── Bug file helpers ──────────────────────────────────────────────
/// Create a bug file in `work/1_upcoming/` with a deterministic filename and auto-commit.
@@ -761,8 +210,7 @@ pub fn create_bug_file(
fs::write(&filepath, &content).map_err(|e| format!("Failed to write bug file: {e}"))?;
let msg = format!("story-kit: create bug {bug_id}");
git_stage_and_commit(root, &[filepath.as_path()], &msg)?;
// Watcher handles the git commit asynchronously.
Ok(bug_id)
}
@@ -898,8 +346,8 @@ pub fn check_criterion_in_file(
fs::write(&filepath, &new_str)
.map_err(|e| format!("Failed to write story file: {e}"))?;
let msg = format!("story-kit: check criterion {criterion_index} for story {story_id}");
git_stage_and_commit(project_root, &[filepath.as_path()], &msg)
// Watcher handles the git commit asynchronously.
Ok(())
}
/// Update the `test_plan` front-matter field in a story file and auto-commit.
@@ -952,8 +400,8 @@ pub fn set_test_plan_in_file(
fs::write(&filepath, &new_str)
.map_err(|e| format!("Failed to write story file: {e}"))?;
let msg = format!("story-kit: set test_plan to {status} for story {story_id}");
git_stage_and_commit(project_root, &[filepath.as_path()], &msg)
// Watcher handles the git commit asynchronously.
Ok(())
}
fn slugify_name(name: &str) -> String {
@@ -1084,128 +532,9 @@ pub fn validate_story_dirs(
Ok(results)
}
/// Convert an API test-case payload into the internal result type,
/// rejecting payloads with an invalid status string.
fn to_test_case(input: TestCasePayload) -> Result<TestCaseResult, String> {
    parse_test_status(&input.status).map(|status| TestCaseResult {
        name: input.name,
        status,
        details: input.details,
    })
}
/// Parse a wire-format test status (`"pass"` / `"fail"`) into a `TestStatus`.
fn parse_test_status(value: &str) -> Result<TestStatus, String> {
    if value == "pass" {
        Ok(TestStatus::Pass)
    } else if value == "fail" {
        Ok(TestStatus::Fail)
    } else {
        Err(format!(
            "Invalid test status '{value}'. Use 'pass' or 'fail'."
        ))
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::workflow::{StoryTestResults, TestCaseResult, TestStatus};
#[test]
fn parse_test_status_pass() {
    // "pass" is the wire value for TestStatus::Pass.
    assert_eq!(parse_test_status("pass").unwrap(), TestStatus::Pass);
}
#[test]
fn parse_test_status_fail() {
    // "fail" is the wire value for TestStatus::Fail.
    assert_eq!(parse_test_status("fail").unwrap(), TestStatus::Fail);
}
#[test]
fn parse_test_status_invalid() {
    // Anything other than "pass"/"fail" is rejected with a descriptive error.
    let result = parse_test_status("unknown");
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("Invalid test status"));
}
#[test]
fn to_test_case_converts_pass() {
    // A valid payload converts field-for-field into TestCaseResult.
    let payload = TestCasePayload {
        name: "my_test".to_string(),
        status: "pass".to_string(),
        details: Some("all good".to_string()),
    };
    let result = to_test_case(payload).unwrap();
    assert_eq!(result.name, "my_test");
    assert_eq!(result.status, TestStatus::Pass);
    assert_eq!(result.details, Some("all good".to_string()));
}
#[test]
fn to_test_case_rejects_invalid_status() {
    // Conversion propagates the status-parse failure.
    let payload = TestCasePayload {
        name: "bad".to_string(),
        status: "maybe".to_string(),
        details: None,
    };
    assert!(to_test_case(payload).is_err());
}
#[test]
fn to_review_story_all_passing() {
    // With passing unit AND integration results, the story is acceptable
    // with no reasons or missing categories.
    let results = StoryTestResults {
        unit: vec![TestCaseResult {
            name: "u1".to_string(),
            status: TestStatus::Pass,
            details: None,
        }],
        integration: vec![TestCaseResult {
            name: "i1".to_string(),
            status: TestStatus::Pass,
            details: None,
        }],
    };
    let review = to_review_story("story-29", &results, None);
    assert!(review.can_accept);
    assert!(review.reasons.is_empty());
    assert!(review.missing_categories.is_empty());
    assert_eq!(review.summary.total, 2);
    assert_eq!(review.summary.passed, 2);
}
#[test]
fn to_review_story_missing_integration() {
    // An empty integration category blocks acceptance and is reported
    // in missing_categories.
    let results = StoryTestResults {
        unit: vec![TestCaseResult {
            name: "u1".to_string(),
            status: TestStatus::Pass,
            details: None,
        }],
        integration: vec![],
    };
    let review = to_review_story("story-29", &results, None);
    assert!(!review.can_accept);
    assert!(review.missing_categories.contains(&"integration".to_string()));
}
#[test]
fn to_review_story_with_failures() {
    // A failing case blocks acceptance and is counted in the summary.
    let results = StoryTestResults {
        unit: vec![TestCaseResult {
            name: "u1".to_string(),
            status: TestStatus::Fail,
            details: None,
        }],
        integration: vec![TestCaseResult {
            name: "i1".to_string(),
            status: TestStatus::Pass,
            details: None,
        }],
    };
    let review = to_review_story("story-29", &results, None);
    assert!(!review.can_accept);
    assert_eq!(review.summary.failed, 1);
}
#[test]
fn load_upcoming_returns_empty_when_no_dir() {

View File

@@ -1,4 +1,6 @@
use crate::http::context::AppContext;
use crate::http::workflow::{PipelineState, load_pipeline_state};
use crate::io::watcher::WatcherEvent;
use crate::llm::chat;
use crate::llm::types::Message;
use futures::{SinkExt, StreamExt};
@@ -30,16 +32,56 @@ enum WsRequest {
/// - `token` streams partial model output.
/// - `update` pushes the updated message history.
/// - `error` reports a request or processing failure.
/// - `work_item_changed` notifies that a `.story_kit/work/` file changed.
enum WsResponse {
    /// Partial model output streamed while a response is generated.
    Token { content: String },
    /// The full, updated chat message history.
    Update { messages: Vec<Message> },
    /// Session ID for Claude Code conversation resumption.
    SessionId { session_id: String },
    /// A request or processing failure reported to the client.
    Error { message: String },
    /// Filesystem watcher notification: a work-pipeline file was created or
    /// modified and auto-committed. The frontend can use this to refresh its
    /// story/bug list without polling.
    WorkItemChanged {
        stage: String,
        item_id: String,
        action: String,
        commit_msg: String,
    },
    /// Full pipeline state pushed on connect and after every watcher event.
    PipelineState {
        upcoming: Vec<crate::http::workflow::UpcomingStory>,
        current: Vec<crate::http::workflow::UpcomingStory>,
        qa: Vec<crate::http::workflow::UpcomingStory>,
        merge: Vec<crate::http::workflow::UpcomingStory>,
    },
}
impl From<WatcherEvent> for WsResponse {
fn from(e: WatcherEvent) -> Self {
WsResponse::WorkItemChanged {
stage: e.stage,
item_id: e.item_id,
action: e.action,
commit_msg: e.commit_msg,
}
}
}
/// Translate a loaded pipeline snapshot into its WebSocket wire form.
impl From<PipelineState> for WsResponse {
    fn from(state: PipelineState) -> Self {
        WsResponse::PipelineState {
            upcoming: state.upcoming,
            current: state.current,
            qa: state.qa,
            merge: state.merge,
        }
    }
}
#[handler]
/// WebSocket endpoint for streaming chat responses and cancellation.
/// WebSocket endpoint for streaming chat responses, cancellation, and
/// filesystem watcher notifications.
///
/// Accepts JSON `WsRequest` messages and streams `WsResponse` messages.
pub async fn ws_handler(ws: WebSocket, ctx: Data<&Arc<AppContext>>) -> impl poem::IntoResponse {
@@ -58,6 +100,37 @@ pub async fn ws_handler(ws: WebSocket, ctx: Data<&Arc<AppContext>>) -> impl poem
}
});
// Push initial pipeline state to the client on connect.
if let Ok(state) = load_pipeline_state(ctx.as_ref()) {
let _ = tx.send(state.into());
}
// Subscribe to filesystem watcher events and forward them to the client.
// After each watcher event, also push the updated pipeline state.
let tx_watcher = tx.clone();
let ctx_watcher = ctx.clone();
let mut watcher_rx = ctx.watcher_tx.subscribe();
tokio::spawn(async move {
loop {
match watcher_rx.recv().await {
Ok(evt) => {
if tx_watcher.send(evt.into()).is_err() {
break;
}
// Push refreshed pipeline state after the change.
if let Ok(state) = load_pipeline_state(ctx_watcher.as_ref()) {
if tx_watcher.send(state.into()).is_err() {
break;
}
}
}
// Lagged: skip missed events, keep going.
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => continue,
Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
}
}
});
while let Some(Ok(msg)) = stream.next().await {
if let WsMessage::Text(text) = msg {
let parsed: Result<WsRequest, _> = serde_json::from_str(&text);

View File

@@ -2,3 +2,4 @@ pub mod fs;
pub mod search;
pub mod shell;
pub mod story_metadata;
pub mod watcher;

282
server/src/io/watcher.rs Normal file
View File

@@ -0,0 +1,282 @@
//! Filesystem watcher for `.story_kit/work/`.
//!
//! Watches the work pipeline directories for file changes, infers the lifecycle
//! stage from the target directory name, auto-commits with a deterministic message,
//! and broadcasts a [`WatcherEvent`] to all connected WebSocket clients.
//!
//! # Debouncing
//! Events are buffered for 300 ms after the last activity. All changes within the
//! window are batched into a single `git add + commit`. This avoids double-commits
//! when `fs::rename` fires both a remove and a create event.
//!
//! # Race conditions
//! If a mutation handler (e.g. `move_story_to_current`) already committed the
//! change, `git commit` will return "nothing to commit". The watcher detects this
//! via exit-code inspection and silently skips the commit while still broadcasting
//! the event so connected clients stay in sync.
use notify::{EventKind, RecommendedWatcher, RecursiveMode, Watcher, recommended_watcher};
use serde::Serialize;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use std::time::{Duration, Instant};
use tokio::sync::broadcast;
/// A lifecycle event emitted by the filesystem watcher after auto-committing.
///
/// Derives `Serialize` so it can be broadcast to connected WebSocket
/// clients (see the module docs above).
#[derive(Clone, Debug, Serialize)]
pub struct WatcherEvent {
    /// Pipeline stage directory (e.g. `"2_current"`, `"5_archived"`).
    pub stage: String,
    /// Work item ID (filename stem without extension, e.g. `"42_story_my_feature"`).
    pub item_id: String,
    /// Semantic action inferred from the stage (e.g. `"start"`, `"accept"`).
    pub action: String,
    /// The deterministic git commit message used (or that would have been used).
    pub commit_msg: String,
}
/// Map a pipeline directory name to a (action, commit-message-prefix) pair.
///
/// Returns `None` for directories that are not part of the known pipeline.
fn stage_metadata(stage: &str, item_id: &str) -> Option<(&'static str, String)> {
    match stage {
        "1_upcoming" => Some(("create", format!("story-kit: create {item_id}"))),
        "2_current" => Some(("start", format!("story-kit: start {item_id}"))),
        "3_qa" => Some(("qa", format!("story-kit: queue {item_id} for QA"))),
        "4_merge" => Some(("merge", format!("story-kit: queue {item_id} for merge"))),
        "5_archived" => Some(("accept", format!("story-kit: accept {item_id}"))),
        _ => None,
    }
}
/// Return the pipeline stage name for a path if it is a `.md` file living
/// directly inside one of the known work subdirectories, otherwise `None`.
fn stage_for_path(path: &Path) -> Option<String> {
    // Only markdown files participate in the pipeline.
    match path.extension() {
        Some(ext) if ext == "md" => {}
        _ => return None,
    }
    let parent_name = path.parent()?.file_name()?.to_str()?;
    match parent_name {
        "1_upcoming" | "2_current" | "3_qa" | "4_merge" | "5_archived" => {
            Some(parent_name.to_string())
        }
        _ => None,
    }
}
/// Stage all changes in the work directory and commit with the given message.
///
/// Uses `git add -A .story_kit/work/` to catch both additions and deletions in
/// a single commit. Returns `Ok(true)` if a commit was made, `Ok(false)` if
/// there was nothing to commit, and `Err` for unexpected failures.
fn git_add_work_and_commit(git_root: &Path, message: &str) -> Result<bool, String> {
    let work_rel = PathBuf::from(".story_kit").join("work");

    // Stage additions AND deletions under the work tree in one shot.
    let add = std::process::Command::new("git")
        .args(["add", "-A"])
        .arg(&work_rel)
        .current_dir(git_root)
        .output()
        .map_err(|e| format!("git add: {e}"))?;
    if !add.status.success() {
        let err = String::from_utf8_lossy(&add.stderr);
        return Err(format!("git add failed: {err}"));
    }

    let commit = std::process::Command::new("git")
        .args(["commit", "-m", message])
        .current_dir(git_root)
        .output()
        .map_err(|e| format!("git commit: {e}"))?;
    if commit.status.success() {
        return Ok(true);
    }

    let stderr = String::from_utf8_lossy(&commit.stderr);
    let stdout = String::from_utf8_lossy(&commit.stdout);
    // Benign race: a mutation handler may have committed this change first.
    if stdout.contains("nothing to commit") || stderr.contains("nothing to commit") {
        Ok(false)
    } else {
        Err(format!("git commit failed: {stderr}"))
    }
}
/// Process a batch of pending (path → stage) entries: commit and broadcast.
///
/// Only files that still exist on disk are used to derive the commit message
/// (they represent the destination of a move or a new file). Deletions are
/// captured by `git add -A .story_kit/work/` automatically.
///
/// When several entries are pending in one debounce window, the
/// representative entry is chosen by sorted path order so the commit message
/// and broadcast payload are deterministic (`HashMap` iteration order is not).
fn flush_pending(
    pending: &HashMap<PathBuf, String>,
    git_root: &Path,
    event_tx: &broadcast::Sender<WatcherEvent>,
) {
    // Files that still exist are additions (new files or move destinations).
    let mut additions: Vec<(&PathBuf, &str)> = pending
        .iter()
        .filter(|(path, _)| path.exists())
        .map(|(path, stage)| (path, stage.as_str()))
        .collect();
    // Sort so the representative (first) entry does not depend on hash order.
    additions.sort_by_key(|(path, _)| *path);

    // Pick the commit message from the first addition (the meaningful side of
    // a move). If there are only deletions, use a generic message.
    let (action, item_id, commit_msg) = if let Some((path, stage)) = additions.first() {
        let item = path.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
        match stage_metadata(stage, item) {
            Some((act, msg)) => (act, item.to_string(), msg),
            // Unknown stage directory: nothing to commit or broadcast.
            None => return,
        }
    } else {
        // Only deletions — pick the smallest pending path for the item name
        // (deterministic, unlike `iter().next()` on a HashMap).
        let Some(path) = pending.keys().min() else {
            return;
        };
        let item = path.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
        ("remove", item.to_string(), format!("story-kit: remove {item}"))
    };

    eprintln!("[watcher] flush: {commit_msg}");
    match git_add_work_and_commit(git_root, &commit_msg) {
        Ok(committed) => {
            if committed {
                eprintln!("[watcher] committed: {commit_msg}");
            } else {
                // A mutation handler already committed this change; still
                // broadcast so connected clients stay in sync.
                eprintln!("[watcher] skipped (already committed): {commit_msg}");
            }
            let stage = additions.first().map_or("unknown", |(_, s)| s);
            let evt = WatcherEvent {
                stage: stage.to_string(),
                item_id,
                action: action.to_string(),
                commit_msg,
            };
            // Send fails only when no receivers are connected — fine to drop.
            let _ = event_tx.send(evt);
        }
        Err(e) => {
            eprintln!("[watcher] git error: {e}");
        }
    }
}
/// Start the filesystem watcher on a dedicated OS thread.
///
/// `work_dir` — absolute path to `.story_kit/work/` (watched recursively).
/// `git_root` — project root (passed to `git` commands as cwd).
/// `event_tx` — broadcast sender; each connected WebSocket client holds a receiver.
pub fn start_watcher(
    work_dir: PathBuf,
    git_root: PathBuf,
    event_tx: broadcast::Sender<WatcherEvent>,
) {
    // Dedicated OS thread: notify delivers via a blocking mpsc channel and
    // git commands are blocking, so this stays off the async runtime.
    std::thread::spawn(move || {
        let (notify_tx, notify_rx) = mpsc::channel::<notify::Result<notify::Event>>();
        let mut watcher: RecommendedWatcher = match recommended_watcher(move |res| {
            let _ = notify_tx.send(res);
        }) {
            Ok(w) => w,
            Err(e) => {
                eprintln!("[watcher] failed to create watcher: {e}");
                return;
            }
        };
        if let Err(e) = watcher.watch(&work_dir, RecursiveMode::Recursive) {
            eprintln!("[watcher] failed to watch {}: {e}", work_dir.display());
            return;
        }
        eprintln!("[watcher] watching {}", work_dir.display());
        const DEBOUNCE: Duration = Duration::from_millis(300);
        // Map path → stage for pending (uncommitted) changes.
        let mut pending: HashMap<PathBuf, String> = HashMap::new();
        // End of the current debounce window; None while nothing is pending.
        let mut deadline: Option<Instant> = None;
        loop {
            // How long until the debounce window closes (or wait for next event).
            // With no pending work, fall back to a 60 s idle wait so the loop
            // still wakes up periodically.
            let timeout = deadline.map_or(Duration::from_secs(60), |d| {
                d.saturating_duration_since(Instant::now())
            });
            let flush = match notify_rx.recv_timeout(timeout) {
                Ok(Ok(event)) => {
                    // Track creates, modifies, AND removes. Removes are needed so
                    // that standalone deletions trigger a flush, and so that moves
                    // (which fire Remove + Create) land in the same debounce window.
                    let is_relevant_kind = matches!(
                        event.kind,
                        EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_)
                    );
                    if is_relevant_kind {
                        for path in event.paths {
                            // Only `.md` files directly under a known stage dir count;
                            // each new event restarts the debounce window.
                            if let Some(stage) = stage_for_path(&path) {
                                pending.insert(path, stage);
                                deadline = Some(Instant::now() + DEBOUNCE);
                            }
                        }
                    }
                    false
                }
                Ok(Err(e)) => {
                    eprintln!("[watcher] notify error: {e}");
                    false
                }
                // Debounce window expired — time to flush.
                Err(mpsc::RecvTimeoutError::Timeout) => true,
                Err(mpsc::RecvTimeoutError::Disconnected) => {
                    eprintln!("[watcher] channel disconnected, shutting down");
                    break;
                }
            };
            if flush && !pending.is_empty() {
                // Batch everything seen in the window into one commit + event.
                flush_pending(&pending, &git_root, &event_tx);
                pending.clear();
                deadline = None;
            }
        }
    });
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn stage_for_path_recognises_pipeline_dirs() {
        let base = PathBuf::from("/proj/.story_kit/work");
        // Markdown files directly inside known stage dirs resolve to the stage.
        assert_eq!(
            stage_for_path(&base.join("2_current/42_story_foo.md")).as_deref(),
            Some("2_current")
        );
        assert_eq!(
            stage_for_path(&base.join("5_archived/10_bug_bar.md")).as_deref(),
            Some("5_archived")
        );
        // Unknown directories and non-markdown files are ignored.
        assert_eq!(stage_for_path(&base.join("other/file.md")), None);
        assert_eq!(stage_for_path(&base.join("2_current/42_story_foo.txt")), None);
    }

    #[test]
    fn stage_metadata_returns_correct_actions() {
        let current = stage_metadata("2_current", "42_story_foo").unwrap();
        assert_eq!(current.0, "start");
        assert_eq!(current.1, "story-kit: start 42_story_foo");

        let archived = stage_metadata("5_archived", "42_story_foo").unwrap();
        assert_eq!(archived.0, "accept");
        assert_eq!(archived.1, "story-kit: accept 42_story_foo");

        // Non-pipeline directory names have no metadata.
        assert!(stage_metadata("unknown", "id").is_none());
    }
}

View File

@@ -18,6 +18,7 @@ use poem::Server;
use poem::listener::TcpListener;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::broadcast;
const DEFAULT_PORT: u16 = 3001;
@@ -88,11 +89,21 @@ async fn main() -> Result<(), std::io::Error> {
let port = resolve_port();
let agents = Arc::new(AgentPool::new(port));
// Filesystem watcher: broadcast channel for work/ pipeline changes.
let (watcher_tx, _) = broadcast::channel::<io::watcher::WatcherEvent>(1024);
if let Some(ref root) = *app_state.project_root.lock().unwrap() {
let work_dir = root.join(".story_kit").join("work");
if work_dir.is_dir() {
io::watcher::start_watcher(work_dir, root.clone(), watcher_tx.clone());
}
}
let ctx = AppContext {
state: app_state,
store,
workflow,
agents,
watcher_tx,
};
let app = build_routes(ctx);

View File

@@ -1,14 +1,5 @@
//! Workflow module: story gating and test result tracking.
//!
//! This module provides the in-memory primitives for:
//! - reading story metadata (front matter) for gating decisions
//! - tracking test run results
//! - evaluating acceptance readiness
//!
//! NOTE: This is a naive, local-only implementation that will be
//! refactored later into orchestration-aware components.
//! Workflow module: test result tracking and acceptance evaluation.
use crate::io::story_metadata::{StoryMetadata, TestPlanStatus};
use std::collections::HashMap;
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -24,11 +15,9 @@ pub struct TestCaseResult {
pub details: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TestRunSummary {
pub total: usize,
pub passed: usize,
pub failed: usize,
struct TestRunSummary {
total: usize,
failed: usize,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -45,44 +34,12 @@ pub struct StoryTestResults {
}
#[derive(Debug, Clone, Default)]
#[allow(dead_code)]
pub struct WorkflowState {
pub stories: HashMap<String, StoryMetadata>,
pub results: HashMap<String, StoryTestResults>,
pub coverage: HashMap<String, CoverageReport>,
}
#[allow(dead_code)]
impl WorkflowState {
pub fn upsert_story(&mut self, story_id: String, metadata: StoryMetadata) {
self.stories.insert(story_id, metadata);
}
pub fn load_story_metadata(&mut self, stories: Vec<(String, StoryMetadata)>) {
for (story_id, metadata) in stories {
self.stories.insert(story_id, metadata);
}
}
pub fn refresh_story_metadata(&mut self, story_id: String, metadata: StoryMetadata) -> bool {
match self.stories.get(&story_id) {
Some(existing) if existing == &metadata => false,
_ => {
self.stories.insert(story_id, metadata);
true
}
}
}
pub fn record_test_results(
&mut self,
story_id: String,
unit: Vec<TestCaseResult>,
integration: Vec<TestCaseResult>,
) {
let _ = self.record_test_results_validated(story_id, unit, integration);
}
pub fn record_test_results_validated(
&mut self,
story_id: String,
@@ -107,65 +64,23 @@ impl WorkflowState {
Ok(())
}
pub fn record_coverage(
&mut self,
story_id: String,
current_percent: f64,
threshold_percent: Option<f64>,
) {
let threshold = threshold_percent.unwrap_or(80.0);
let baseline = self
.coverage
.get(&story_id)
.map(|existing| existing.baseline_percent.unwrap_or(existing.current_percent));
self.coverage.insert(
story_id,
CoverageReport {
current_percent,
threshold_percent: threshold,
baseline_percent: baseline,
},
);
}
}
#[allow(dead_code)]
pub fn can_start_implementation(metadata: &StoryMetadata) -> Result<(), String> {
match metadata.test_plan {
Some(TestPlanStatus::Approved) => Ok(()),
Some(TestPlanStatus::WaitingForApproval) => {
Err("Test plan is waiting for approval; implementation is blocked.".to_string())
}
Some(TestPlanStatus::Unknown(ref value)) => Err(format!(
"Test plan state is unknown ({value}); implementation is blocked."
)),
None => Err("Missing test plan status; implementation is blocked.".to_string()),
}
}
pub fn summarize_results(results: &StoryTestResults) -> TestRunSummary {
fn summarize_results(results: &StoryTestResults) -> TestRunSummary {
let mut total = 0;
let mut passed = 0;
let mut failed = 0;
for test in results.unit.iter().chain(results.integration.iter()) {
total += 1;
match test.status {
TestStatus::Pass => passed += 1,
TestStatus::Fail => failed += 1,
if test.status == TestStatus::Fail {
failed += 1;
}
}
TestRunSummary {
total,
passed,
failed,
}
TestRunSummary { total, failed }
}
pub fn evaluate_acceptance(results: &StoryTestResults) -> AcceptanceDecision {
fn evaluate_acceptance(results: &StoryTestResults) -> AcceptanceDecision {
let summary = summarize_results(results);
if summary.failed == 0 && summary.total > 0 {
@@ -211,32 +126,6 @@ pub struct CoverageReport {
pub baseline_percent: Option<f64>,
}
/// Parse coverage percentage from a vitest coverage-summary.json string.
/// Expects JSON with `{"total": {"lines": {"pct": <number>}}}`.
pub fn parse_coverage_json(json_str: &str) -> Result<f64, String> {
    let parsed: serde_json::Value =
        serde_json::from_str(json_str).map_err(|e| format!("Invalid coverage JSON: {e}"))?;
    // JSON Pointer walks total → lines → pct in one step.
    parsed
        .pointer("/total/lines/pct")
        .and_then(serde_json::Value::as_f64)
        .ok_or_else(|| "Missing total.lines.pct in coverage JSON.".to_string())
}
/// Check whether coverage meets the threshold.
#[allow(dead_code)]
pub fn check_coverage_threshold(current: f64, threshold: f64) -> Result<(), String> {
    if current < threshold {
        return Err(format!(
            "Coverage below threshold ({current:.1}% < {threshold:.1}%)."
        ));
    }
    Ok(())
}
/// Evaluate acceptance with optional coverage data.
pub fn evaluate_acceptance_with_coverage(
results: &StoryTestResults,
@@ -269,43 +158,7 @@ pub fn evaluate_acceptance_with_coverage(
mod tests {
use super::*;
// === parse_coverage_json ===
#[test]
fn parses_valid_coverage_json() {
    // Only total.lines.pct is read; sibling keys are ignored.
    let json = r#"{"total":{"lines":{"total":100,"covered":85,"pct":85.0},"statements":{"pct":85.0}}}"#;
    assert_eq!(parse_coverage_json(json).unwrap(), 85.0);
}
#[test]
fn rejects_invalid_coverage_json() {
    // Non-JSON input yields an Err, not a panic.
    assert!(parse_coverage_json("not json").is_err());
}
#[test]
fn rejects_missing_total_lines_pct() {
    // Valid JSON that lacks total.lines.pct is still an error.
    let json = r#"{"total":{"branches":{"pct":90.0}}}"#;
    assert!(parse_coverage_json(json).is_err());
}
// === AC1: check_coverage_threshold ===
#[test]
fn coverage_threshold_passes_when_met() {
    // Meeting the threshold exactly counts as passing.
    assert!(check_coverage_threshold(80.0, 80.0).is_ok());
    assert!(check_coverage_threshold(95.5, 80.0).is_ok());
}
#[test]
fn coverage_threshold_fails_when_below() {
    // The error message includes both percentages to one decimal place.
    let result = check_coverage_threshold(72.3, 80.0);
    assert!(result.is_err());
    let err = result.unwrap_err();
    assert!(err.contains("72.3%"));
    assert!(err.contains("80.0%"));
}
// === AC2: evaluate_acceptance_with_coverage ===
// === evaluate_acceptance_with_coverage ===
#[test]
fn acceptance_blocked_by_coverage_below_threshold() {
@@ -403,49 +256,7 @@ mod tests {
assert!(decision.can_accept);
}
// === record_coverage ===
#[test]
fn record_coverage_first_time_has_no_baseline() {
    // The very first recording for a story carries no baseline.
    let mut state = WorkflowState::default();
    state.record_coverage("story-27".to_string(), 85.0, Some(80.0));
    let report = state.coverage.get("story-27").unwrap();
    assert_eq!(report.current_percent, 85.0);
    assert_eq!(report.threshold_percent, 80.0);
    assert_eq!(report.baseline_percent, None);
}
#[test]
fn record_coverage_subsequent_sets_baseline() {
    // A second recording preserves the first value as the baseline.
    let mut state = WorkflowState::default();
    state.record_coverage("story-27".to_string(), 85.0, Some(80.0));
    state.record_coverage("story-27".to_string(), 78.0, Some(80.0));
    let report = state.coverage.get("story-27").unwrap();
    assert_eq!(report.current_percent, 78.0);
    assert_eq!(report.baseline_percent, Some(85.0));
}
#[test]
fn record_coverage_default_threshold() {
    // Omitting the threshold falls back to the 80.0 default.
    let mut state = WorkflowState::default();
    state.record_coverage("story-27".to_string(), 90.0, None);
    let report = state.coverage.get("story-27").unwrap();
    assert_eq!(report.threshold_percent, 80.0);
}
#[test]
fn record_coverage_custom_threshold() {
    // An explicit threshold is stored verbatim.
    let mut state = WorkflowState::default();
    state.record_coverage("story-27".to_string(), 90.0, Some(95.0));
    let report = state.coverage.get("story-27").unwrap();
    assert_eq!(report.threshold_percent, 95.0);
}
// === evaluate_acceptance ===
#[test]
fn warns_when_multiple_tests_fail() {
@@ -478,32 +289,6 @@ mod tests {
);
}
#[test]
fn rejects_recording_multiple_failures() {
    // Submitting results with more than one failing unit test is rejected.
    let failing = |name: &str| TestCaseResult {
        name: name.to_string(),
        status: TestStatus::Fail,
        details: None,
    };
    let unit = vec![failing("unit-1"), failing("unit-2")];
    let integration = vec![TestCaseResult {
        name: "integration-1".to_string(),
        status: TestStatus::Pass,
        details: None,
    }];

    let mut state = WorkflowState::default();
    let result = state.record_test_results_validated("story-26".to_string(), unit, integration);
    assert!(result.is_err());
}
#[test]
fn accepts_when_all_tests_pass() {
let results = StoryTestResults {
@@ -557,49 +342,32 @@ mod tests {
assert!(decision.warning.is_none());
}
#[test]
fn summarize_results_counts_correctly() {
    // Two passes and one failure across both suites: total 3, passed 2, failed 1.
    let case = |name: &str, status: TestStatus| TestCaseResult {
        name: name.to_string(),
        status,
        details: None,
    };
    let results = StoryTestResults {
        unit: vec![case("u1", TestStatus::Pass), case("u2", TestStatus::Fail)],
        integration: vec![case("i1", TestStatus::Pass)],
    };

    let summary = summarize_results(&results);
    assert_eq!(summary.total, 3);
    assert_eq!(summary.passed, 2);
    assert_eq!(summary.failed, 1);
}
// === record_test_results_validated ===
/// Implementation may begin only once the story's test plan is `Approved`;
/// every other state (waiting, unknown, or absent) must block it.
///
/// NOTE(review): this region was a diff-merge artifact that interleaved this
/// test line-by-line with a second copy of `rejects_recording_multiple_failures`.
/// That second copy duplicated a function already defined earlier in this
/// module (a compile error), so it is removed and this test reconstructed.
#[test]
fn can_start_implementation_requires_approved_plan() {
    // Approved plan: implementation may start.
    let approved = StoryMetadata {
        name: Some("Test".to_string()),
        test_plan: Some(TestPlanStatus::Approved),
    };
    assert!(can_start_implementation(&approved).is_ok());

    // Still waiting for approval: blocked.
    let waiting = StoryMetadata {
        name: Some("Test".to_string()),
        test_plan: Some(TestPlanStatus::WaitingForApproval),
    };
    assert!(can_start_implementation(&waiting).is_err());

    // Unrecognized plan status: blocked.
    let unknown = StoryMetadata {
        name: Some("Test".to_string()),
        test_plan: Some(TestPlanStatus::Unknown("draft".to_string())),
    };
    assert!(can_start_implementation(&unknown).is_err());

    // No test plan recorded at all: blocked.
    let missing = StoryMetadata {
        name: Some("Test".to_string()),
        test_plan: None,
    };
    assert!(can_start_implementation(&missing).is_err());
}
#[test]
@@ -626,22 +394,4 @@ mod tests {
assert_eq!(state.results["story-29"].unit.len(), 1);
assert_eq!(state.results["story-29"].integration.len(), 1);
}
#[test]
fn refresh_story_metadata_returns_false_when_unchanged() {
    // First insert reports a change; re-inserting identical metadata does not;
    // genuinely different metadata reports a change again.
    let mut state = WorkflowState::default();
    let original = StoryMetadata {
        name: Some("Test".to_string()),
        test_plan: Some(TestPlanStatus::Approved),
    };
    assert!(state.refresh_story_metadata("s1".to_string(), original.clone()));
    assert!(!state.refresh_story_metadata("s1".to_string(), original));

    let renamed = StoryMetadata {
        name: Some("Updated".to_string()),
        test_plan: Some(TestPlanStatus::Approved),
    };
    assert!(state.refresh_story_metadata("s1".to_string(), renamed));
}
}