story-kit: merge 151_story_split_archived_into_done_and_archived_with_time_based_promotion

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Dave
2026-02-24 17:01:57 +00:00
parent 95ed60401f
commit aef022c74c
8 changed files with 212 additions and 49 deletions

View File

@@ -33,3 +33,4 @@ walkdir = { workspace = true }
tempfile = { workspace = true }
tokio-tungstenite = { workspace = true }
mockito = "1"
filetime = { workspace = true }

View File

@@ -2031,9 +2031,9 @@ fn item_source_dir(project_root: &Path, _item_id: &str) -> PathBuf {
project_root.join(".story_kit").join("work").join("1_upcoming")
}
/// Return the archive directory path for a work item (always work/5_archived/).
/// Return the done directory path for a work item (always work/5_done/).
fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf {
project_root.join(".story_kit").join("work").join("5_archived")
project_root.join(".story_kit").join("work").join("5_done")
}
/// Move a work item (story, bug, or spike) from `work/1_upcoming/` to `work/2_current/`.
@@ -2075,21 +2075,22 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
Ok(())
}
/// Move a story from `work/2_current/` to `work/5_archived/` and auto-commit.
/// Move a story from `work/2_current/` to `work/5_done/` and auto-commit.
///
/// * If the story is in `2_current/`, it is moved to `5_archived/` and committed.
/// * If the story is in `4_merge/`, it is moved to `5_archived/` and committed.
/// * If the story is already in `5_archived/`, this is a no-op (idempotent).
/// * If the story is not found in `2_current/`, `4_merge/`, or `5_archived/`, an error is returned.
/// * If the story is in `2_current/`, it is moved to `5_done/` and committed.
/// * If the story is in `4_merge/`, it is moved to `5_done/` and committed.
/// * If the story is already in `5_done/` or `6_archived/`, this is a no-op (idempotent).
/// * If the story is not found in `2_current/`, `4_merge/`, `5_done/`, or `6_archived/`, an error is returned.
pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let current_path = sk.join("2_current").join(format!("{story_id}.md"));
let merge_path = sk.join("4_merge").join(format!("{story_id}.md"));
let archived_dir = sk.join("5_archived");
let archived_path = archived_dir.join(format!("{story_id}.md"));
let done_dir = sk.join("5_done");
let done_path = done_dir.join(format!("{story_id}.md"));
let archived_path = sk.join("6_archived").join(format!("{story_id}.md"));
if archived_path.exists() {
// Already archived — idempotent, nothing to do.
if done_path.exists() || archived_path.exists() {
// Already in done or archived — idempotent, nothing to do.
return Ok(());
}
@@ -2104,17 +2105,17 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
));
};
std::fs::create_dir_all(&archived_dir)
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?;
std::fs::rename(&source_path, &archived_path)
.map_err(|e| format!("Failed to move story '{story_id}' to 5_archived/: {e}"))?;
std::fs::create_dir_all(&done_dir)
.map_err(|e| format!("Failed to create work/5_done/ directory: {e}"))?;
std::fs::rename(&source_path, &done_path)
.map_err(|e| format!("Failed to move story '{story_id}' to 5_done/: {e}"))?;
let from_dir = if source_path == current_path {
"work/2_current/"
} else {
"work/4_merge/"
};
slog!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_archived/");
slog!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_done/");
Ok(())
}
@@ -2192,11 +2193,11 @@ pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), Strin
Ok(())
}
/// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_archived/` and auto-commit.
/// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_done/` and auto-commit.
///
/// * If the bug is in `2_current/`, it is moved to `5_archived/` and committed.
/// * If the bug is still in `1_upcoming/` (never started), it is moved directly to `5_archived/`.
/// * If the bug is already in `5_archived/`, this is a no-op (idempotent).
/// * If the bug is in `2_current/`, it is moved to `5_done/` and committed.
/// * If the bug is still in `1_upcoming/` (never started), it is moved directly to `5_done/`.
/// * If the bug is already in `5_done/`, this is a no-op (idempotent).
/// * If the bug is not found anywhere, an error is returned.
pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
@@ -2220,12 +2221,12 @@ pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), Str
};
std::fs::create_dir_all(&archive_dir)
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?;
.map_err(|e| format!("Failed to create work/5_done/ directory: {e}"))?;
std::fs::rename(&source_path, &archive_path)
.map_err(|e| format!("Failed to move bug '{bug_id}' to 5_archived/: {e}"))?;
.map_err(|e| format!("Failed to move bug '{bug_id}' to 5_done/: {e}"))?;
slog!(
"[lifecycle] Closed bug '{bug_id}' → work/5_archived/"
"[lifecycle] Closed bug '{bug_id}' → work/5_done/"
);
Ok(())
@@ -3676,7 +3677,7 @@ mod tests {
close_bug_to_archive(root, "2_bug_test").unwrap();
assert!(!current.join("2_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/2_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_done/2_bug_test.md").exists());
}
#[test]
@@ -3691,7 +3692,7 @@ mod tests {
close_bug_to_archive(root, "3_bug_test").unwrap();
assert!(!upcoming.join("3_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/3_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_done/3_bug_test.md").exists());
}
#[test]
@@ -3944,7 +3945,7 @@ mod tests {
move_story_to_archived(root, "22_story_test").unwrap();
assert!(!merge_dir.join("22_story_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/22_story_test.md").exists());
assert!(root.join(".story_kit/work/5_done/22_story_test.md").exists());
}
#[test]
@@ -4036,10 +4037,10 @@ mod tests {
report.success || report.gate_output.contains("Failed to run") || !report.gates_passed,
"report should be coherent: {report:?}"
);
// Story should be archived if gates passed
// Story should be in done if gates passed
if report.story_archived {
let archived = repo.join(".story_kit/work/5_archived/23_test.md");
assert!(archived.exists(), "archived file should exist");
let done = repo.join(".story_kit/work/5_done/23_test.md");
assert!(done.exists(), "done file should exist");
}
}
@@ -5737,8 +5738,8 @@ theirs
assert_eq!(remaining.len(), 1, "only the other story's agent should remain");
assert_eq!(remaining[0].story_id, "61_story_other");
// Story file should be in 5_archived/
assert!(root.join(".story_kit/work/5_archived/60_story_cleanup.md").exists());
// Story file should be in 5_done/
assert!(root.join(".story_kit/work/5_done/60_story_cleanup.md").exists());
}
// ── bug 154: merge worktree installs frontend deps ────────────────────

View File

@@ -61,7 +61,7 @@ struct WorktreeListEntry {
path: String,
}
/// Returns true if the story file exists in `work/5_archived/`.
/// Returns true if the story file exists in `work/5_done/` or `work/6_archived/`.
///
/// Used to exclude agents for already-archived stories from the `list_agents`
/// response so the agents panel is not cluttered with old completed items on
@@ -139,7 +139,7 @@ impl AgentsApi {
/// List all agents with their status.
///
/// Agents for stories that have been archived (`work/5_archived/`) are
/// Agents for stories that have been completed (`work/5_done/` or `work/6_archived/`) are
/// excluded so the agents panel is not cluttered with old completed items
/// on frontend startup.
#[oai(path = "/agents", method = "get")]

View File

@@ -586,7 +586,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "accept_story",
"description": "Accept a story: moves it from current/ to archived/ and auto-commits to master.",
"description": "Accept a story: moves it from current/ to done/ and auto-commits to master.",
"inputSchema": {
"type": "object",
"properties": {
@@ -693,7 +693,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "merge_agent_work",
"description": "Trigger the mergemaster pipeline for a completed story: squash-merge the feature branch into master, run quality gates (cargo clippy, cargo test, pnpm build, pnpm test), archive the story from work/4_merge/ or work/2_current/ to work/5_archived/, and clean up the worktree and branch. Reports success/failure with details including any conflicts found and gate output.",
"description": "Trigger the mergemaster pipeline for a completed story: squash-merge the feature branch into master, run quality gates (cargo clippy, cargo test, pnpm build, pnpm test), move the story from work/4_merge/ or work/2_current/ to work/5_done/, and clean up the worktree and branch. Reports success/failure with details including any conflicts found and gate output.",
"inputSchema": {
"type": "object",
"properties": {
@@ -1301,7 +1301,7 @@ fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
ctx.agents.remove_agents_for_story(story_id);
Ok(format!(
"Story '{story_id}' accepted, moved to archived/, and committed to master."
"Story '{story_id}' accepted, moved to done/, and committed to master."
))
}
@@ -2251,7 +2251,7 @@ mod tests {
let result = tool_close_bug(&json!({"bug_id": "1_bug_crash"}), &ctx).unwrap();
assert!(result.contains("1_bug_crash"));
assert!(!bug_file.exists());
assert!(tmp.path().join(".story_kit/work/5_archived/1_bug_crash.md").exists());
assert!(tmp.path().join(".story_kit/work/5_done/1_bug_crash.md").exists());
}
// ── Spike lifecycle tool tests ─────────────────────────────────────────

View File

@@ -505,7 +505,7 @@ fn next_item_number(root: &std::path::Path) -> Result<u32, String> {
let work_base = root.join(".story_kit").join("work");
let mut max_num: u32 = 0;
for subdir in &["1_upcoming", "2_current", "3_qa", "4_merge", "5_archived"] {
for subdir in &["1_upcoming", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
let dir = work_base.join(subdir);
if !dir.exists() {
continue;
@@ -869,7 +869,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let upcoming = tmp.path().join(".story_kit/work/1_upcoming");
let current = tmp.path().join(".story_kit/work/2_current");
let archived = tmp.path().join(".story_kit/work/5_archived");
let archived = tmp.path().join(".story_kit/work/5_done");
fs::create_dir_all(&upcoming).unwrap();
fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&archived).unwrap();
@@ -1116,7 +1116,7 @@ mod tests {
fn next_item_number_scans_archived_too() {
let tmp = tempfile::tempdir().unwrap();
let upcoming = tmp.path().join(".story_kit/work/1_upcoming");
let archived = tmp.path().join(".story_kit/work/5_archived");
let archived = tmp.path().join(".story_kit/work/5_done");
fs::create_dir_all(&upcoming).unwrap();
fs::create_dir_all(&archived).unwrap();
fs::write(archived.join("5_bug_old.md"), "").unwrap();
@@ -1134,7 +1134,7 @@ mod tests {
fn list_bug_files_excludes_archive_subdir() {
let tmp = tempfile::tempdir().unwrap();
let upcoming_dir = tmp.path().join(".story_kit/work/1_upcoming");
let archived_dir = tmp.path().join(".story_kit/work/5_archived");
let archived_dir = tmp.path().join(".story_kit/work/5_done");
fs::create_dir_all(&upcoming_dir).unwrap();
fs::create_dir_all(&archived_dir).unwrap();
fs::write(upcoming_dir.join("1_bug_open.md"), "# Bug 1: Open Bug\n").unwrap();

View File

@@ -25,7 +25,7 @@ use serde::Serialize;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use std::time::{Duration, Instant};
use std::time::{Duration, Instant, SystemTime};
use tokio::sync::broadcast;
/// A lifecycle event emitted by the filesystem watcher.
@@ -68,7 +68,8 @@ fn stage_metadata(stage: &str, item_id: &str) -> Option<(&'static str, String)>
"2_current" => ("start", format!("story-kit: start {item_id}")),
"3_qa" => ("qa", format!("story-kit: queue {item_id} for QA")),
"4_merge" => ("merge", format!("story-kit: queue {item_id} for merge")),
"5_archived" => ("accept", format!("story-kit: accept {item_id}")),
"5_done" => ("done", format!("story-kit: done {item_id}")),
"6_archived" => ("accept", format!("story-kit: accept {item_id}")),
_ => return None,
};
Some((action, prefix))
@@ -96,7 +97,7 @@ fn stage_for_path(path: &Path) -> Option<String> {
.parent()
.and_then(|p| p.file_name())
.and_then(|n| n.to_str())?;
matches!(stage, "1_upcoming" | "2_current" | "3_qa" | "4_merge" | "5_archived")
matches!(stage, "1_upcoming" | "2_current" | "3_qa" | "4_merge" | "5_done" | "6_archived")
.then(|| stage.to_string())
}
@@ -199,6 +200,66 @@ fn flush_pending(
}
}
/// Scan `work/5_done/` and move any `.md` files whose mtime is older than
/// `DONE_RETENTION` to `work/6_archived/`.
///
/// Runs on the watcher thread at a fixed cadence; the renames it performs
/// surface as ordinary watcher events, so `flush_pending` commits and
/// broadcasts them like any other stage change.
fn sweep_done_to_archived(work_dir: &Path) {
    // Items linger in 5_done/ for four hours before being promoted.
    const DONE_RETENTION: Duration = Duration::from_secs(4 * 60 * 60);

    let done_dir = work_dir.join("5_done");
    if !done_dir.exists() {
        return;
    }
    let entries = match std::fs::read_dir(&done_dir) {
        Ok(iter) => iter,
        Err(e) => {
            slog!("[watcher] sweep: failed to read 5_done/: {e}");
            return;
        }
    };

    let archived_dir = work_dir.join("6_archived");
    for entry in entries.flatten() {
        let path = entry.path();
        // Only work-item markdown files participate in promotion.
        if path.extension().is_none_or(|ext| ext != "md") {
            continue;
        }
        // Unreadable metadata: skip this entry rather than abort the sweep.
        let Ok(mtime) = entry.metadata().and_then(|m| m.modified()) else {
            continue;
        };
        // duration_since errs if the clock stepped backwards; treat as age 0.
        let age = SystemTime::now().duration_since(mtime).unwrap_or_default();
        if age < DONE_RETENTION {
            continue;
        }
        if let Err(e) = std::fs::create_dir_all(&archived_dir) {
            slog!("[watcher] sweep: failed to create 6_archived/: {e}");
            continue;
        }
        let dest = archived_dir.join(entry.file_name());
        match std::fs::rename(&path, &dest) {
            Ok(()) => {
                let item_id = path
                    .file_stem()
                    .and_then(|s| s.to_str())
                    .unwrap_or("unknown");
                slog!("[watcher] sweep: promoted {item_id} → 6_archived/");
            }
            Err(e) => {
                slog!("[watcher] sweep: failed to move {}: {e}", path.display());
            }
        }
    }
}
/// Start the filesystem watcher on a dedicated OS thread.
///
/// `work_dir` — absolute path to `.story_kit/work/` (watched recursively).
@@ -239,12 +300,19 @@ pub fn start_watcher(
slog!("[watcher] watching {}", work_dir.display());
const DEBOUNCE: Duration = Duration::from_millis(300);
/// How often to check 5_done/ for items to promote to 6_archived/.
const SWEEP_INTERVAL: Duration = Duration::from_secs(60);
// Map path → stage for pending (uncommitted) work-item changes.
let mut pending: HashMap<PathBuf, String> = HashMap::new();
// Whether a config file change is pending in the current debounce window.
let mut config_changed_pending = false;
let mut deadline: Option<Instant> = None;
// Track when we last swept 5_done/ → 6_archived/.
// Initialise to "now minus interval" so the first sweep runs on startup.
let mut last_sweep = Instant::now()
.checked_sub(SWEEP_INTERVAL)
.unwrap_or_else(Instant::now);
loop {
// How long until the debounce window closes (or wait for next event).
@@ -299,6 +367,13 @@ pub fn start_watcher(
config_changed_pending = false;
}
deadline = None;
// Periodically promote old items from 5_done/ to 6_archived/.
let now = Instant::now();
if now.duration_since(last_sweep) >= SWEEP_INTERVAL {
last_sweep = now;
sweep_done_to_archived(&work_dir);
}
}
}
});
@@ -422,7 +497,8 @@ mod tests {
("1_upcoming", "create", "story-kit: create 10_story_x"),
("3_qa", "qa", "story-kit: queue 10_story_x for QA"),
("4_merge", "merge", "story-kit: queue 10_story_x for merge"),
("5_archived", "accept", "story-kit: accept 10_story_x"),
("5_done", "done", "story-kit: done 10_story_x"),
("6_archived", "accept", "story-kit: accept 10_story_x"),
];
for (stage, expected_action, expected_msg) in stages {
@@ -530,8 +606,12 @@ mod tests {
Some("2_current".to_string())
);
assert_eq!(
stage_for_path(&base.join("5_archived/10_bug_bar.md")),
Some("5_archived".to_string())
stage_for_path(&base.join("5_done/10_bug_bar.md")),
Some("5_done".to_string())
);
assert_eq!(
stage_for_path(&base.join("6_archived/10_bug_bar.md")),
Some("6_archived".to_string())
);
assert_eq!(stage_for_path(&base.join("other/file.md")), None);
assert_eq!(
@@ -571,7 +651,11 @@ mod tests {
assert_eq!(action, "start");
assert_eq!(msg, "story-kit: start 42_story_foo");
let (action, msg) = stage_metadata("5_archived", "42_story_foo").unwrap();
let (action, msg) = stage_metadata("5_done", "42_story_foo").unwrap();
assert_eq!(action, "done");
assert_eq!(msg, "story-kit: done 42_story_foo");
let (action, msg) = stage_metadata("6_archived", "42_story_foo").unwrap();
assert_eq!(action, "accept");
assert_eq!(msg, "story-kit: accept 42_story_foo");
@@ -615,4 +699,48 @@ mod tests {
let other_root_config = PathBuf::from("/other/.story_kit/project.toml");
assert!(!is_config_file(&other_root_config, &git_root));
}
// ── sweep_done_to_archived ────────────────────────────────────────────────
#[test]
fn sweep_moves_old_items_to_archived() {
    // An item in 5_done/ whose mtime predates the 4-hour retention window
    // must be promoted to 6_archived/ by a single sweep.
    let tmp = TempDir::new().unwrap();
    let work = tmp.path().join(".story_kit").join("work");
    let done = work.join("5_done");
    fs::create_dir_all(&done).unwrap();

    let item = done.join("10_story_old.md");
    fs::write(&item, "---\nname: old\n---\n").unwrap();
    // Backdate the mtime to 5 hours ago, safely beyond the retention window.
    let five_hours_ago = SystemTime::now()
        .checked_sub(Duration::from_secs(5 * 60 * 60))
        .unwrap();
    filetime::set_file_mtime(&item, filetime::FileTime::from_system_time(five_hours_ago))
        .unwrap();

    sweep_done_to_archived(&work);

    assert!(!item.exists(), "old item should be moved out of 5_done/");
    assert!(
        work.join("6_archived").join("10_story_old.md").exists(),
        "old item should appear in 6_archived/"
    );
}
#[test]
fn sweep_keeps_recent_items_in_done() {
    // A freshly written item (mtime = now) is inside the retention window,
    // so a sweep must leave it where it is.
    let tmp = TempDir::new().unwrap();
    let work = tmp.path().join(".story_kit").join("work");
    let done = work.join("5_done");
    fs::create_dir_all(&done).unwrap();

    let item = done.join("11_story_new.md");
    fs::write(&item, "---\nname: new\n---\n").unwrap();

    sweep_done_to_archived(&work);

    assert!(item.exists(), "recent item should remain in 5_done/");
}
}