story-kit: merge 151_story_split_archived_into_done_and_archived_with_time_based_promotion

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Dave
2026-02-24 17:01:57 +00:00
parent 95ed60401f
commit aef022c74c
8 changed files with 212 additions and 49 deletions

34
Cargo.lock generated
View File

@@ -471,6 +471,17 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "filetime"
version = "0.2.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db"
dependencies = [
"cfg-if",
"libc",
"libredox",
]
[[package]] [[package]]
name = "find-msvc-tools" name = "find-msvc-tools"
version = "0.1.9" version = "0.1.9"
@@ -1158,6 +1169,17 @@ version = "0.2.182"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112"
[[package]]
name = "libredox"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616"
dependencies = [
"bitflags 2.11.0",
"libc",
"redox_syscall 0.7.2",
]
[[package]] [[package]]
name = "linux-raw-sys" name = "linux-raw-sys"
version = "0.11.0" version = "0.11.0"
@@ -1374,7 +1396,7 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall", "redox_syscall 0.5.18",
"smallvec", "smallvec",
"windows-link 0.2.1", "windows-link 0.2.1",
] ]
@@ -1700,6 +1722,15 @@ dependencies = [
"bitflags 2.11.0", "bitflags 2.11.0",
] ]
[[package]]
name = "redox_syscall"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d94dd2f7cd932d4dc02cc8b2b50dfd38bd079a4e5d79198b99743d7fcf9a4b4"
dependencies = [
"bitflags 2.11.0",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.12.3" version = "1.12.3"
@@ -2190,6 +2221,7 @@ dependencies = [
"bytes 1.11.1", "bytes 1.11.1",
"chrono", "chrono",
"eventsource-stream", "eventsource-stream",
"filetime",
"futures", "futures",
"homedir", "homedir",
"ignore", "ignore",

View File

@@ -28,3 +28,4 @@ toml = "1.0.3+spec-1.1.0"
uuid = { version = "1.21.0", features = ["v4", "serde"] } uuid = { version = "1.21.0", features = ["v4", "serde"] }
tokio-tungstenite = "0.28.0" tokio-tungstenite = "0.28.0"
walkdir = "2.5.0" walkdir = "2.5.0"
filetime = "0.2"

View File

@@ -33,3 +33,4 @@ walkdir = { workspace = true }
tempfile = { workspace = true } tempfile = { workspace = true }
tokio-tungstenite = { workspace = true } tokio-tungstenite = { workspace = true }
mockito = "1" mockito = "1"
filetime = { workspace = true }

View File

@@ -2031,9 +2031,9 @@ fn item_source_dir(project_root: &Path, _item_id: &str) -> PathBuf {
project_root.join(".story_kit").join("work").join("1_upcoming") project_root.join(".story_kit").join("work").join("1_upcoming")
} }
/// Return the archive directory path for a work item (always work/5_archived/). /// Return the done directory path for a work item (always work/5_done/).
fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf { fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf {
project_root.join(".story_kit").join("work").join("5_archived") project_root.join(".story_kit").join("work").join("5_done")
} }
/// Move a work item (story, bug, or spike) from `work/1_upcoming/` to `work/2_current/`. /// Move a work item (story, bug, or spike) from `work/1_upcoming/` to `work/2_current/`.
@@ -2075,21 +2075,22 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
Ok(()) Ok(())
} }
/// Move a story from `work/2_current/` to `work/5_archived/` and auto-commit. /// Move a story from `work/2_current/` to `work/5_done/` and auto-commit.
/// ///
/// * If the story is in `2_current/`, it is moved to `5_archived/` and committed. /// * If the story is in `2_current/`, it is moved to `5_done/` and committed.
/// * If the story is in `4_merge/`, it is moved to `5_archived/` and committed. /// * If the story is in `4_merge/`, it is moved to `5_done/` and committed.
/// * If the story is already in `5_archived/`, this is a no-op (idempotent). /// * If the story is already in `5_done/` or `6_archived/`, this is a no-op (idempotent).
/// * If the story is not found in `2_current/`, `4_merge/`, or `5_archived/`, an error is returned. /// * If the story is not found in `2_current/`, `4_merge/`, `5_done/`, or `6_archived/`, an error is returned.
pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(), String> { pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work"); let sk = project_root.join(".story_kit").join("work");
let current_path = sk.join("2_current").join(format!("{story_id}.md")); let current_path = sk.join("2_current").join(format!("{story_id}.md"));
let merge_path = sk.join("4_merge").join(format!("{story_id}.md")); let merge_path = sk.join("4_merge").join(format!("{story_id}.md"));
let archived_dir = sk.join("5_archived"); let done_dir = sk.join("5_done");
let archived_path = archived_dir.join(format!("{story_id}.md")); let done_path = done_dir.join(format!("{story_id}.md"));
let archived_path = sk.join("6_archived").join(format!("{story_id}.md"));
if archived_path.exists() { if done_path.exists() || archived_path.exists() {
// Already archived — idempotent, nothing to do. // Already in done or archived — idempotent, nothing to do.
return Ok(()); return Ok(());
} }
@@ -2104,17 +2105,17 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
)); ));
}; };
std::fs::create_dir_all(&archived_dir) std::fs::create_dir_all(&done_dir)
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?; .map_err(|e| format!("Failed to create work/5_done/ directory: {e}"))?;
std::fs::rename(&source_path, &archived_path) std::fs::rename(&source_path, &done_path)
.map_err(|e| format!("Failed to move story '{story_id}' to 5_archived/: {e}"))?; .map_err(|e| format!("Failed to move story '{story_id}' to 5_done/: {e}"))?;
let from_dir = if source_path == current_path { let from_dir = if source_path == current_path {
"work/2_current/" "work/2_current/"
} else { } else {
"work/4_merge/" "work/4_merge/"
}; };
slog!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_archived/"); slog!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_done/");
Ok(()) Ok(())
} }
@@ -2192,11 +2193,11 @@ pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), Strin
Ok(()) Ok(())
} }
/// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_archived/` and auto-commit. /// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_done/` and auto-commit.
/// ///
/// * If the bug is in `2_current/`, it is moved to `5_archived/` and committed. /// * If the bug is in `2_current/`, it is moved to `5_done/` and committed.
/// * If the bug is still in `1_upcoming/` (never started), it is moved directly to `5_archived/`. /// * If the bug is still in `1_upcoming/` (never started), it is moved directly to `5_done/`.
/// * If the bug is already in `5_archived/`, this is a no-op (idempotent). /// * If the bug is already in `5_done/`, this is a no-op (idempotent).
/// * If the bug is not found anywhere, an error is returned. /// * If the bug is not found anywhere, an error is returned.
pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> { pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work"); let sk = project_root.join(".story_kit").join("work");
@@ -2220,12 +2221,12 @@ pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), Str
}; };
std::fs::create_dir_all(&archive_dir) std::fs::create_dir_all(&archive_dir)
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?; .map_err(|e| format!("Failed to create work/5_done/ directory: {e}"))?;
std::fs::rename(&source_path, &archive_path) std::fs::rename(&source_path, &archive_path)
.map_err(|e| format!("Failed to move bug '{bug_id}' to 5_archived/: {e}"))?; .map_err(|e| format!("Failed to move bug '{bug_id}' to 5_done/: {e}"))?;
slog!( slog!(
"[lifecycle] Closed bug '{bug_id}' → work/5_archived/" "[lifecycle] Closed bug '{bug_id}' → work/5_done/"
); );
Ok(()) Ok(())
@@ -3676,7 +3677,7 @@ mod tests {
close_bug_to_archive(root, "2_bug_test").unwrap(); close_bug_to_archive(root, "2_bug_test").unwrap();
assert!(!current.join("2_bug_test.md").exists()); assert!(!current.join("2_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/2_bug_test.md").exists()); assert!(root.join(".story_kit/work/5_done/2_bug_test.md").exists());
} }
#[test] #[test]
@@ -3691,7 +3692,7 @@ mod tests {
close_bug_to_archive(root, "3_bug_test").unwrap(); close_bug_to_archive(root, "3_bug_test").unwrap();
assert!(!upcoming.join("3_bug_test.md").exists()); assert!(!upcoming.join("3_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/3_bug_test.md").exists()); assert!(root.join(".story_kit/work/5_done/3_bug_test.md").exists());
} }
#[test] #[test]
@@ -3944,7 +3945,7 @@ mod tests {
move_story_to_archived(root, "22_story_test").unwrap(); move_story_to_archived(root, "22_story_test").unwrap();
assert!(!merge_dir.join("22_story_test.md").exists()); assert!(!merge_dir.join("22_story_test.md").exists());
assert!(root.join(".story_kit/work/5_archived/22_story_test.md").exists()); assert!(root.join(".story_kit/work/5_done/22_story_test.md").exists());
} }
#[test] #[test]
@@ -4036,10 +4037,10 @@ mod tests {
report.success || report.gate_output.contains("Failed to run") || !report.gates_passed, report.success || report.gate_output.contains("Failed to run") || !report.gates_passed,
"report should be coherent: {report:?}" "report should be coherent: {report:?}"
); );
// Story should be archived if gates passed // Story should be in done if gates passed
if report.story_archived { if report.story_archived {
let archived = repo.join(".story_kit/work/5_archived/23_test.md"); let done = repo.join(".story_kit/work/5_done/23_test.md");
assert!(archived.exists(), "archived file should exist"); assert!(done.exists(), "done file should exist");
} }
} }
@@ -5737,8 +5738,8 @@ theirs
assert_eq!(remaining.len(), 1, "only the other story's agent should remain"); assert_eq!(remaining.len(), 1, "only the other story's agent should remain");
assert_eq!(remaining[0].story_id, "61_story_other"); assert_eq!(remaining[0].story_id, "61_story_other");
// Story file should be in 5_archived/ // Story file should be in 5_done/
assert!(root.join(".story_kit/work/5_archived/60_story_cleanup.md").exists()); assert!(root.join(".story_kit/work/5_done/60_story_cleanup.md").exists());
} }
// ── bug 154: merge worktree installs frontend deps ──────────────────── // ── bug 154: merge worktree installs frontend deps ────────────────────

View File

@@ -61,7 +61,7 @@ struct WorktreeListEntry {
path: String, path: String,
} }
/// Returns true if the story file exists in `work/5_archived/`. /// Returns true if the story file exists in `work/5_done/` or `work/6_archived/`.
/// ///
/// Used to exclude agents for already-archived stories from the `list_agents` /// Used to exclude agents for already-archived stories from the `list_agents`
/// response so the agents panel is not cluttered with old completed items on /// response so the agents panel is not cluttered with old completed items on
@@ -139,7 +139,7 @@ impl AgentsApi {
/// List all agents with their status. /// List all agents with their status.
/// ///
/// Agents for stories that have been archived (`work/5_archived/`) are /// Agents for stories that have been completed (`work/5_done/` or `work/6_archived/`) are
/// excluded so the agents panel is not cluttered with old completed items /// excluded so the agents panel is not cluttered with old completed items
/// on frontend startup. /// on frontend startup.
#[oai(path = "/agents", method = "get")] #[oai(path = "/agents", method = "get")]

View File

@@ -586,7 +586,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
}, },
{ {
"name": "accept_story", "name": "accept_story",
"description": "Accept a story: moves it from current/ to archived/ and auto-commits to master.", "description": "Accept a story: moves it from current/ to done/ and auto-commits to master.",
"inputSchema": { "inputSchema": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -693,7 +693,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
}, },
{ {
"name": "merge_agent_work", "name": "merge_agent_work",
"description": "Trigger the mergemaster pipeline for a completed story: squash-merge the feature branch into master, run quality gates (cargo clippy, cargo test, pnpm build, pnpm test), archive the story from work/4_merge/ or work/2_current/ to work/5_archived/, and clean up the worktree and branch. Reports success/failure with details including any conflicts found and gate output.", "description": "Trigger the mergemaster pipeline for a completed story: squash-merge the feature branch into master, run quality gates (cargo clippy, cargo test, pnpm build, pnpm test), move the story from work/4_merge/ or work/2_current/ to work/5_done/, and clean up the worktree and branch. Reports success/failure with details including any conflicts found and gate output.",
"inputSchema": { "inputSchema": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -1301,7 +1301,7 @@ fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
ctx.agents.remove_agents_for_story(story_id); ctx.agents.remove_agents_for_story(story_id);
Ok(format!( Ok(format!(
"Story '{story_id}' accepted, moved to archived/, and committed to master." "Story '{story_id}' accepted, moved to done/, and committed to master."
)) ))
} }
@@ -2251,7 +2251,7 @@ mod tests {
let result = tool_close_bug(&json!({"bug_id": "1_bug_crash"}), &ctx).unwrap(); let result = tool_close_bug(&json!({"bug_id": "1_bug_crash"}), &ctx).unwrap();
assert!(result.contains("1_bug_crash")); assert!(result.contains("1_bug_crash"));
assert!(!bug_file.exists()); assert!(!bug_file.exists());
assert!(tmp.path().join(".story_kit/work/5_archived/1_bug_crash.md").exists()); assert!(tmp.path().join(".story_kit/work/5_done/1_bug_crash.md").exists());
} }
// ── Spike lifecycle tool tests ───────────────────────────────────────── // ── Spike lifecycle tool tests ─────────────────────────────────────────

View File

@@ -505,7 +505,7 @@ fn next_item_number(root: &std::path::Path) -> Result<u32, String> {
let work_base = root.join(".story_kit").join("work"); let work_base = root.join(".story_kit").join("work");
let mut max_num: u32 = 0; let mut max_num: u32 = 0;
for subdir in &["1_upcoming", "2_current", "3_qa", "4_merge", "5_archived"] { for subdir in &["1_upcoming", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
let dir = work_base.join(subdir); let dir = work_base.join(subdir);
if !dir.exists() { if !dir.exists() {
continue; continue;
@@ -869,7 +869,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap(); let tmp = tempfile::tempdir().unwrap();
let upcoming = tmp.path().join(".story_kit/work/1_upcoming"); let upcoming = tmp.path().join(".story_kit/work/1_upcoming");
let current = tmp.path().join(".story_kit/work/2_current"); let current = tmp.path().join(".story_kit/work/2_current");
let archived = tmp.path().join(".story_kit/work/5_archived"); let archived = tmp.path().join(".story_kit/work/5_done");
fs::create_dir_all(&upcoming).unwrap(); fs::create_dir_all(&upcoming).unwrap();
fs::create_dir_all(&current).unwrap(); fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&archived).unwrap(); fs::create_dir_all(&archived).unwrap();
@@ -1116,7 +1116,7 @@ mod tests {
fn next_item_number_scans_archived_too() { fn next_item_number_scans_archived_too() {
let tmp = tempfile::tempdir().unwrap(); let tmp = tempfile::tempdir().unwrap();
let upcoming = tmp.path().join(".story_kit/work/1_upcoming"); let upcoming = tmp.path().join(".story_kit/work/1_upcoming");
let archived = tmp.path().join(".story_kit/work/5_archived"); let archived = tmp.path().join(".story_kit/work/5_done");
fs::create_dir_all(&upcoming).unwrap(); fs::create_dir_all(&upcoming).unwrap();
fs::create_dir_all(&archived).unwrap(); fs::create_dir_all(&archived).unwrap();
fs::write(archived.join("5_bug_old.md"), "").unwrap(); fs::write(archived.join("5_bug_old.md"), "").unwrap();
@@ -1134,7 +1134,7 @@ mod tests {
fn list_bug_files_excludes_archive_subdir() { fn list_bug_files_excludes_archive_subdir() {
let tmp = tempfile::tempdir().unwrap(); let tmp = tempfile::tempdir().unwrap();
let upcoming_dir = tmp.path().join(".story_kit/work/1_upcoming"); let upcoming_dir = tmp.path().join(".story_kit/work/1_upcoming");
let archived_dir = tmp.path().join(".story_kit/work/5_archived"); let archived_dir = tmp.path().join(".story_kit/work/5_done");
fs::create_dir_all(&upcoming_dir).unwrap(); fs::create_dir_all(&upcoming_dir).unwrap();
fs::create_dir_all(&archived_dir).unwrap(); fs::create_dir_all(&archived_dir).unwrap();
fs::write(upcoming_dir.join("1_bug_open.md"), "# Bug 1: Open Bug\n").unwrap(); fs::write(upcoming_dir.join("1_bug_open.md"), "# Bug 1: Open Bug\n").unwrap();

View File

@@ -25,7 +25,7 @@ use serde::Serialize;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::mpsc; use std::sync::mpsc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant, SystemTime};
use tokio::sync::broadcast; use tokio::sync::broadcast;
/// A lifecycle event emitted by the filesystem watcher. /// A lifecycle event emitted by the filesystem watcher.
@@ -68,7 +68,8 @@ fn stage_metadata(stage: &str, item_id: &str) -> Option<(&'static str, String)>
"2_current" => ("start", format!("story-kit: start {item_id}")), "2_current" => ("start", format!("story-kit: start {item_id}")),
"3_qa" => ("qa", format!("story-kit: queue {item_id} for QA")), "3_qa" => ("qa", format!("story-kit: queue {item_id} for QA")),
"4_merge" => ("merge", format!("story-kit: queue {item_id} for merge")), "4_merge" => ("merge", format!("story-kit: queue {item_id} for merge")),
"5_archived" => ("accept", format!("story-kit: accept {item_id}")), "5_done" => ("done", format!("story-kit: done {item_id}")),
"6_archived" => ("accept", format!("story-kit: accept {item_id}")),
_ => return None, _ => return None,
}; };
Some((action, prefix)) Some((action, prefix))
@@ -96,7 +97,7 @@ fn stage_for_path(path: &Path) -> Option<String> {
.parent() .parent()
.and_then(|p| p.file_name()) .and_then(|p| p.file_name())
.and_then(|n| n.to_str())?; .and_then(|n| n.to_str())?;
matches!(stage, "1_upcoming" | "2_current" | "3_qa" | "4_merge" | "5_archived") matches!(stage, "1_upcoming" | "2_current" | "3_qa" | "4_merge" | "5_done" | "6_archived")
.then(|| stage.to_string()) .then(|| stage.to_string())
} }
@@ -199,6 +200,66 @@ fn flush_pending(
} }
} }
/// Promote stale items from `work/5_done/` into `work/6_archived/`.
///
/// Any `.md` file in `5_done/` whose modification time is at least
/// `DONE_RETENTION` old is renamed into `6_archived/`. Invoked periodically
/// from the watcher thread; the renames themselves surface as ordinary
/// watcher events, which `flush_pending` commits and broadcasts.
fn sweep_done_to_archived(work_dir: &Path) {
    const DONE_RETENTION: Duration = Duration::from_secs(4 * 60 * 60);

    let done_dir = work_dir.join("5_done");
    if !done_dir.exists() {
        return;
    }

    let entries = match std::fs::read_dir(&done_dir) {
        Ok(iter) => iter,
        Err(e) => {
            slog!("[watcher] sweep: failed to read 5_done/: {e}");
            return;
        }
    };
    let archived_dir = work_dir.join("6_archived");

    for dir_entry in entries.flatten() {
        let src = dir_entry.path();

        // Only work-item markdown files participate in the sweep.
        if !src.extension().is_some_and(|ext| ext == "md") {
            continue;
        }

        // Entries without a readable mtime are skipped this round.
        let Ok(modified) = dir_entry.metadata().and_then(|m| m.modified()) else {
            continue;
        };
        let age = SystemTime::now().duration_since(modified).unwrap_or_default();
        if age < DONE_RETENTION {
            continue;
        }

        if let Err(e) = std::fs::create_dir_all(&archived_dir) {
            slog!("[watcher] sweep: failed to create 6_archived/: {e}");
            continue;
        }
        let dest = archived_dir.join(dir_entry.file_name());
        match std::fs::rename(&src, &dest) {
            Ok(()) => {
                let item_id = src.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
                slog!("[watcher] sweep: promoted {item_id} → 6_archived/");
            }
            Err(e) => {
                slog!("[watcher] sweep: failed to move {}: {e}", src.display());
            }
        }
    }
}
/// Start the filesystem watcher on a dedicated OS thread. /// Start the filesystem watcher on a dedicated OS thread.
/// ///
/// `work_dir` — absolute path to `.story_kit/work/` (watched recursively). /// `work_dir` — absolute path to `.story_kit/work/` (watched recursively).
@@ -239,12 +300,19 @@ pub fn start_watcher(
slog!("[watcher] watching {}", work_dir.display()); slog!("[watcher] watching {}", work_dir.display());
const DEBOUNCE: Duration = Duration::from_millis(300); const DEBOUNCE: Duration = Duration::from_millis(300);
/// How often to check 5_done/ for items to promote to 6_archived/.
const SWEEP_INTERVAL: Duration = Duration::from_secs(60);
// Map path → stage for pending (uncommitted) work-item changes. // Map path → stage for pending (uncommitted) work-item changes.
let mut pending: HashMap<PathBuf, String> = HashMap::new(); let mut pending: HashMap<PathBuf, String> = HashMap::new();
// Whether a config file change is pending in the current debounce window. // Whether a config file change is pending in the current debounce window.
let mut config_changed_pending = false; let mut config_changed_pending = false;
let mut deadline: Option<Instant> = None; let mut deadline: Option<Instant> = None;
// Track when we last swept 5_done/ → 6_archived/.
// Initialise to "now minus interval" so the first sweep runs on startup.
let mut last_sweep = Instant::now()
.checked_sub(SWEEP_INTERVAL)
.unwrap_or_else(Instant::now);
loop { loop {
// How long until the debounce window closes (or wait for next event). // How long until the debounce window closes (or wait for next event).
@@ -299,6 +367,13 @@ pub fn start_watcher(
config_changed_pending = false; config_changed_pending = false;
} }
deadline = None; deadline = None;
// Periodically promote old items from 5_done/ to 6_archived/.
let now = Instant::now();
if now.duration_since(last_sweep) >= SWEEP_INTERVAL {
last_sweep = now;
sweep_done_to_archived(&work_dir);
}
} }
} }
}); });
@@ -422,7 +497,8 @@ mod tests {
("1_upcoming", "create", "story-kit: create 10_story_x"), ("1_upcoming", "create", "story-kit: create 10_story_x"),
("3_qa", "qa", "story-kit: queue 10_story_x for QA"), ("3_qa", "qa", "story-kit: queue 10_story_x for QA"),
("4_merge", "merge", "story-kit: queue 10_story_x for merge"), ("4_merge", "merge", "story-kit: queue 10_story_x for merge"),
("5_archived", "accept", "story-kit: accept 10_story_x"), ("5_done", "done", "story-kit: done 10_story_x"),
("6_archived", "accept", "story-kit: accept 10_story_x"),
]; ];
for (stage, expected_action, expected_msg) in stages { for (stage, expected_action, expected_msg) in stages {
@@ -530,8 +606,12 @@ mod tests {
Some("2_current".to_string()) Some("2_current".to_string())
); );
assert_eq!( assert_eq!(
stage_for_path(&base.join("5_archived/10_bug_bar.md")), stage_for_path(&base.join("5_done/10_bug_bar.md")),
Some("5_archived".to_string()) Some("5_done".to_string())
);
assert_eq!(
stage_for_path(&base.join("6_archived/10_bug_bar.md")),
Some("6_archived".to_string())
); );
assert_eq!(stage_for_path(&base.join("other/file.md")), None); assert_eq!(stage_for_path(&base.join("other/file.md")), None);
assert_eq!( assert_eq!(
@@ -571,7 +651,11 @@ mod tests {
assert_eq!(action, "start"); assert_eq!(action, "start");
assert_eq!(msg, "story-kit: start 42_story_foo"); assert_eq!(msg, "story-kit: start 42_story_foo");
let (action, msg) = stage_metadata("5_archived", "42_story_foo").unwrap(); let (action, msg) = stage_metadata("5_done", "42_story_foo").unwrap();
assert_eq!(action, "done");
assert_eq!(msg, "story-kit: done 42_story_foo");
let (action, msg) = stage_metadata("6_archived", "42_story_foo").unwrap();
assert_eq!(action, "accept"); assert_eq!(action, "accept");
assert_eq!(msg, "story-kit: accept 42_story_foo"); assert_eq!(msg, "story-kit: accept 42_story_foo");
@@ -615,4 +699,48 @@ mod tests {
let other_root_config = PathBuf::from("/other/.story_kit/project.toml"); let other_root_config = PathBuf::from("/other/.story_kit/project.toml");
assert!(!is_config_file(&other_root_config, &git_root)); assert!(!is_config_file(&other_root_config, &git_root));
} }
// ── sweep_done_to_archived ────────────────────────────────────────────────
#[test]
fn sweep_moves_old_items_to_archived() {
    let tmp = TempDir::new().unwrap();
    let work_dir = tmp.path().join(".story_kit").join("work");
    let done_dir = work_dir.join("5_done");
    let archived_dir = work_dir.join("6_archived");
    fs::create_dir_all(&done_dir).unwrap();

    // Drop an item into 5_done/ and push its mtime 5 hours into the past,
    // comfortably beyond the 4-hour retention window.
    let story = done_dir.join("10_story_old.md");
    fs::write(&story, "---\nname: old\n---\n").unwrap();
    let backdated = SystemTime::now()
        .checked_sub(Duration::from_secs(5 * 60 * 60))
        .unwrap();
    let stamp = filetime::FileTime::from_system_time(backdated);
    filetime::set_file_mtime(&story, stamp).unwrap();

    sweep_done_to_archived(&work_dir);

    assert!(!story.exists(), "old item should be moved out of 5_done/");
    assert!(
        archived_dir.join("10_story_old.md").exists(),
        "old item should appear in 6_archived/"
    );
}
#[test]
fn sweep_keeps_recent_items_in_done() {
    let tmp = TempDir::new().unwrap();
    let work_dir = tmp.path().join(".story_kit").join("work");
    let done_dir = work_dir.join("5_done");
    fs::create_dir_all(&done_dir).unwrap();

    // A freshly written file has an mtime of "now", well inside the
    // retention window, so the sweep must leave it alone.
    let story = done_dir.join("11_story_new.md");
    fs::write(&story, "---\nname: new\n---\n").unwrap();

    sweep_done_to_archived(&work_dir);

    assert!(story.exists(), "recent item should remain in 5_done/");
}
} }