story-kit: merge 287_story_rename_upcoming_pipeline_stage_to_backlog

Dave
2026-03-18 14:31:12 +00:00
parent 967ebd7a84
commit df6f792214
26 changed files with 250 additions and 228 deletions


@@ -672,7 +672,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "create_spike",
"description": "Create a spike file in .story_kit/work/1_upcoming/ with a deterministic filename and YAML front matter. Returns the spike_id.",
"description": "Create a spike file in .story_kit/work/1_backlog/ with a deterministic filename and YAML front matter. Returns the spike_id.",
"inputSchema": {
"type": "object",
"properties": {
@@ -690,7 +690,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "create_bug",
"description": "Create a bug file in work/1_upcoming/ with a deterministic filename and auto-commit to master. Returns the bug_id.",
"description": "Create a bug file in work/1_backlog/ with a deterministic filename and auto-commit to master. Returns the bug_id.",
"inputSchema": {
"type": "object",
"properties": {
@@ -725,7 +725,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "list_bugs",
"description": "List all open bugs in work/1_upcoming/ matching the _bug_ naming convention.",
"description": "List all open bugs in work/1_backlog/ matching the _bug_ naming convention.",
"inputSchema": {
"type": "object",
"properties": {}
@@ -733,7 +733,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "create_refactor",
"description": "Create a refactor work item in work/1_upcoming/ with a deterministic filename and YAML front matter. Returns the refactor_id.",
"description": "Create a refactor work item in work/1_backlog/ with a deterministic filename and YAML front matter. Returns the refactor_id.",
"inputSchema": {
"type": "object",
"properties": {
@@ -756,7 +756,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "list_refactors",
"description": "List all open refactors in work/1_upcoming/ matching the _refactor_ naming convention.",
"description": "List all open refactors in work/1_backlog/ matching the _refactor_ naming convention.",
"inputSchema": {
"type": "object",
"properties": {}
@@ -764,7 +764,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "close_bug",
"description": "Archive a bug from work/2_current/ or work/1_upcoming/ to work/5_done/ and auto-commit to master.",
"description": "Archive a bug from work/2_current/ or work/1_backlog/ to work/5_done/ and auto-commit to master.",
"inputSchema": {
"type": "object",
"properties": {
@@ -1022,7 +1022,7 @@ fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
.get("acceptance_criteria")
.and_then(|v| serde_json::from_value(v.clone()).ok());
// Spike 61: write the file only — the filesystem watcher detects the new
-// .md file in work/1_upcoming/ and auto-commits with a deterministic message.
+// .md file in work/1_backlog/ and auto-commits with a deterministic message.
let commit = false;
let root = ctx.state.get_project_root()?;
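
The Spike 61 comment above is why commit is false here: the watcher owns the commit. A minimal sketch of that flow, assuming the notify crate; auto_commit, watch_backlog, and the commit-message format are illustrative assumptions, not story-kit's actual code:

use std::path::{Path, PathBuf};
use std::process::Command;

use notify::{recommended_watcher, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

// Hypothetical: stage the new file and commit with a message derived from its name.
fn auto_commit(repo: &Path, file: &Path) {
    let msg = format!("story-kit: add {}", file.file_name().unwrap().to_string_lossy());
    let _ = Command::new("git").arg("add").arg(file).current_dir(repo).status();
    let _ = Command::new("git").args(["commit", "-m", &msg]).current_dir(repo).status();
}

// Watch work/1_backlog/ for new .md files; the caller must keep the returned
// watcher alive or events stop arriving.
fn watch_backlog(root: PathBuf) -> notify::Result<RecommendedWatcher> {
    let dir = root.join(".story_kit/work/1_backlog");
    let mut watcher = recommended_watcher(move |res: notify::Result<notify::Event>| {
        if let Ok(ev) = res {
            if matches!(ev.kind, EventKind::Create(_)) {
                for p in ev.paths.iter().filter(|p| p.extension().is_some_and(|e| e == "md")) {
                    auto_commit(&root, p);
                }
            }
        }
    })?;
    watcher.watch(&dir, RecursiveMode::NonRecursive)?;
    Ok(watcher)
}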
@@ -1091,16 +1091,16 @@ fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, String> {
active.extend(map_items(&state.merge, "merge"));
active.extend(map_items(&state.done, "done"));
-let upcoming: Vec<Value> = state
-.upcoming
+let backlog: Vec<Value> = state
+.backlog
.iter()
.map(|s| json!({ "story_id": s.story_id, "name": s.name }))
.collect();
serde_json::to_string_pretty(&json!({
"active": active,
"upcoming": upcoming,
"upcoming_count": upcoming.len(),
"backlog": backlog,
"backlog_count": backlog.len(),
}))
.map_err(|e| format!("Serialization error: {e}"))
}
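
For reference, a minimal sketch of the payload this now serializes; the backlog entry values are taken from the test fixture further down, and the exact shape of active entries comes from map_items, which is outside this diff:

serde_json::json!({
    "active": [ /* one entry per story in current/qa/merge/done, shaped by map_items */ ],
    "backlog": [
        { "story_id": "10_story_upcoming", "name": "Upcoming Story" }
    ],
    "backlog_count": 1
})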
@@ -2452,7 +2452,7 @@ mod tests {
let root = tmp.path();
for (stage, id, name) in &[
("1_upcoming", "10_story_upcoming", "Upcoming Story"),
("1_backlog", "10_story_upcoming", "Upcoming Story"),
("2_current", "20_story_current", "Current Story"),
("3_qa", "30_story_qa", "QA Story"),
("4_merge", "40_story_merge", "Merge Story"),
@@ -2481,11 +2481,11 @@ mod tests {
assert!(stages.contains(&"merge"));
assert!(stages.contains(&"done"));
-// Upcoming backlog
-let upcoming = parsed["upcoming"].as_array().unwrap();
-assert_eq!(upcoming.len(), 1);
-assert_eq!(upcoming[0]["story_id"], "10_story_upcoming");
-assert_eq!(parsed["upcoming_count"], 1);
+// Backlog
+let backlog = parsed["backlog"].as_array().unwrap();
+assert_eq!(backlog.len(), 1);
+assert_eq!(backlog[0]["story_id"], "10_story_upcoming");
+assert_eq!(parsed["backlog_count"], 1);
}
#[test]
@@ -2801,8 +2801,8 @@ mod tests {
let t = tool.unwrap();
let desc = t["description"].as_str().unwrap();
assert!(
desc.contains("work/1_upcoming/"),
"create_bug description should reference work/1_upcoming/, got: {desc}"
desc.contains("work/1_backlog/"),
"create_bug description should reference work/1_backlog/, got: {desc}"
);
assert!(
!desc.contains(".story_kit/bugs"),
@@ -2826,8 +2826,8 @@ mod tests {
let t = tool.unwrap();
let desc = t["description"].as_str().unwrap();
assert!(
desc.contains("work/1_upcoming/"),
"list_bugs description should reference work/1_upcoming/, got: {desc}"
desc.contains("work/1_backlog/"),
"list_bugs description should reference work/1_backlog/, got: {desc}"
);
assert!(
!desc.contains(".story_kit/bugs"),
@@ -2911,7 +2911,7 @@ mod tests {
assert!(result.contains("1_bug_login_crash"));
let bug_file = tmp
.path()
.join(".story_kit/work/1_upcoming/1_bug_login_crash.md");
.join(".story_kit/work/1_backlog/1_bug_login_crash.md");
assert!(bug_file.exists());
}
@@ -2927,15 +2927,15 @@ mod tests {
#[test]
fn tool_list_bugs_returns_open_bugs() {
let tmp = tempfile::tempdir().unwrap();
-let upcoming_dir = tmp.path().join(".story_kit/work/1_upcoming");
-std::fs::create_dir_all(&upcoming_dir).unwrap();
+let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
+std::fs::create_dir_all(&backlog_dir).unwrap();
std::fs::write(
upcoming_dir.join("1_bug_crash.md"),
backlog_dir.join("1_bug_crash.md"),
"# Bug 1: App Crash\n",
)
.unwrap();
std::fs::write(
upcoming_dir.join("2_bug_typo.md"),
backlog_dir.join("2_bug_typo.md"),
"# Bug 2: Typo in Header\n",
)
.unwrap();
@@ -2963,9 +2963,9 @@ mod tests {
fn tool_close_bug_moves_to_archive() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo_in(tmp.path());
-let upcoming_dir = tmp.path().join(".story_kit/work/1_upcoming");
-std::fs::create_dir_all(&upcoming_dir).unwrap();
-let bug_file = upcoming_dir.join("1_bug_crash.md");
+let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
+std::fs::create_dir_all(&backlog_dir).unwrap();
+let bug_file = backlog_dir.join("1_bug_crash.md");
std::fs::write(&bug_file, "# Bug 1: Crash\n").unwrap();
// Stage the file so it's tracked
std::process::Command::new("git")
@@ -3035,7 +3035,7 @@ mod tests {
assert!(result.contains("1_spike_compare_encoders"));
let spike_file = tmp
.path()
.join(".story_kit/work/1_upcoming/1_spike_compare_encoders.md");
.join(".story_kit/work/1_backlog/1_spike_compare_encoders.md");
assert!(spike_file.exists());
let contents = std::fs::read_to_string(&spike_file).unwrap();
assert!(contents.starts_with("---\nname: \"Compare Encoders\"\n---"));
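
Per that assertion, a freshly created spike file opens with exactly this YAML front matter; what follows the closing delimiter is not constrained by the test:

---
name: "Compare Encoders"
---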
@@ -3050,7 +3050,7 @@ mod tests {
let result = tool_create_spike(&json!({"name": "My Spike"}), &ctx).unwrap();
assert!(result.contains("1_spike_my_spike"));
-let spike_file = tmp.path().join(".story_kit/work/1_upcoming/1_spike_my_spike.md");
+let spike_file = tmp.path().join(".story_kit/work/1_backlog/1_spike_my_spike.md");
assert!(spike_file.exists());
let contents = std::fs::read_to_string(&spike_file).unwrap();
assert!(contents.starts_with("---\nname: \"My Spike\"\n---"));
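
For orientation, the work pipeline layout after this rename, as exercised by the test fixtures above:

.story_kit/work/
  1_backlog/   (renamed from 1_upcoming/)
  2_current/
  3_qa/
  4_merge/
  5_done/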