From 2fae9066e2fd9f8df953c5147a3c09ac448e6d35 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 17 Mar 2026 00:37:45 +0000 Subject: [PATCH] story-kit: merge 254_story_add_refactor_work_item_type --- frontend/src/components/StagePanel.tsx | 11 ++- server/src/http/mcp.rs | 77 +++++++++++++++- server/src/http/workflow.rs | 120 +++++++++++++++++++++++++ 3 files changed, 202 insertions(+), 6 deletions(-) diff --git a/frontend/src/components/StagePanel.tsx b/frontend/src/components/StagePanel.tsx index a6b7026..ecd5879 100644 --- a/frontend/src/components/StagePanel.tsx +++ b/frontend/src/components/StagePanel.tsx @@ -4,12 +4,13 @@ import { useLozengeFly } from "./LozengeFlyContext"; const { useLayoutEffect, useRef } = React; -type WorkItemType = "story" | "bug" | "spike" | "unknown"; +type WorkItemType = "story" | "bug" | "spike" | "refactor" | "unknown"; const TYPE_COLORS: Record = { story: "#3fb950", bug: "#f85149", spike: "#58a6ff", + refactor: "#a371f7", unknown: "#444", }; @@ -17,6 +18,7 @@ const TYPE_LABELS: Record = { story: "STORY", bug: "BUG", spike: "SPIKE", + refactor: "REFACTOR", unknown: null, }; @@ -24,7 +26,12 @@ function getWorkItemType(storyId: string): WorkItemType { const match = storyId.match(/^\d+_([a-z]+)_/); if (!match) return "unknown"; const segment = match[1]; - if (segment === "story" || segment === "bug" || segment === "spike") { + if ( + segment === "story" || + segment === "bug" || + segment === "spike" || + segment === "refactor" + ) { return segment; } return "unknown"; diff --git a/server/src/http/mcp.rs b/server/src/http/mcp.rs index 06185fd..8a6833a 100644 --- a/server/src/http/mcp.rs +++ b/server/src/http/mcp.rs @@ -6,9 +6,9 @@ use crate::slog_warn; use crate::http::context::AppContext; use crate::http::settings::get_editor_command_from_store; use crate::http::workflow::{ - add_criterion_to_file, check_criterion_in_file, create_bug_file, create_spike_file, - create_story_file, list_bug_files, load_upcoming_stories, update_story_in_file, 
- validate_story_dirs, + add_criterion_to_file, check_criterion_in_file, create_bug_file, create_refactor_file, + create_spike_file, create_story_file, list_bug_files, list_refactor_files, + load_upcoming_stories, update_story_in_file, validate_story_dirs, }; use crate::worktree; use crate::io::story_metadata::{parse_front_matter, parse_unchecked_todos, write_merge_failure}; @@ -719,6 +719,37 @@ fn handle_tools_list(id: Option) -> JsonRpcResponse { "properties": {} } }, + { + "name": "create_refactor", + "description": "Create a refactor work item in work/1_upcoming/ with a deterministic filename and YAML front matter. Returns the refactor_id.", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Short human-readable refactor name" + }, + "description": { + "type": "string", + "description": "Optional description of the desired state after refactoring" + }, + "acceptance_criteria": { + "type": "array", + "items": { "type": "string" }, + "description": "Optional list of acceptance criteria" + } + }, + "required": ["name"] + } + }, + { + "name": "list_refactors", + "description": "List all open refactors in work/1_upcoming/ matching the _refactor_ naming convention.", + "inputSchema": { + "type": "object", + "properties": {} + } + }, { "name": "close_bug", "description": "Archive a bug from work/2_current/ or work/1_upcoming/ to work/5_done/ and auto-commit to master.", @@ -896,6 +927,9 @@ async fn handle_tools_call( "create_bug" => tool_create_bug(&args, ctx), "list_bugs" => tool_list_bugs(ctx), "close_bug" => tool_close_bug(&args, ctx), + // Refactor lifecycle tools + "create_refactor" => tool_create_refactor(&args, ctx), + "list_refactors" => tool_list_refactors(ctx), // Mergemaster tools "merge_agent_work" => tool_merge_agent_work(&args, ctx).await, "move_story_to_merge" => tool_move_story_to_merge(&args, ctx).await, @@ -1582,6 +1616,39 @@ fn tool_close_bug(args: &Value, ctx: &AppContext) -> Result { )) } +// 
── Refactor lifecycle tool implementations ─────────────────────── + +fn tool_create_refactor(args: &Value, ctx: &AppContext) -> Result { + let name = args + .get("name") + .and_then(|v| v.as_str()) + .ok_or("Missing required argument: name")?; + let description = args.get("description").and_then(|v| v.as_str()); + let acceptance_criteria: Option> = args + .get("acceptance_criteria") + .and_then(|v| serde_json::from_value(v.clone()).ok()); + + let root = ctx.state.get_project_root()?; + let refactor_id = create_refactor_file( + &root, + name, + description, + acceptance_criteria.as_deref(), + )?; + + Ok(format!("Created refactor: {refactor_id}")) +} + +fn tool_list_refactors(ctx: &AppContext) -> Result { + let root = ctx.state.get_project_root()?; + let refactors = list_refactor_files(&root)?; + serde_json::to_string_pretty(&json!(refactors + .iter() + .map(|(id, name)| json!({ "refactor_id": id, "name": name })) + .collect::>())) + .map_err(|e| format!("Serialization error: {e}")) +} + // ── Mergemaster tool implementations ───────────────────────────── async fn tool_merge_agent_work(args: &Value, ctx: &AppContext) -> Result { @@ -2077,13 +2144,15 @@ mod tests { assert!(names.contains(&"create_bug")); assert!(names.contains(&"list_bugs")); assert!(names.contains(&"close_bug")); + assert!(names.contains(&"create_refactor")); + assert!(names.contains(&"list_refactors")); assert!(names.contains(&"merge_agent_work")); assert!(names.contains(&"move_story_to_merge")); assert!(names.contains(&"report_merge_failure")); assert!(names.contains(&"request_qa")); assert!(names.contains(&"get_server_logs")); assert!(names.contains(&"prompt_permission")); - assert_eq!(tools.len(), 31); + assert_eq!(tools.len(), 33); } #[test] diff --git a/server/src/http/workflow.rs b/server/src/http/workflow.rs index d35adce..7c3390a 100644 --- a/server/src/http/workflow.rs +++ b/server/src/http/workflow.rs @@ -338,6 +338,73 @@ pub fn create_spike_file( Ok(spike_id) } +/// Create a refactor 
work item file in `work/1_upcoming/`.
+///
+/// Returns the refactor_id (e.g. `"5_refactor_split_agents_rs"`).
+pub fn create_refactor_file(
+    root: &Path,
+    name: &str,
+    description: Option<&str>,
+    acceptance_criteria: Option<&[String]>,
+) -> Result<String, String> {
+    let refactor_number = next_item_number(root)?;
+    let slug = slugify_name(name);
+
+    if slug.is_empty() {
+        return Err("Name must contain at least one alphanumeric character.".to_string());
+    }
+
+    let filename = format!("{refactor_number}_refactor_{slug}.md");
+    let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming");
+    fs::create_dir_all(&upcoming_dir)
+        .map_err(|e| format!("Failed to create upcoming directory: {e}"))?;
+
+    let filepath = upcoming_dir.join(&filename);
+    if filepath.exists() {
+        return Err(format!("Refactor file already exists: {}", filepath.display()));
+    }
+
+    let refactor_id = filepath
+        .file_stem()
+        .and_then(|s| s.to_str())
+        .unwrap_or_default()
+        .to_string();
+
+    let mut content = String::new();
+    content.push_str("---\n");
+    content.push_str(&format!("name: \"{}\"\n", name.replace('"', "\\\"")));
+    content.push_str("---\n\n");
+    content.push_str(&format!("# Refactor {refactor_number}: {name}\n\n"));
+    content.push_str("## Current State\n\n");
+    content.push_str("- TBD\n\n");
+    content.push_str("## Desired State\n\n");
+    if let Some(desc) = description {
+        content.push_str(desc);
+        content.push('\n');
+    } else {
+        content.push_str("- TBD\n");
+    }
+    content.push('\n');
+    content.push_str("## Acceptance Criteria\n\n");
+    if let Some(criteria) = acceptance_criteria {
+        for criterion in criteria {
+            content.push_str(&format!("- [ ] {criterion}\n"));
+        }
+    } else {
+        content.push_str("- [ ] Refactoring complete and all tests pass\n");
+    }
+    content.push('\n');
+    content.push_str("## Out of Scope\n\n");
+    content.push_str("- TBD\n");
+
+    fs::write(&filepath, &content)
+        .map_err(|e| format!("Failed to write refactor file: {e}"))?;
+
+    // Watcher handles the git commit asynchronously.
+ + Ok(refactor_id) +} + /// Returns true if the item stem (filename without extension) is a bug item. /// Bug items follow the pattern: {N}_bug_{slug} fn is_bug_item(stem: &str) -> bool { @@ -403,6 +470,59 @@ pub fn list_bug_files(root: &Path) -> Result, String> { Ok(bugs) } +/// Returns true if the item stem (filename without extension) is a refactor item. +/// Refactor items follow the pattern: {N}_refactor_{slug} +fn is_refactor_item(stem: &str) -> bool { + let after_num = stem.trim_start_matches(|c: char| c.is_ascii_digit()); + after_num.starts_with("_refactor_") +} + +/// List all open refactors — files in `work/1_upcoming/` matching the `_refactor_` naming pattern. +/// +/// Returns a sorted list of `(refactor_id, name)` pairs. +pub fn list_refactor_files(root: &Path) -> Result, String> { + let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming"); + if !upcoming_dir.exists() { + return Ok(Vec::new()); + } + + let mut refactors = Vec::new(); + for entry in fs::read_dir(&upcoming_dir) + .map_err(|e| format!("Failed to read upcoming directory: {e}"))? + { + let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?; + let path = entry.path(); + + if path.is_dir() { + continue; + } + + if path.extension().and_then(|ext| ext.to_str()) != Some("md") { + continue; + } + + let stem = path + .file_stem() + .and_then(|s| s.to_str()) + .ok_or_else(|| "Invalid file name.".to_string())?; + + if !is_refactor_item(stem) { + continue; + } + + let refactor_id = stem.to_string(); + let name = fs::read_to_string(&path) + .ok() + .and_then(|contents| parse_front_matter(&contents).ok()) + .and_then(|m| m.name) + .unwrap_or_else(|| refactor_id.clone()); + refactors.push((refactor_id, name)); + } + + refactors.sort_by(|a, b| a.0.cmp(&b.0)); + Ok(refactors) +} + /// Locate a work item file by searching all active pipeline stages. /// /// Searches in priority order: 2_current, 1_upcoming, 3_qa, 4_merge, 5_done, 6_archived.