story-kit: merge 254_story_add_refactor_work_item_type

This commit is contained in:
Dave
2026-03-17 00:37:45 +00:00
parent 3553f59078
commit 2fae9066e2
3 changed files with 202 additions and 6 deletions

View File

@@ -338,6 +338,73 @@ pub fn create_spike_file(
Ok(spike_id)
}
/// Create a refactor work item file in `work/1_upcoming/`.
///
/// The file is named `{N}_refactor_{slug}.md`, where `N` comes from
/// `next_item_number` and `slug` is derived from `name`.
///
/// Returns the refactor_id (e.g. `"5_refactor_split_agents_rs"`).
///
/// # Errors
///
/// Returns an error when the slugified name is empty, when the target file
/// already exists, or when the directory or file cannot be created.
pub fn create_refactor_file(
    root: &Path,
    name: &str,
    description: Option<&str>,
    acceptance_criteria: Option<&[String]>,
) -> Result<String, String> {
    let refactor_number = next_item_number(root)?;
    let slug = slugify_name(name);
    if slug.is_empty() {
        return Err("Name must contain at least one alphanumeric character.".to_string());
    }
    // The id is the filename without its `.md` extension; build it directly
    // instead of round-tripping through `Path::file_stem` (which needed a
    // lossy `unwrap_or_default`).
    let refactor_id = format!("{refactor_number}_refactor_{slug}");
    let filename = format!("{refactor_id}.md");
    let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming");
    fs::create_dir_all(&upcoming_dir)
        .map_err(|e| format!("Failed to create upcoming directory: {e}"))?;
    let filepath = upcoming_dir.join(&filename);
    if filepath.exists() {
        // Fix: the previous message interpolated nothing — include the path
        // so the user can see which file collided.
        return Err(format!(
            "Refactor file already exists: {}",
            filepath.display()
        ));
    }
    let mut content = String::new();
    content.push_str("---\n");
    // Escape embedded double quotes so the YAML front matter stays valid.
    content.push_str(&format!("name: \"{}\"\n", name.replace('"', "\\\"")));
    content.push_str("---\n\n");
    content.push_str(&format!("# Refactor {refactor_number}: {name}\n\n"));
    content.push_str("## Current State\n\n");
    content.push_str("- TBD\n\n");
    content.push_str("## Desired State\n\n");
    if let Some(desc) = description {
        content.push_str(desc);
        content.push('\n');
    } else {
        content.push_str("- TBD\n");
    }
    content.push('\n');
    content.push_str("## Acceptance Criteria\n\n");
    if let Some(criteria) = acceptance_criteria {
        // NOTE(review): an empty slice produces an empty section rather than
        // the default checkbox — confirm that is intended by callers.
        for criterion in criteria {
            content.push_str(&format!("- [ ] {criterion}\n"));
        }
    } else {
        content.push_str("- [ ] Refactoring complete and all tests pass\n");
    }
    content.push('\n');
    content.push_str("## Out of Scope\n\n");
    content.push_str("- TBD\n");
    fs::write(&filepath, &content)
        .map_err(|e| format!("Failed to write refactor file: {e}"))?;
    // Watcher handles the git commit asynchronously.
    Ok(refactor_id)
}
/// Returns true if the item stem (filename without extension) is a bug item.
/// Bug items follow the pattern: {N}_bug_{slug}
fn is_bug_item(stem: &str) -> bool {
@@ -403,6 +470,59 @@ pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
Ok(bugs)
}
/// Returns true if the item stem (filename without extension) is a refactor
/// item, i.e. matches the pattern `{N}_refactor_{slug}`.
fn is_refactor_item(stem: &str) -> bool {
    // Find where the leading run of ASCII digits ends; the remainder must
    // begin with the `_refactor_` marker.
    let digits_end = stem
        .find(|c: char| !c.is_ascii_digit())
        .unwrap_or(stem.len());
    stem[digits_end..].starts_with("_refactor_")
}
/// List all open refactors — files in `work/1_upcoming/` whose stem matches
/// the `{N}_refactor_{slug}` naming pattern.
///
/// Returns `(refactor_id, name)` pairs sorted by refactor_id. The display
/// name comes from the file's front matter when it parses; otherwise the
/// refactor_id itself is used as a fallback.
pub fn list_refactor_files(root: &Path) -> Result<Vec<(String, String)>, String> {
    let dir = root.join(".story_kit").join("work").join("1_upcoming");
    if !dir.exists() {
        return Ok(Vec::new());
    }
    let entries =
        fs::read_dir(&dir).map_err(|e| format!("Failed to read upcoming directory: {e}"))?;
    let mut items: Vec<(String, String)> = Vec::new();
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read entry: {e}"))?
            .path();
        // Skip subdirectories and anything that is not a markdown file.
        if path.is_dir() || path.extension().and_then(|ext| ext.to_str()) != Some("md") {
            continue;
        }
        let stem = path
            .file_stem()
            .and_then(|s| s.to_str())
            .ok_or_else(|| "Invalid file name.".to_string())?;
        if !is_refactor_item(stem) {
            continue;
        }
        let id = stem.to_string();
        // Best effort: unreadable files or unparseable front matter fall
        // back to the id rather than failing the whole listing.
        let display = fs::read_to_string(&path)
            .ok()
            .and_then(|text| parse_front_matter(&text).ok())
            .and_then(|meta| meta.name)
            .unwrap_or_else(|| id.clone());
        items.push((id, display));
    }
    items.sort_by(|a, b| a.0.cmp(&b.0));
    Ok(items)
}
/// Locate a work item file by searching all active pipeline stages.
///
/// Searches in priority order: 2_current, 1_upcoming, 3_qa, 4_merge, 5_done, 6_archived.