//! Story lifecycle helpers — archival and stage transitions for pipeline items.
//!
//! All pipeline state lives in the CRDT. These functions never consult the
//! filesystem for work-item data — CRDT lookup failures propagate as errors.
use std::path::Path;
use std::process::Command;
use crate::io::story_metadata::clear_front_matter_field_in_content;
use crate::slog;
/// Optional transform applied to an item's stored content during a stage move
/// (e.g. clearing front-matter fields). `None` means "move without rewriting".
type ContentTransform = Option<Box<dyn Fn(&str) -> String>>;
/// Determine the item type ("story", "bug", "spike", or "refactor") from the item ID.
///
/// For slug-format IDs (e.g. `"4_bug_login_crash"`), the type is embedded in the ID.
/// For numeric-only IDs (e.g. `"4"`), the type is read from the `type:` field in
/// the content-store front matter. Falls back to `"story"` if not found.
pub(crate) fn item_type_from_id(item_id: &str) -> &'static str {
    // Strip the leading numeric prefix; a slug-format ID leaves "_<type>_...".
    let suffix = item_id.trim_start_matches(|c: char| c.is_ascii_digit());
    for (marker, kind) in [
        ("_bug_", "bug"),
        ("_spike_", "spike"),
        ("_refactor_", "refactor"),
    ] {
        if suffix.starts_with(marker) {
            return kind;
        }
    }
    // Numeric-only ID: consult the content-store front matter for an explicit type.
    if suffix.is_empty()
        && let Some(content) = crate::db::read_content(item_id)
        && let Ok(meta) = crate::io::story_metadata::parse_front_matter(&content)
        && let Some(kind) = meta.item_type.as_deref()
    {
        return match kind {
            "bug" => "bug",
            "spike" => "spike",
            "refactor" => "refactor",
            // Unknown explicit types degrade to the default.
            _ => "story",
        };
    }
    "story"
}
/// Move a work item to a new pipeline stage via the database.
///
/// Looks up the item in the CRDT to verify it exists in one of the expected
/// `sources` stages, then updates the stage. Optionally clears front-matter
/// fields from the stored content. Returns the source stage on success.
///
/// Parameters:
/// - `sources`: stage directories the item may legally move *from*.
/// - `target_dir`: stage directory the item moves *to*.
/// - `extra_done_dirs`: stages beyond `target_dir` that also count as "already
///   there" for idempotency (e.g. `6_archived` when the target is `5_done`).
/// - `missing_ok`: when true, a missing item — or one outside `sources` — is a
///   silent no-op instead of an error.
/// - `fields_to_clear`: front-matter field names removed from the stored
///   content during the move.
///
/// Returns `Ok(Some(src_dir))` when a move happened, `Ok(None)` for an
/// idempotent no-op, and `Err` when the item cannot be located and
/// `missing_ok` is false.
fn move_item<'a>(
    story_id: &str,
    sources: &'a [&'a str],
    target_dir: &str,
    extra_done_dirs: &[&str],
    missing_ok: bool,
    fields_to_clear: &[&str],
) -> Result<Option<&'a str>, String> {
    // Check if the item is already in the target stage or a done stage.
    // Use the typed projection for compile-safe stage comparison.
    if let Ok(Some(typed_item)) = crate::pipeline_state::read_typed(story_id) {
        let current_dir = typed_item.stage.dir_name();
        if current_dir == target_dir || extra_done_dirs.contains(&current_dir) {
            return Ok(None); // Idempotent: already there.
        }
        // Verify it's in one of the expected source stages.
        let src_dir = sources.iter().find(|&&s| current_dir == s).copied();
        let src_dir = match src_dir {
            Some(s) => s,
            None if missing_ok => {
                // Item is in CRDT but not in an expected source stage — do not
                // overwrite its current stage. This prevents promote_ready_backlog_stories
                // from demoting a story that has already advanced to merge/done (bug 524).
                return Ok(None);
            }
            None => {
                let locs = sources
                    .iter()
                    .map(|s| format!("work/{s}/"))
                    .collect::<Vec<_>>()
                    .join(" or ");
                return Err(format!("Work item '{story_id}' not found in {locs}."));
            }
        };
        // Optionally clear front-matter fields from the stored content.
        let transform: ContentTransform = if fields_to_clear.is_empty() {
            None
        } else {
            // Own the field names so the boxed closure can outlive this scope.
            let fields: Vec<String> = fields_to_clear.iter().map(|s| s.to_string()).collect();
            Some(Box::new(move |content: &str| {
                let mut result = content.to_string();
                for field in &fields {
                    result = clear_front_matter_field_in_content(&result, field);
                }
                result
            }))
        };
        crate::db::move_item_stage(story_id, target_dir, transform.as_ref().map(|f| f.as_ref()));
        slog!("[lifecycle] Moved '{story_id}' from work/{src_dir}/ to work/{target_dir}/");
        return Ok(Some(src_dir));
    }
    // Item not present in the CRDT at all.
    if missing_ok {
        slog!("[lifecycle] Work item '{story_id}' not found; skipping move to work/{target_dir}/");
        return Ok(None);
    }
    let locs = sources
        .iter()
        .map(|s| format!("work/{s}/"))
        .collect::<Vec<_>>()
        .join(" or ");
    Err(format!("Work item '{story_id}' not found in {locs}."))
}
/// Promote a work item (story, bug, or spike) from `work/1_backlog/` to `work/2_current/`.
///
/// Only promotes from `1_backlog` — stories already in later stages (3_qa, 4_merge,
/// etc.) are left untouched. This prevents coders from accidentally demoting a story
/// that has already advanced past the coding stage.
/// Idempotent: if already in `2_current/`, returns Ok. If not found, logs and returns Ok.
pub fn move_story_to_current(story_id: &str) -> Result<(), String> {
    let _ = move_item(story_id, &["1_backlog"], "2_current", &[], true, &[])?;
    Ok(())
}
/// Check whether a feature branch `feature/story-{story_id}` exists and has
/// commits that are not yet on master. Returns `true` when there is unmerged
/// work, `false` when there is no branch or all its commits are already
/// reachable from master.
pub fn feature_branch_has_unmerged_changes(project_root: &Path, story_id: &str) -> bool {
    let branch = format!("feature/story-{story_id}");
    // The branch must exist at all before we look for unmerged commits.
    let branch_exists = Command::new("git")
        .args(["rev-parse", "--verify", &branch])
        .current_dir(project_root)
        .output()
        .map(|out| out.status.success())
        .unwrap_or(false);
    if !branch_exists {
        return false; // No feature branch → nothing to merge.
    }
    // Any commit reachable from the branch but not from master counts as
    // unmerged work; an empty `master..branch` log means everything is merged.
    Command::new("git")
        .args(["log", &format!("master..{branch}"), "--oneline"])
        .current_dir(project_root)
        .output()
        .map(|out| !String::from_utf8_lossy(&out.stdout).trim().is_empty())
        .unwrap_or(false)
}
/// Move a story from `work/2_current/`, `work/3_qa/`, or `work/4_merge/` to `work/5_done/`.
///
/// Idempotent if already in `5_done/` or `6_archived/`. Errors if not found in any earlier stage.
/// Spikes may transition directly from `3_qa/` to `5_done/`, skipping the merge stage.
pub fn move_story_to_done(story_id: &str) -> Result<(), String> {
move_item(
story_id,
&["2_current", "3_qa", "4_merge"],
"5_done",
&["6_archived"],
false,
2026-04-29 15:17:47 +00:00
&["merge_failure", "blocked"],
)
.map(|_| ())
}
/// Move a story/bug from `work/2_current/` or `work/3_qa/` to `work/4_merge/`.
///
/// Idempotent if already in `4_merge/`. Errors if not found in `2_current/` or `3_qa/`.
pub fn move_story_to_merge(story_id: &str) -> Result<(), String> {
move_item(
story_id,
&["2_current", "3_qa"],
"4_merge",
&["5_done", "6_archived"],
false,
2026-04-29 15:17:47 +00:00
&["blocked"],
)
.map(|_| ())
}
/// Move a story/bug from `work/2_current/` to `work/3_qa/`.
///
/// Idempotent if already in `3_qa/`. Errors if not found in `2_current/`.
pub fn move_story_to_qa(story_id: &str) -> Result<(), String> {
move_item(
story_id,
&["2_current"],
"3_qa",
&["5_done", "6_archived"],
false,
2026-04-29 15:17:47 +00:00
&["blocked"],
)
.map(|_| ())
}
/// Move a story from `work/3_qa/` back to `work/2_current/`, clearing `review_hold` and writing notes.
///
/// The `review_hold` front-matter field is cleared as part of the move. When
/// the move actually happened (not an idempotent no-op) and `notes` is
/// non-empty, the rejection notes are appended to the stored content.
pub fn reject_story_from_qa(story_id: &str, notes: &str) -> Result<(), String> {
    let moved = move_item(
        story_id,
        &["3_qa"],
        "2_current",
        &[],
        false,
        &["review_hold"],
    )?;
    if moved.is_some() && !notes.is_empty() {
        // Append rejection notes to the stored content.
        if let Some(content) = crate::db::read_content(story_id) {
            let updated =
                crate::io::story_metadata::write_rejection_notes_to_content(&content, notes);
            crate::db::write_content(story_id, &updated);
            // Re-sync to DB.
            // NOTE(review): content is written twice — once directly and once
            // via the item-with-content path. Presumably the second call also
            // refreshes the stage projection; confirm write_content alone is
            // insufficient before simplifying.
            crate::db::write_item_with_content(story_id, "2_current", &updated);
        }
    }
    Ok(())
}
/// Move any work item to an arbitrary pipeline stage by searching all stages.
///
/// Accepts `target_stage` as one of: `backlog`, `current`, `qa`, `merge`, `done`.
/// Idempotent: if the item is already in the target stage, returns Ok.
/// Returns `(from_stage, to_stage)` on success.
pub fn move_story_to_stage(story_id: &str, target_stage: &str) -> Result<(String, String), String> {
    const STAGES: &[(&str, &str)] = &[
        ("backlog", "1_backlog"),
        ("current", "2_current"),
        ("qa", "3_qa"),
        ("merge", "4_merge"),
        ("done", "5_done"),
        ("archived", "6_archived"),
    ];
    // Resolve the requested stage name to its directory. "archived" is a
    // recognised source stage but is deliberately not a valid move target.
    let target_dir = match STAGES
        .iter()
        .find(|(name, _)| *name == target_stage && *name != "archived")
    {
        Some((_, dir)) => *dir,
        None => {
            return Err(format!(
                "Invalid target_stage '{target_stage}'. Must be one of: backlog, current, qa, merge, done"
            ));
        }
    };
    // Search every stage directory for the item.
    let all_dirs: Vec<&str> = STAGES.iter().map(|(_, dir)| *dir).collect();
    let moved = move_item(story_id, &all_dirs, target_dir, &[], false, &[])
        .map_err(|_| format!("Work item '{story_id}' not found in any pipeline stage."))?;
    if let Some(src_dir) = moved {
        // Translate the source directory back to its friendly stage name.
        let from_stage = STAGES
            .iter()
            .find(|(_, dir)| *dir == src_dir)
            .map_or(src_dir, |(name, _)| *name);
        Ok((from_stage.to_string(), target_stage.to_string()))
    } else {
        // Idempotent no-op: already in the target stage.
        Ok((target_stage.to_string(), target_stage.to_string()))
    }
}
/// Move a bug from `work/2_current/` or `work/1_backlog/` to `work/5_done/`.
///
/// Idempotent if already in `5_done/`. Errors if not found in `2_current/` or `1_backlog/`.
pub fn close_bug_to_archive(bug_id: &str) -> Result<(), String> {
    let _ = move_item(bug_id, &["2_current", "1_backlog"], "5_done", &[], false, &[])?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // ── move_story_to_current tests ────────────────────────────────────────────
    #[test]
    fn move_story_to_current_from_crdt() {
        // Seed via CRDT — the sole source of truth for pipeline state.
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "99950_story_lifecycle",
            "1_backlog",
            "---\nname: Lifecycle Test\n---\n# Story\n",
        );
        move_story_to_current("99950_story_lifecycle").unwrap();
        // Verify the CRDT now has the item in 2_current.
        let item = crate::pipeline_state::read_typed("99950_story_lifecycle")
            .expect("CRDT read should succeed")
            .expect("item should exist in CRDT after move");
        assert_eq!(
            item.stage.dir_name(),
            "2_current",
            "item should be in 2_current after move"
        );
    }
    // A missing item is not an error when promoting to current (missing_ok=true).
    #[test]
    fn move_story_to_current_noop_when_not_found() {
        assert!(move_story_to_current("99_missing").is_ok());
    }
    /// Lifecycle operation runs to completion using only CRDT state;
    /// no `.huskies/work/<stage>/` tree is consulted because no `project_root`
    /// is passed — the functions operate purely on the CRDT.
    #[test]
    fn move_story_uses_only_crdt_no_fs_shadow() {
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "99951_story_crdt_only",
            "2_current",
            "---\nname: CRDT Only Test\n---\n# Story\n",
        );
        // No filesystem path is involved — lifecycle functions no longer
        // accept a project_root, proving they never touch the filesystem.
        move_story_to_done("99951_story_crdt_only").unwrap();
        let item = crate::pipeline_state::read_typed("99951_story_crdt_only")
            .expect("CRDT read should succeed")
            .expect("item should exist in CRDT");
        assert_eq!(
            item.stage.dir_name(),
            "5_done",
            "item should be in 5_done after move"
        );
    }
    // ── item_type_from_id tests ────────────────────────────────────────────────
    // Slug-format IDs carry the type inline; no content-store lookup needed.
    #[test]
    fn item_type_from_id_detects_types() {
        assert_eq!(item_type_from_id("1_bug_test"), "bug");
        assert_eq!(item_type_from_id("1_spike_research"), "spike");
        assert_eq!(item_type_from_id("50_story_my_story"), "story");
        assert_eq!(item_type_from_id("1_story_simple"), "story");
        assert_eq!(item_type_from_id("1_refactor_cleanup"), "refactor");
    }
    // Numeric-only IDs are resolved via the `type:` front-matter field.
    #[test]
    fn item_type_from_id_falls_back_to_content_store_for_numeric_ids() {
        crate::db::ensure_content_store();
        // Write a bug item with numeric-only ID into the content store.
        let bug_content = "---\ntype: bug\nname: \"Test Bug\"\n---\n\n# Bug 9999: Test Bug\n";
        crate::db::write_content("9999", bug_content);
        let spike_content =
            "---\ntype: spike\nname: \"Test Spike\"\n---\n\n# Spike 9998: Test Spike\n";
        crate::db::write_content("9998", spike_content);
        let refactor_content =
            "---\ntype: refactor\nname: \"Test Refactor\"\n---\n\n# Refactor 9997: Test Refactor\n";
        crate::db::write_content("9997", refactor_content);
        let story_content =
            "---\ntype: story\nname: \"Test Story\"\n---\n\n# Story 9996: Test Story\n";
        crate::db::write_content("9996", story_content);
        assert_eq!(item_type_from_id("9999"), "bug");
        assert_eq!(item_type_from_id("9998"), "spike");
        assert_eq!(item_type_from_id("9997"), "refactor");
        assert_eq!(item_type_from_id("9996"), "story");
        // No content store entry → defaults to "story".
        assert_eq!(item_type_from_id("99999"), "story");
    }
    // ── feature_branch_has_unmerged_changes tests ────────────────────────────
    // Helper: initialise a git repo with identity config and one empty commit
    // so branch/log commands have a valid HEAD to work from.
    fn init_git_repo(repo: &std::path::Path) {
        Command::new("git")
            .args(["init"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(repo)
            .output()
            .unwrap();
    }
    /// Bug 226: feature_branch_has_unmerged_changes returns true when the
    /// feature branch has commits not on master.
    #[test]
    fn feature_branch_has_unmerged_changes_detects_unmerged_code() {
        use std::fs;
        use tempfile::tempdir;
        let tmp = tempdir().unwrap();
        let repo = tmp.path();
        init_git_repo(repo);
        // Create a feature branch with a code commit.
        Command::new("git")
            .args(["checkout", "-b", "feature/story-50_story_test"])
            .current_dir(repo)
            .output()
            .unwrap();
        fs::write(repo.join("feature.rs"), "fn main() {}").unwrap();
        Command::new("git")
            .args(["add", "."])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["commit", "-m", "add feature"])
            .current_dir(repo)
            .output()
            .unwrap();
        // Switch back to master so the branch's commit is strictly unmerged.
        Command::new("git")
            .args(["checkout", "master"])
            .current_dir(repo)
            .output()
            .unwrap();
        assert!(
            feature_branch_has_unmerged_changes(repo, "50_story_test"),
            "should detect unmerged changes on feature branch"
        );
    }
    /// Bug 226: feature_branch_has_unmerged_changes returns false when no
    /// feature branch exists.
    #[test]
    fn feature_branch_has_unmerged_changes_false_when_no_branch() {
        use tempfile::tempdir;
        let tmp = tempdir().unwrap();
        let repo = tmp.path();
        init_git_repo(repo);
        assert!(
            !feature_branch_has_unmerged_changes(repo, "99_nonexistent"),
            "should return false when no feature branch"
        );
    }
}