// huskies/server/src/agents/lifecycle.rs
//! Story lifecycle helpers — archival and stage transitions for pipeline items.
//!
//! All pipeline state lives in the CRDT. These functions never consult the
//! filesystem for work-item data — CRDT lookup failures propagate as errors.
//!
//! Every lifecycle function routes through the typed state machine
//! ([`crate::pipeline_state::apply_transition`]) so that illegal transitions
//! are rejected and every stage change emits a [`TransitionFired`] event.
use std::num::NonZeroU32;
use std::path::Path;
use std::process::Command;
use crate::io::story_metadata::clear_front_matter_field_in_content;
use crate::pipeline_state::{
ApplyError, ArchiveReason, BranchName, GitSha, PipelineEvent, Stage, apply_transition,
stage_label,
};
use crate::slog;
/// Determine the item type ("story", "bug", "spike", or "refactor") from the item ID.
///
/// For slug-format IDs (e.g. `"4_bug_login_crash"`), the type is embedded in the ID.
/// For numeric-only IDs (e.g. `"4"`), the type is read from the `type:` field in
/// the content-store front matter. Falls back to `"story"` if not found.
pub(crate) fn item_type_from_id(item_id: &str) -> &'static str {
let after_num = item_id.trim_start_matches(|c: char| c.is_ascii_digit());
if after_num.starts_with("_bug_") {
return "bug";
} else if after_num.starts_with("_spike_") {
return "spike";
} else if after_num.starts_with("_refactor_") {
return "refactor";
}
// Numeric-only ID: check content store front matter for explicit type.
if after_num.is_empty()
&& let Some(content) = crate::db::read_content(item_id)
&& let Ok(meta) = crate::io::story_metadata::parse_front_matter(&content)
&& let Some(t) = meta.item_type.as_deref()
{
return match t {
"bug" => "bug",
"spike" => "spike",
"refactor" => "refactor",
_ => "story",
};
}
"story"
}
2026-04-29 17:38:38 +00:00
type ContentTransform = Box<dyn Fn(&str) -> String>;
2026-04-29 17:38:38 +00:00
/// Build a content-transform closure that clears the given front-matter fields.
fn fields_to_clear_transform(fields: &[&str]) -> Option<ContentTransform> {
if fields.is_empty() {
return None;
}
2026-04-29 17:38:38 +00:00
let fields: Vec<String> = fields.iter().map(|s| s.to_string()).collect();
Some(Box::new(move |content: &str| {
let mut result = content.to_string();
for field in &fields {
result = clear_front_matter_field_in_content(&result, field);
}
result
}))
}
/// Move a work item (story, bug, or spike) from `1_backlog` to `work/2_current/`.
///
/// Only promotes from `1_backlog` — stories already in later stages (3_qa, 4_merge,
/// etc.) are left untouched. This prevents coders from accidentally demoting a story
/// that has already advanced past the coding stage.
/// Idempotent: if already in `2_current/`, returns Ok. If not found, logs and returns Ok.
pub fn move_story_to_current(story_id: &str) -> Result<(), String> {
2026-04-29 17:38:38 +00:00
match apply_transition(story_id, PipelineEvent::DepsMet, None) {
Ok(_) => Ok(()),
Err(ApplyError::NotFound(_)) => {
slog!("[lifecycle] Work item '{story_id}' not found; skipping move to work/2_current/");
Ok(())
}
Err(ApplyError::InvalidTransition(_)) => {
// Already promoted or in a later stage — idempotent no-op.
Ok(())
}
Err(ApplyError::Projection(_)) => Ok(()),
}
}
/// Check whether a feature branch `feature/story-{story_id}` exists and has
/// commits that are not yet on master. Returns `true` when there is unmerged
/// work, `false` when there is no branch, any `git` invocation fails, or all
/// branch commits are already reachable from master.
pub fn feature_branch_has_unmerged_changes(project_root: &Path, story_id: &str) -> bool {
    let branch = format!("feature/story-{story_id}");

    // A branch name that does not resolve means there is nothing to merge.
    let branch_exists = Command::new("git")
        .args(["rev-parse", "--verify", &branch])
        .current_dir(project_root)
        .output()
        .map(|out| out.status.success())
        .unwrap_or(false);
    if !branch_exists {
        return false;
    }

    // `git log master..branch` lists commits on the branch that master lacks;
    // any output at all indicates unmerged work.
    Command::new("git")
        .args(["log", &format!("master..{branch}"), "--oneline"])
        .current_dir(project_root)
        .output()
        .map(|out| !String::from_utf8_lossy(&out.stdout).trim().is_empty())
        .unwrap_or(false)
}
/// Move a story from `work/2_current/`, `work/3_qa/`, or `work/4_merge/` to `work/5_done/`.
///
/// Idempotent if already in `5_done/` or `6_archived/`. Errors if not found in any earlier stage.
/// Spikes may transition directly from `3_qa/` to `5_done/`, skipping the merge stage.
pub fn move_story_to_done(story_id: &str) -> Result<(), String> {
2026-04-29 17:38:38 +00:00
let item = read_typed_or_err(story_id)?;
let dir = item.stage.dir_name();
// Idempotent: already at or past done.
if dir >= "5_done" {
return Ok(());
}
let event = match &item.stage {
Stage::Merge { .. } => PipelineEvent::MergeSucceeded {
merge_commit: GitSha("accepted".to_string()),
},
Stage::Coding | Stage::Qa | Stage::Backlog => PipelineEvent::Close,
_ => {
return Err(format!(
"Work item '{story_id}' is in {} — cannot move to done.",
stage_label(&item.stage)
));
}
};
let transform = fields_to_clear_transform(&["merge_failure", "blocked"]);
apply_transition(story_id, event, transform.as_ref().map(|f| f.as_ref()))
.map(|_| ())
.map_err(|e| e.to_string())
}
/// Move a story/bug from `work/2_current/` or `work/3_qa/` to `work/4_merge/`.
///
/// Idempotent if already in `4_merge/`. Errors if not found in `2_current/` or `3_qa/`.
pub fn move_story_to_merge(story_id: &str) -> Result<(), String> {
2026-04-29 17:38:38 +00:00
let item = read_typed_or_err(story_id)?;
let dir = item.stage.dir_name();
// Idempotent: already at or past merge.
if dir >= "4_merge" {
return Ok(());
}
let branch = BranchName(format!("feature/story-{story_id}"));
let commits = NonZeroU32::new(1).expect("1 is non-zero");
let event = match &item.stage {
Stage::Coding => PipelineEvent::QaSkipped {
feature_branch: branch,
commits_ahead: commits,
},
Stage::Qa => PipelineEvent::GatesPassed {
feature_branch: branch,
commits_ahead: commits,
},
_ => {
return Err(format!(
"Work item '{story_id}' not found in work/2_current/ or work/3_qa/."
));
}
};
let transform = fields_to_clear_transform(&["blocked"]);
apply_transition(story_id, event, transform.as_ref().map(|f| f.as_ref()))
.map(|_| ())
.map_err(|e| e.to_string())
}
/// Move a story/bug from `work/2_current/` to `work/3_qa/`.
///
/// Idempotent if already in `3_qa/`. Errors if not found in `2_current/`.
pub fn move_story_to_qa(story_id: &str) -> Result<(), String> {
2026-04-29 17:38:38 +00:00
let item = read_typed_or_err(story_id)?;
let dir = item.stage.dir_name();
// Idempotent: already at or past qa.
if dir >= "3_qa" {
return Ok(());
}
let transform = fields_to_clear_transform(&["blocked"]);
apply_transition(
story_id,
2026-04-29 17:38:38 +00:00
PipelineEvent::GatesStarted,
transform.as_ref().map(|f| f.as_ref()),
)
.map(|_| ())
2026-04-29 17:38:38 +00:00
.map_err(|e| e.to_string())
}
/// Move a story from `work/3_qa/` back to `work/2_current/`, clearing `review_hold` and writing notes.
pub fn reject_story_from_qa(story_id: &str, notes: &str) -> Result<(), String> {
2026-04-29 17:38:38 +00:00
let notes_owned = notes.to_string();
let transform: Box<dyn Fn(&str) -> String> = Box::new(move |content: &str| {
let mut result = clear_front_matter_field_in_content(content, "review_hold");
if !notes_owned.is_empty() {
result =
crate::io::story_metadata::write_rejection_notes_to_content(&result, &notes_owned);
}
result
});
apply_transition(
story_id,
2026-04-29 17:38:38 +00:00
PipelineEvent::GatesFailed {
reason: notes.to_string(),
},
Some(&*transform),
)
.map(|_| ())
.map_err(|e| e.to_string())
}
/// Map a (current stage, target stage name) pair to the appropriate PipelineEvent.
fn map_stage_move_to_event(
from: &Stage,
target: &str,
story_id: &str,
) -> Result<PipelineEvent, String> {
let branch = || BranchName(format!("feature/story-{story_id}"));
let nz1 = || NonZeroU32::new(1).expect("1 is non-zero");
match (from, target) {
(Stage::Upcoming, "backlog") => Ok(PipelineEvent::Triage),
(Stage::Backlog, "current") => Ok(PipelineEvent::DepsMet),
(Stage::Coding, "qa") => Ok(PipelineEvent::GatesStarted),
(Stage::Coding, "merge") => Ok(PipelineEvent::QaSkipped {
feature_branch: branch(),
commits_ahead: nz1(),
}),
(Stage::Qa, "merge") => Ok(PipelineEvent::GatesPassed {
feature_branch: branch(),
commits_ahead: nz1(),
}),
(Stage::Coding, "backlog") | (Stage::Qa, "backlog") | (Stage::Merge { .. }, "backlog") => {
Ok(PipelineEvent::Demote)
}
2026-04-29 17:38:38 +00:00
(Stage::Qa, "current") => Ok(PipelineEvent::GatesFailed {
reason: "manual move".to_string(),
}),
(Stage::Merge { .. }, "done") => Ok(PipelineEvent::MergeSucceeded {
merge_commit: GitSha("manual".to_string()),
}),
(Stage::Coding | Stage::Qa | Stage::Backlog, "done") => Ok(PipelineEvent::Close),
(
Stage::Archived {
reason: ArchiveReason::Blocked { .. },
..
},
"backlog",
)
| (
Stage::Archived {
reason: ArchiveReason::MergeFailed { .. },
..
},
"backlog",
) => Ok(PipelineEvent::Unblock),
_ => Err(format!(
"Invalid target_stage '{target}'. Cannot transition from {} to {target}.",
stage_label(from),
)),
}
}
/// Move any work item to an arbitrary pipeline stage by searching all stages.
///
/// Accepts `target_stage` as one of: `backlog`, `current`, `qa`, `merge`, `done`.
/// Idempotent: if the item is already in the target stage, returns Ok.
/// Returns `(from_stage, to_stage)` on success.
pub fn move_story_to_stage(story_id: &str, target_stage: &str) -> Result<(String, String), String> {
2026-04-29 17:38:38 +00:00
// Validate target.
let target_dir = match target_stage {
"backlog" => "1_backlog",
"current" => "2_current",
"qa" => "3_qa",
"merge" => "4_merge",
"done" => "5_done",
_ => {
return Err(format!(
"Invalid target_stage '{target_stage}'. Must be one of: backlog, current, qa, merge, done"
2026-04-29 17:38:38 +00:00
));
}
};
2026-04-29 17:38:38 +00:00
let item = read_typed_or_err(story_id)?;
let from_name = stage_to_name(&item.stage);
2026-04-29 17:38:38 +00:00
// Idempotent: already in the target stage.
if item.stage.dir_name() == target_dir {
return Ok((target_stage.to_string(), target_stage.to_string()));
}
2026-04-29 17:38:38 +00:00
let event = map_stage_move_to_event(&item.stage, target_stage, story_id)?;
apply_transition(story_id, event, None).map_err(|e| e.to_string())?;
Ok((from_name.to_string(), target_stage.to_string()))
}
/// Move a bug from `work/2_current/` or `work/1_backlog/` to `work/5_done/`.
///
/// Idempotent if already in `5_done/`. Errors if not found in `2_current/` or `1_backlog/`.
pub fn close_bug_to_archive(bug_id: &str) -> Result<(), String> {
2026-04-29 17:38:38 +00:00
let item = read_typed_or_err(bug_id)?;
if item.stage.dir_name() >= "5_done" {
return Ok(());
}
apply_transition(bug_id, PipelineEvent::Close, None)
.map(|_| ())
.map_err(|e| e.to_string())
}
/// Read a typed pipeline item or return a user-facing error.
///
/// Distinguishes a failed CRDT read (propagated with the item id prefixed)
/// from an item that simply does not exist in any stage.
fn read_typed_or_err(story_id: &str) -> Result<crate::pipeline_state::PipelineItem, String> {
    match crate::pipeline_state::read_typed(story_id) {
        Err(e) => Err(format!("Work item '{story_id}': {e}")),
        Ok(None) => Err(format!(
            "Work item '{story_id}' not found in any pipeline stage."
        )),
        Ok(Some(item)) => Ok(item),
    }
}
/// Map a Stage variant to the short name used by `move_story_to_stage` return values.
fn stage_to_name(s: &Stage) -> &'static str {
    match s {
        Stage::Upcoming => "upcoming",
        Stage::Backlog => "backlog",
        // Coding corresponds to the `2_current` directory, hence "current".
        Stage::Coding => "current",
        Stage::Qa => "qa",
        Stage::Merge { .. } => "merge",
        Stage::Done { .. } => "done",
        Stage::Archived { .. } => "archived",
        Stage::Frozen { .. } => "frozen",
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // ── move_story_to_current tests ────────────────────────────────────────────

    #[test]
    fn move_story_to_current_from_crdt() {
        // Seed via CRDT — the sole source of truth for pipeline state.
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "99950_story_lifecycle",
            "1_backlog",
            "---\nname: Lifecycle Test\n---\n# Story\n",
        );
        move_story_to_current("99950_story_lifecycle").unwrap();
        // Verify the CRDT now has the item in 2_current.
        let item = crate::pipeline_state::read_typed("99950_story_lifecycle")
            .expect("CRDT read should succeed")
            .expect("item should exist in CRDT after move");
        assert_eq!(
            item.stage.dir_name(),
            "2_current",
            "item should be in 2_current after move"
        );
    }

    #[test]
    fn move_story_to_current_noop_when_not_found() {
        // Missing items are logged and skipped — never an error.
        assert!(move_story_to_current("99_missing").is_ok());
    }

    /// Lifecycle operation runs to completion using only CRDT state;
    /// no `.huskies/work/<stage>/` tree is consulted because no `project_root`
    /// is passed — the functions operate purely on the CRDT.
    #[test]
    fn move_story_uses_only_crdt_no_fs_shadow() {
        crate::db::ensure_content_store();
        crate::db::write_item_with_content(
            "99951_story_crdt_only",
            "2_current",
            "---\nname: CRDT Only Test\n---\n# Story\n",
        );
        // No filesystem path is involved — lifecycle functions no longer
        // accept a project_root, proving they never touch the filesystem.
        move_story_to_done("99951_story_crdt_only").unwrap();
        let item = crate::pipeline_state::read_typed("99951_story_crdt_only")
            .expect("CRDT read should succeed")
            .expect("item should exist in CRDT");
        assert_eq!(
            item.stage.dir_name(),
            "5_done",
            "item should be in 5_done after move"
        );
    }

    // ── item_type_from_id tests ────────────────────────────────────────────────

    #[test]
    fn item_type_from_id_detects_types() {
        // Slug-format IDs embed the type directly after the numeric prefix.
        assert_eq!(item_type_from_id("1_bug_test"), "bug");
        assert_eq!(item_type_from_id("1_spike_research"), "spike");
        assert_eq!(item_type_from_id("50_story_my_story"), "story");
        assert_eq!(item_type_from_id("1_story_simple"), "story");
        assert_eq!(item_type_from_id("1_refactor_cleanup"), "refactor");
    }

    #[test]
    fn item_type_from_id_falls_back_to_content_store_for_numeric_ids() {
        crate::db::ensure_content_store();
        // Write a bug item with numeric-only ID into the content store.
        let bug_content = "---\ntype: bug\nname: \"Test Bug\"\n---\n\n# Bug 9999: Test Bug\n";
        crate::db::write_content("9999", bug_content);
        let spike_content =
            "---\ntype: spike\nname: \"Test Spike\"\n---\n\n# Spike 9998: Test Spike\n";
        crate::db::write_content("9998", spike_content);
        let refactor_content =
            "---\ntype: refactor\nname: \"Test Refactor\"\n---\n\n# Refactor 9997: Test Refactor\n";
        crate::db::write_content("9997", refactor_content);
        let story_content =
            "---\ntype: story\nname: \"Test Story\"\n---\n\n# Story 9996: Test Story\n";
        crate::db::write_content("9996", story_content);
        assert_eq!(item_type_from_id("9999"), "bug");
        assert_eq!(item_type_from_id("9998"), "spike");
        assert_eq!(item_type_from_id("9997"), "refactor");
        assert_eq!(item_type_from_id("9996"), "story");
        // No content store entry → defaults to "story".
        assert_eq!(item_type_from_id("99999"), "story");
    }

    // ── feature_branch_has_unmerged_changes tests ────────────────────────────

    /// Create a minimal git repo with one empty commit so the branch tests
    /// have a `master` to diverge from.
    fn init_git_repo(repo: &std::path::Path) {
        Command::new("git")
            .args(["init"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(repo)
            .output()
            .unwrap();
    }

    /// Bug 226: feature_branch_has_unmerged_changes returns true when the
    /// feature branch has commits not on master.
    #[test]
    fn feature_branch_has_unmerged_changes_detects_unmerged_code() {
        use std::fs;
        use tempfile::tempdir;
        let tmp = tempdir().unwrap();
        let repo = tmp.path();
        init_git_repo(repo);
        // Create a feature branch with a code commit.
        Command::new("git")
            .args(["checkout", "-b", "feature/story-50_story_test"])
            .current_dir(repo)
            .output()
            .unwrap();
        fs::write(repo.join("feature.rs"), "fn main() {}").unwrap();
        Command::new("git")
            .args(["add", "."])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["commit", "-m", "add feature"])
            .current_dir(repo)
            .output()
            .unwrap();
        // Return to master so the branch's commit is unmerged work.
        Command::new("git")
            .args(["checkout", "master"])
            .current_dir(repo)
            .output()
            .unwrap();
        assert!(
            feature_branch_has_unmerged_changes(repo, "50_story_test"),
            "should detect unmerged changes on feature branch"
        );
    }

    /// Bug 226: feature_branch_has_unmerged_changes returns false when no
    /// feature branch exists.
    #[test]
    fn feature_branch_has_unmerged_changes_false_when_no_branch() {
        use tempfile::tempdir;
        let tmp = tempdir().unwrap();
        let repo = tmp.path();
        init_git_repo(repo);
        assert!(
            !feature_branch_has_unmerged_changes(repo, "99_nonexistent"),
            "should return false when no feature branch"
        );
    }
}