huskies: merge 671_refactor_migrate_pipeline_state_consumers_from_string_comparisons_to_typed_pipelinestage_enum

This commit is contained in:
dave
2026-04-27 16:35:25 +00:00
parent 39a9766d7d
commit 4a0f57478c
15 changed files with 161 additions and 103 deletions
+6 -4
View File
@@ -144,7 +144,8 @@ pub async fn run(
if let Some(mut crdt_rx) = crdt_state::subscribe() { if let Some(mut crdt_rx) = crdt_state::subscribe() {
tokio::spawn(async move { tokio::spawn(async move {
while let Ok(evt) = crdt_rx.recv().await { while let Ok(evt) = crdt_rx.recv().await {
if evt.to_stage == "6_archived" if crate::pipeline_state::Stage::from_dir(&evt.to_stage)
.is_some_and(|s| matches!(s, crate::pipeline_state::Stage::Archived { .. }))
&& let Some(root) = crdt_prune_root.as_ref().cloned() && let Some(root) = crdt_prune_root.as_ref().cloned()
{ {
let story_id = evt.story_id.clone(); let story_id = evt.story_id.clone();
@@ -179,7 +180,8 @@ pub async fn run(
let mut rx = auto_rx; let mut rx = auto_rx;
while let Ok(event) = rx.recv().await { while let Ok(event) = rx.recv().await {
if let watcher::WatcherEvent::WorkItem { ref stage, .. } = event if let watcher::WatcherEvent::WorkItem { ref stage, .. } = event
&& matches!(stage.as_str(), "2_current" | "3_qa" | "4_merge") && crate::pipeline_state::Stage::from_dir(stage.as_str())
.is_some_and(|s| s.is_active())
{ {
slog!("[agent-mode] CRDT transition in {stage}/; triggering auto-assign."); slog!("[agent-mode] CRDT transition in {stage}/; triggering auto-assign.");
auto_agents.auto_assign_available_work(&auto_root).await; auto_agents.auto_assign_available_work(&auto_root).await;
@@ -316,7 +318,7 @@ async fn scan_and_claim(
for item in &items { for item in &items {
// Only claim stories in active stages. // Only claim stories in active stages.
if !matches!(item.stage.as_str(), "2_current" | "3_qa" | "4_merge") { if !crate::pipeline_state::Stage::from_dir(&item.stage).is_some_and(|s| s.is_active()) {
continue; continue;
} }
@@ -425,7 +427,7 @@ fn reclaim_timed_out_work(_project_root: &Path) {
let now = chrono::Utc::now().timestamp() as f64; let now = chrono::Utc::now().timestamp() as f64;
for item in &items { for item in &items {
if !matches!(item.stage.as_str(), "2_current" | "3_qa" | "4_merge") { if !crate::pipeline_state::Stage::from_dir(&item.stage).is_some_and(|s| s.is_active()) {
continue; continue;
} }
@@ -3,6 +3,7 @@
use std::path::Path; use std::path::Path;
use tokio::sync::broadcast; use tokio::sync::broadcast;
use crate::pipeline_state::Stage;
use crate::worktree; use crate::worktree;
use super::super::super::ReconciliationEvent; use super::super::super::ReconciliationEvent;
@@ -52,20 +53,20 @@ impl AgentPool {
let wt_path = wt_entry.path.clone(); let wt_path = wt_entry.path.clone();
// Determine which active stage the story is in. // Determine which active stage the story is in.
let stage_dir = match find_active_story_stage(project_root, story_id) { let stage = match find_active_story_stage(project_root, story_id) {
Some(s) => s, Some(s) => s,
None => continue, // Not in any active stage (backlog/archived or unknown). None => continue, // Not in any active stage (backlog/archived or unknown).
}; };
// 4_merge/ is left for auto_assign to handle with a fresh mergemaster. // 4_merge/ is left for auto_assign to handle with a fresh mergemaster.
if stage_dir == "4_merge" { if matches!(stage, Stage::Merge { .. }) {
continue; continue;
} }
let _ = progress_tx.send(ReconciliationEvent { let _ = progress_tx.send(ReconciliationEvent {
story_id: story_id.clone(), story_id: story_id.clone(),
status: "checking".to_string(), status: "checking".to_string(),
message: format!("Checking for committed work in {stage_dir}/"), message: format!("Checking for committed work in {}/", stage.dir_name()),
}); });
// Check whether the worktree has commits ahead of the base branch. // Check whether the worktree has commits ahead of the base branch.
@@ -78,7 +79,8 @@ impl AgentPool {
if !has_work { if !has_work {
eprintln!( eprintln!(
"[startup:reconcile] No committed work for '{story_id}' in {stage_dir}/; skipping." "[startup:reconcile] No committed work for '{story_id}' in {}/; skipping.",
stage.dir_name()
); );
let _ = progress_tx.send(ReconciliationEvent { let _ = progress_tx.send(ReconciliationEvent {
story_id: story_id.clone(), story_id: story_id.clone(),
@@ -89,7 +91,8 @@ impl AgentPool {
} }
eprintln!( eprintln!(
"[startup:reconcile] Found committed work for '{story_id}' in {stage_dir}/. Running acceptance gates." "[startup:reconcile] Found committed work for '{story_id}' in {}/. Running acceptance gates.",
stage.dir_name()
); );
let _ = progress_tx.send(ReconciliationEvent { let _ = progress_tx.send(ReconciliationEvent {
story_id: story_id.clone(), story_id: story_id.clone(),
@@ -130,7 +133,8 @@ impl AgentPool {
if !gates_passed { if !gates_passed {
eprintln!( eprintln!(
"[startup:reconcile] Gates failed for '{story_id}': {gate_output}\n\ "[startup:reconcile] Gates failed for '{story_id}': {gate_output}\n\
Leaving in {stage_dir}/ for auto-assign to restart the agent." Leaving in {}/ for auto-assign to restart the agent.",
stage.dir_name()
); );
let _ = progress_tx.send(ReconciliationEvent { let _ = progress_tx.send(ReconciliationEvent {
story_id: story_id.clone(), story_id: story_id.clone(),
@@ -140,9 +144,12 @@ impl AgentPool {
continue; continue;
} }
eprintln!("[startup:reconcile] Gates passed for '{story_id}' (stage: {stage_dir}/)."); eprintln!(
"[startup:reconcile] Gates passed for '{story_id}' (stage: {}/).",
stage.dir_name()
);
if stage_dir == "2_current" { if matches!(stage, Stage::Coding) {
// Coder stage — determine qa mode to decide next step. // Coder stage — determine qa mode to decide next step.
let qa_mode = { let qa_mode = {
let item_type = crate::agents::lifecycle::item_type_from_id(story_id); let item_type = crate::agents::lifecycle::item_type_from_id(story_id);
@@ -232,7 +239,7 @@ impl AgentPool {
} }
} }
} }
} else if stage_dir == "3_qa" { } else if matches!(stage, Stage::Qa) {
// QA stage → run coverage gate before advancing to merge. // QA stage → run coverage gate before advancing to merge.
let wt_path_for_cov = wt_path.clone(); let wt_path_for_cov = wt_path.clone();
let coverage_result = tokio::task::spawn_blocking(move || { let coverage_result = tokio::task::spawn_blocking(move || {
@@ -342,9 +342,14 @@ impl AgentPool {
// has already reached done or archived (e.g. a previous mergemaster // has already reached done or archived (e.g. a previous mergemaster
// succeeded), this advance is a zombie — skip it entirely to avoid // succeeded), this advance is a zombie — skip it entirely to avoid
// phantom notifications and redundant post-merge test runs. // phantom notifications and redundant post-merge test runs.
if let Ok(Some(typed_item)) = crate::pipeline_state::read_typed(story_id) { if let Ok(Some(typed_item)) = crate::pipeline_state::read_typed(story_id)
&& matches!(
typed_item.stage,
crate::pipeline_state::Stage::Done { .. }
| crate::pipeline_state::Stage::Archived { .. }
)
{
let current_dir = typed_item.stage.dir_name(); let current_dir = typed_item.stage.dir_name();
if current_dir == "5_done" || current_dir == "6_archived" {
slog!( slog!(
"[pipeline] Skipping stale mergemaster advance for '{story_id}': \ "[pipeline] Skipping stale mergemaster advance for '{story_id}': \
story is already in work/{current_dir}/" story is already in work/{current_dir}/"
@@ -353,7 +358,6 @@ impl AgentPool {
// do not emit notifications, do not restart agents. // do not emit notifications, do not restart agents.
return; return;
} }
}
// Block advancement if the mergemaster explicitly reported a failure. // Block advancement if the mergemaster explicitly reported a failure.
// The server-owned gate check runs in the feature-branch worktree (not // The server-owned gate check runs in the feature-branch worktree (not
+8 -6
View File
@@ -3,6 +3,7 @@
use std::path::Path; use std::path::Path;
use crate::config::ProjectConfig; use crate::config::ProjectConfig;
use crate::pipeline_state::Stage;
use super::super::super::{PipelineStage, agent_config_stage, pipeline_stage}; use super::super::super::{PipelineStage, agent_config_stage, pipeline_stage};
use super::super::worktree::find_active_story_stage; use super::super::worktree::find_active_story_stage;
@@ -30,19 +31,20 @@ pub(super) fn validate_agent_stage(
if agent_stage == PipelineStage::Other { if agent_stage == PipelineStage::Other {
return Ok(()); return Ok(());
} }
let Some(story_stage_dir) = find_active_story_stage(project_root, story_id) else { let Some(story_stage) = find_active_story_stage(project_root, story_id) else {
return Ok(()); return Ok(());
}; };
let expected_stage = match story_stage_dir { let expected_stage = match story_stage {
"2_current" => PipelineStage::Coder, Stage::Coding => PipelineStage::Coder,
"3_qa" => PipelineStage::Qa, Stage::Qa => PipelineStage::Qa,
"4_merge" => PipelineStage::Mergemaster, Stage::Merge { .. } => PipelineStage::Mergemaster,
_ => PipelineStage::Other, _ => PipelineStage::Other,
}; };
if expected_stage != PipelineStage::Other && expected_stage != agent_stage { if expected_stage != PipelineStage::Other && expected_stage != agent_stage {
return Err(format!( return Err(format!(
"Agent '{name}' (stage: {agent_stage:?}) cannot be assigned to \ "Agent '{name}' (stage: {agent_stage:?}) cannot be assigned to \
story '{story_id}' in {story_stage_dir}/ (requires stage: {expected_stage:?})" story '{story_id}' in {}/ (requires stage: {expected_stage:?})",
story_stage.dir_name()
)); ));
} }
Ok(()) Ok(())
+13 -13
View File
@@ -21,16 +21,16 @@ impl AgentPool {
} }
} }
/// Return the active pipeline stage directory name for `story_id`, or `None` if the /// Return the active pipeline stage for `story_id`, or `None` if the story is not
/// story is not in any active stage (`2_current/`, `3_qa/`, `4_merge/`). /// in any active stage (`2_current/`, `3_qa/`, `4_merge/`).
pub(super) fn find_active_story_stage( pub(super) fn find_active_story_stage(
_project_root: &Path, _project_root: &Path,
story_id: &str, story_id: &str,
) -> Option<&'static str> { ) -> Option<crate::pipeline_state::Stage> {
if let Ok(Some(item)) = crate::pipeline_state::read_typed(story_id) if let Ok(Some(item)) = crate::pipeline_state::read_typed(story_id)
&& item.stage.is_active() && item.stage.is_active()
{ {
return Some(item.stage.dir_name()); return Some(item.stage);
} }
None None
} }
@@ -44,10 +44,10 @@ mod tests {
crate::db::ensure_content_store(); crate::db::ensure_content_store();
crate::db::write_item_with_content("10_story_test", "2_current", "---\nname: Test\n---\n"); crate::db::write_item_with_content("10_story_test", "2_current", "---\nname: Test\n---\n");
let tmp = tempfile::tempdir().unwrap(); let tmp = tempfile::tempdir().unwrap();
assert_eq!( assert!(matches!(
find_active_story_stage(tmp.path(), "10_story_test"), find_active_story_stage(tmp.path(), "10_story_test"),
Some("2_current") Some(crate::pipeline_state::Stage::Coding)
); ));
} }
#[test] #[test]
@@ -55,10 +55,10 @@ mod tests {
crate::db::ensure_content_store(); crate::db::ensure_content_store();
crate::db::write_item_with_content("11_story_test", "3_qa", "---\nname: Test\n---\n"); crate::db::write_item_with_content("11_story_test", "3_qa", "---\nname: Test\n---\n");
let tmp = tempfile::tempdir().unwrap(); let tmp = tempfile::tempdir().unwrap();
assert_eq!( assert!(matches!(
find_active_story_stage(tmp.path(), "11_story_test"), find_active_story_stage(tmp.path(), "11_story_test"),
Some("3_qa") Some(crate::pipeline_state::Stage::Qa)
); ));
} }
#[test] #[test]
@@ -66,10 +66,10 @@ mod tests {
crate::db::ensure_content_store(); crate::db::ensure_content_store();
crate::db::write_item_with_content("12_story_test", "4_merge", "---\nname: Test\n---\n"); crate::db::write_item_with_content("12_story_test", "4_merge", "---\nname: Test\n---\n");
let tmp = tempfile::tempdir().unwrap(); let tmp = tempfile::tempdir().unwrap();
assert_eq!( assert!(matches!(
find_active_story_stage(tmp.path(), "12_story_test"), find_active_story_stage(tmp.path(), "12_story_test"),
Some("4_merge") Some(crate::pipeline_state::Stage::Merge { .. })
); ));
} }
#[test] #[test]
+9 -8
View File
@@ -119,14 +119,15 @@ pub async fn handle_delete(
/// Human-readable label for a pipeline stage directory name. /// Human-readable label for a pipeline stage directory name.
fn stage_display_name(stage: &str) -> &str { fn stage_display_name(stage: &str) -> &str {
match stage { use crate::pipeline_state::Stage;
"1_backlog" => "backlog", match Stage::from_dir(stage) {
"2_current" => "in-progress", Some(Stage::Backlog) => "backlog",
"3_qa" => "QA", Some(Stage::Coding) => "in-progress",
"4_merge" => "merge", Some(Stage::Qa) => "QA",
"5_done" => "done", Some(Stage::Merge { .. }) => "merge",
"6_archived" => "archived", Some(Stage::Done { .. }) => "done",
other => other, Some(Stage::Archived { .. }) => "archived",
None => stage,
} }
} }
+8 -14
View File
@@ -335,15 +335,12 @@ pub(super) fn extract_item_view(item: &PipelineItemCrdt) -> Option<PipelineItemV
/// Returns `true` if the dependency is satisfied (item found in a done stage). /// Returns `true` if the dependency is satisfied (item found in a done stage).
/// See `dep_is_archived_crdt` to distinguish archive-satisfied from cleanly-done. /// See `dep_is_archived_crdt` to distinguish archive-satisfied from cleanly-done.
pub fn dep_is_done_crdt(dep_number: u32) -> bool { pub fn dep_is_done_crdt(dep_number: u32) -> bool {
use crate::pipeline_state::{Stage, read_all_typed};
let prefix = format!("{dep_number}_"); let prefix = format!("{dep_number}_");
if let Some(items) = read_all_items() { read_all_typed().into_iter().any(|item| {
items.iter().any(|item| { item.story_id.0.starts_with(&prefix)
item.story_id.starts_with(&prefix) && matches!(item.stage, Stage::Done { .. } | Stage::Archived { .. })
&& matches!(item.stage.as_str(), "5_done" | "6_archived")
}) })
} else {
false
}
} }
/// Check whether a dependency (by numeric ID prefix) is specifically in `6_archived` /// Check whether a dependency (by numeric ID prefix) is specifically in `6_archived`
@@ -352,14 +349,11 @@ pub fn dep_is_done_crdt(dep_number: u32) -> bool {
/// Used to detect when a dependency is satisfied via archive rather than via a clean /// Used to detect when a dependency is satisfied via archive rather than via a clean
/// completion through `5_done`. Returns `false` when the CRDT layer is not initialised. /// completion through `5_done`. Returns `false` when the CRDT layer is not initialised.
pub fn dep_is_archived_crdt(dep_number: u32) -> bool { pub fn dep_is_archived_crdt(dep_number: u32) -> bool {
use crate::pipeline_state::{Stage, read_all_typed};
let prefix = format!("{dep_number}_"); let prefix = format!("{dep_number}_");
if let Some(items) = read_all_items() { read_all_typed().into_iter().any(|item| {
items item.story_id.0.starts_with(&prefix) && matches!(item.stage, Stage::Archived { .. })
.iter() })
.any(|item| item.story_id.starts_with(&prefix) && item.stage == "6_archived")
} else {
false
}
} }
/// Check unmet dependencies for a story by reading its `depends_on` from the /// Check unmet dependencies for a story by reading its `depends_on` from the
+6 -2
View File
@@ -247,7 +247,9 @@ pub fn write_item_with_content(story_id: &str, stage: &str, content: &str) {
write_content(story_id, content); write_content(story_id, content);
// Primary: CRDT ops. // Primary: CRDT ops.
let merged_at_ts = if stage == "5_done" { let merged_at_ts = if crate::pipeline_state::Stage::from_dir(stage)
.is_some_and(|s| matches!(s, crate::pipeline_state::Stage::Done { .. }))
{
Some(chrono::Utc::now().timestamp() as f64) Some(chrono::Utc::now().timestamp() as f64)
} else { } else {
None None
@@ -321,7 +323,9 @@ pub fn move_item_stage(
.unwrap_or((None, None, None, None, None)); .unwrap_or((None, None, None, None, None));
// CRDT stage transition. // CRDT stage transition.
let merged_at_ts = if new_stage == "5_done" { let merged_at_ts = if crate::pipeline_state::Stage::from_dir(new_stage)
.is_some_and(|s| matches!(s, crate::pipeline_state::Stage::Done { .. }))
{
Some(chrono::Utc::now().timestamp() as f64) Some(chrono::Utc::now().timestamp() as f64)
} else { } else {
None None
+7 -1
View File
@@ -18,7 +18,13 @@ pub(super) async fn tool_merge_agent_work(
// Check CRDT stage before attempting merge — if already done or archived, // Check CRDT stage before attempting merge — if already done or archived,
// return success immediately to avoid spurious error notifications. // return success immediately to avoid spurious error notifications.
if let Some(item) = crate::crdt_state::read_item(story_id) if let Some(item) = crate::crdt_state::read_item(story_id)
&& (item.stage == "5_done" || item.stage == "6_archived") && crate::pipeline_state::Stage::from_dir(&item.stage).is_some_and(|s| {
matches!(
s,
crate::pipeline_state::Stage::Done { .. }
| crate::pipeline_state::Stage::Archived { .. }
)
})
{ {
return serde_json::to_string_pretty(&json!({ return serde_json::to_string_pretty(&json!({
"story_id": story_id, "story_id": story_id,
+9 -9
View File
@@ -106,16 +106,16 @@ pub fn is_config_file(path: &Path, git_root: &Path) -> bool {
/// Used by the CRDT-to-watcher bridge (in `main.rs`) to derive the action and /// Used by the CRDT-to-watcher bridge (in `main.rs`) to derive the action and
/// commit message for `WatcherEvent::WorkItem` events. /// commit message for `WatcherEvent::WorkItem` events.
pub fn stage_metadata(stage: &str, item_id: &str) -> Option<(&'static str, String)> { pub fn stage_metadata(stage: &str, item_id: &str) -> Option<(&'static str, String)> {
let (action, prefix) = match stage { use crate::pipeline_state::Stage;
"1_backlog" => ("create", format!("huskies: create {item_id}")), let (action, msg) = match Stage::from_dir(stage)? {
"2_current" => ("start", format!("huskies: start {item_id}")), Stage::Backlog => ("create", format!("huskies: create {item_id}")),
"3_qa" => ("qa", format!("huskies: queue {item_id} for QA")), Stage::Coding => ("start", format!("huskies: start {item_id}")),
"4_merge" => ("merge", format!("huskies: queue {item_id} for merge")), Stage::Qa => ("qa", format!("huskies: queue {item_id} for QA")),
"5_done" => ("done", format!("huskies: done {item_id}")), Stage::Merge { .. } => ("merge", format!("huskies: queue {item_id} for merge")),
"6_archived" => ("accept", format!("huskies: accept {item_id}")), Stage::Done { .. } => ("done", format!("huskies: done {item_id}")),
_ => return None, Stage::Archived { .. } => ("accept", format!("huskies: accept {item_id}")),
}; };
Some((action, prefix)) Some((action, msg))
} }
/// Return the pipeline stage name for a path if it is a `.md` file living /// Return the pipeline stage name for a path if it is a `.md` file living
+4 -2
View File
@@ -352,7 +352,8 @@ async fn main() -> Result<(), std::io::Error> {
tokio::spawn(async move { tokio::spawn(async move {
while let Ok(evt) = crdt_rx.recv().await { while let Ok(evt) = crdt_rx.recv().await {
// Prune the worktree when a story is archived. // Prune the worktree when a story is archived.
if evt.to_stage == "6_archived" if crate::pipeline_state::Stage::from_dir(&evt.to_stage)
.is_some_and(|s| matches!(s, crate::pipeline_state::Stage::Archived { .. }))
&& let Some(root) = crdt_prune_root.as_ref().cloned() && let Some(root) = crdt_prune_root.as_ref().cloned()
{ {
let story_id = evt.story_id.clone(); let story_id = evt.story_id.clone();
@@ -389,7 +390,8 @@ async fn main() -> Result<(), std::io::Error> {
let mut rx = watcher_auto_rx; let mut rx = watcher_auto_rx;
while let Ok(event) = rx.recv().await { while let Ok(event) = rx.recv().await {
if let io::watcher::WatcherEvent::WorkItem { ref stage, .. } = event if let io::watcher::WatcherEvent::WorkItem { ref stage, .. } = event
&& matches!(stage.as_str(), "2_current" | "3_qa" | "4_merge") && crate::pipeline_state::Stage::from_dir(stage.as_str())
.is_some_and(|s| s.is_active())
{ {
slog!( slog!(
"[auto-assign] CRDT transition detected in {stage}/; \ "[auto-assign] CRDT transition detected in {stage}/; \
+37 -6
View File
@@ -12,17 +12,15 @@
//! event bus are fully defined and tested here. Consumers will be migrated to //! event bus are fully defined and tested here. Consumers will be migrated to
//! the typed API incrementally in follow-up stories. //! the typed API incrementally in follow-up stories.
// Foundation module — all items are exercised by tests but not yet called from // Some items are exercised by tests or used only in non-active code paths;
// non-test code. The dead_code lint is suppressed until consumer migration. // the dead_code lint is suppressed for the module.
#![allow(unused_imports, dead_code)] #![allow(dead_code)]
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use std::num::NonZeroU32; use std::num::NonZeroU32;
use crate::crdt_state::PipelineItemView;
// ── Newtypes ──────────────────────────────────────────────────────────────── // ── Newtypes ────────────────────────────────────────────────────────────────
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -134,6 +132,35 @@ impl Stage {
} }
) )
} }
/// Parse a stage from its filesystem directory name.
///
/// This is the single canonical conversion boundary for turning a loose
/// stage-directory string (from CRDT fields or watcher events) into a
/// typed `Stage`. Rich variants (`Done`, `Archived`, `Merge`) are
/// synthesised with placeholder fields — callers should use this only for
/// stage *classification* (e.g. `is_active()`, `matches!`), not for
/// accessing the rich metadata fields.
///
/// Returns `None` for any string that is not one of the six known
/// `N_name` stage directory names.
pub fn from_dir(s: &str) -> Option<Self> {
    match s {
        "1_backlog" => Some(Stage::Backlog),
        "2_current" => Some(Stage::Coding),
        "3_qa" => Some(Stage::Qa),
        "4_merge" => Some(Stage::Merge {
            // Placeholder: the real branch/commit data is unknown at parse time.
            feature_branch: BranchName(String::new()),
            // `NonZeroU32::MIN` is 1 — a const, avoiding a runtime `expect`.
            commits_ahead: NonZeroU32::MIN,
        }),
        "5_done" => Some(Stage::Done {
            merged_at: DateTime::<Utc>::UNIX_EPOCH,
            merge_commit: GitSha(String::new()),
        }),
        "6_archived" => Some(Stage::Archived {
            archived_at: DateTime::<Utc>::UNIX_EPOCH,
            reason: ArchiveReason::Completed,
        }),
        _ => None,
    }
}
} }
// ── Per-node execution state ──────────────────────────────────────────────── // ── Per-node execution state ────────────────────────────────────────────────
@@ -464,8 +491,12 @@ mod events;
mod projection; mod projection;
mod subscribers; mod subscribers;
#[allow(unused_imports)]
pub use events::{EventBus, TransitionFired, TransitionSubscriber}; pub use events::{EventBus, TransitionFired, TransitionSubscriber};
pub use projection::{ProjectionError, project_stage, read_all_typed, read_typed}; #[allow(unused_imports)]
pub use projection::{ProjectionError, project_stage};
pub use projection::{read_all_typed, read_typed};
#[allow(unused_imports)]
pub use subscribers::{ pub use subscribers::{
AutoAssignSubscriber, FileRendererSubscriber, MatrixBotSubscriber, PipelineItemsSubscriber, AutoAssignSubscriber, FileRendererSubscriber, MatrixBotSubscriber, PipelineItemsSubscriber,
WebUiBroadcastSubscriber, WebUiBroadcastSubscriber,
+1
View File
@@ -2,6 +2,7 @@
use super::Stage; use super::Stage;
use super::events::{TransitionFired, TransitionSubscriber}; use super::events::{TransitionFired, TransitionSubscriber};
#[allow(unused_imports)]
use super::{event_label, stage_dir_name, stage_label}; use super::{event_label, stage_dir_name, stage_label};
// ── Subscriber stubs (real dispatch uses these as the interface) ───────────── // ── Subscriber stubs (real dispatch uses these as the interface) ─────────────
+9 -8
View File
@@ -8,14 +8,15 @@ use crate::service::common::item_id::extract_item_number;
/// Human-readable display name for a pipeline stage directory. /// Human-readable display name for a pipeline stage directory.
pub fn stage_display_name(stage: &str) -> &'static str { pub fn stage_display_name(stage: &str) -> &'static str {
match stage { use crate::pipeline_state::Stage;
"1_backlog" => "Backlog", match Stage::from_dir(stage) {
"2_current" => "Current", Some(Stage::Backlog) => "Backlog",
"3_qa" => "QA", Some(Stage::Coding) => "Current",
"4_merge" => "Merge", Some(Stage::Qa) => "QA",
"5_done" => "Done", Some(Stage::Merge { .. }) => "Merge",
"6_archived" => "Archived", Some(Stage::Done { .. }) => "Done",
_ => "Unknown", Some(Stage::Archived { .. }) => "Archived",
None => "Unknown",
} }
} }
+13 -10
View File
@@ -8,23 +8,26 @@
/// ///
/// Valid stage names match the `.huskies/work/N_name/` directory scheme. /// Valid stage names match the `.huskies/work/N_name/` directory scheme.
pub fn is_valid_stage(stage: &str) -> bool { pub fn is_valid_stage(stage: &str) -> bool {
matches!( crate::pipeline_state::Stage::from_dir(stage).is_some()
stage,
"1_backlog" | "2_current" | "3_qa" | "4_merge" | "5_done" | "6_archived"
)
} }
#[allow(dead_code)] #[allow(dead_code)]
/// Map a human-readable stage alias (e.g. `"backlog"`) to its directory name /// Map a human-readable stage alias (e.g. `"backlog"`) to its directory name
/// (e.g. `"1_backlog"`). Returns `None` for unrecognised aliases. /// (e.g. `"1_backlog"`). Returns `None` for unrecognised aliases.
pub fn stage_alias_to_dir(alias: &str) -> Option<&'static str> { pub fn stage_alias_to_dir(alias: &str) -> Option<&'static str> {
use crate::pipeline_state::Stage;
// Canonical directory names (e.g. "1_backlog") round-trip through the typed enum.
if let Some(stage) = Stage::from_dir(alias) {
return Some(stage.dir_name());
}
// Short human-readable aliases (user-facing input normalization).
match alias { match alias {
"backlog" | "1_backlog" => Some("1_backlog"), "backlog" => Some("1_backlog"),
"current" | "2_current" => Some("2_current"), "current" => Some("2_current"),
"qa" | "3_qa" => Some("3_qa"), "qa" => Some("3_qa"),
"merge" | "4_merge" => Some("4_merge"), "merge" => Some("4_merge"),
"done" | "5_done" => Some("5_done"), "done" => Some("5_done"),
"archived" | "6_archived" => Some("6_archived"), "archived" => Some("6_archived"),
_ => None, _ => None,
} }
} }