// huskies/server/src/pipeline_state/projection.rs
//! Projection layer — converts loose CRDT views into typed `PipelineItem` enums.
#![allow(unused_imports, dead_code)]
use chrono::{DateTime, Utc};
use std::fmt;
use std::num::NonZeroU32;
use crate::crdt_state::{PipelineItemView, read_all_items, read_item};
use super::{
ArchiveReason, BranchName, ExecutionState, GitSha, PipelineItem, Stage, StoryId, stage_dir_name,
};
/// Errors produced while projecting loose CRDT data into typed enums.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ProjectionError {
    /// The CRDT stage string does not correspond to any known `Stage` variant.
    UnknownStage(String),
    /// The CRDT data lacks a field that projection requires.
    MissingField(&'static str),
    /// A field is present but carries an invalid value.
    InvalidField { field: &'static str, detail: String },
}
impl fmt::Display for ProjectionError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render each variant as a short human-readable diagnostic.
        match self {
            Self::UnknownStage(stage) => write!(f, "unknown stage: {stage:?}"),
            Self::MissingField(name) => write!(f, "missing required field: {name}"),
            Self::InvalidField { field, detail } => write!(f, "invalid field {field}: {detail}"),
        }
    }
}

impl std::error::Error for ProjectionError {}
// ── Projection: PipelineItemView → PipelineItem ─────────────────────────────
impl TryFrom<&PipelineItemView> for PipelineItem {
    type Error = ProjectionError;

    /// Project a loose CRDT view into a fully-typed `PipelineItem`.
    ///
    /// Absent optional fields fall back to neutral defaults (empty name, no
    /// dependencies, zero retries); only the stage projection can fail.
    fn try_from(view: &PipelineItemView) -> Result<Self, ProjectionError> {
        // The stage is the only fallible part of the projection.
        let stage = project_stage(view)?;

        let depends_on: Vec<StoryId> = match view.depends_on.as_ref() {
            Some(deps) => deps.iter().map(|d| StoryId(d.to_string())).collect(),
            None => Vec::new(),
        };

        // Clamp any negative legacy retry count to zero before widening to u32.
        let retry_count = view.retry_count.unwrap_or(0).max(0) as u32;

        Ok(PipelineItem {
            story_id: StoryId(view.story_id.clone()),
            name: view.name.clone().unwrap_or_default(),
            stage,
            depends_on,
            retry_count,
        })
    }
}
/// Project the stage string + associated fields from a PipelineItemView into
/// a typed Stage enum. This is the one carefully-controlled boundary where
/// loose CRDT data becomes typed.
pub fn project_stage(view: &PipelineItemView) -> Result<Stage, ProjectionError> {
match view.stage.as_str() {
2026-04-29 17:38:38 +00:00
"0_upcoming" => Ok(Stage::Upcoming),
"1_backlog" => Ok(Stage::Backlog),
2026-04-29 22:42:59 +00:00
"2_blocked" => Ok(Stage::Blocked {
reason: String::new(),
}),
"2_current" => Ok(Stage::Coding),
"3_qa" => Ok(Stage::Qa),
"4_merge" => {
// Merge stage in the current CRDT doesn't carry feature_branch or
// commits_ahead — those are computed at transition time. For
// projection from existing CRDT data, we synthesize defaults.
// The feature branch follows the naming convention.
let branch = format!("feature/story-{}", view.story_id);
// Existing CRDT data doesn't track commits_ahead, so we use 1 as
// a safe non-zero default (the item is in merge, so there must be
// at least one commit).
Ok(Stage::Merge {
feature_branch: BranchName(branch),
commits_ahead: NonZeroU32::new(1).expect("1 is non-zero"),
})
}
"5_done" => {
// Use the stored merged_at timestamp if present. Legacy items
// that pre-date this field have merged_at = None, so we fall back
// to UNIX_EPOCH, which makes them older than any retention window
// and therefore eligible for immediate sweep to 6_archived.
let merged_at = view
.merged_at
.map(|ts| {
DateTime::from_timestamp(ts as i64, 0).unwrap_or(DateTime::<Utc>::UNIX_EPOCH)
})
.unwrap_or(DateTime::<Utc>::UNIX_EPOCH);
Ok(Stage::Done {
merged_at,
merge_commit: GitSha("legacy".to_string()),
})
}
"6_archived" => {
// Determine the archive reason from the CRDT fields.
let reason = if view.blocked == Some(true) {
ArchiveReason::Blocked {
reason: "migrated from legacy blocked field".to_string(),
}
} else {
// Default to Completed for legacy archived items.
ArchiveReason::Completed
};
Ok(Stage::Archived {
archived_at: Utc::now(),
reason,
})
}
2026-04-29 22:12:23 +00:00
"7_frozen" => {
// The stage to resume to is stored in front matter as `resume_to_stage`.
// Fall back to Coding if the field is absent (e.g. legacy frozen items).
let resume_to = crate::db::read_content(&view.story_id)
.and_then(|content| {
crate::io::story_metadata::parse_front_matter(&content)
.ok()
.and_then(|m| m.resume_to_stage)
.and_then(|dir| Stage::from_dir(&dir))
})
.unwrap_or(Stage::Coding);
Ok(Stage::Frozen {
resume_to: Box::new(resume_to),
})
}
other => Err(ProjectionError::UnknownStage(other.to_string())),
}
}
// ── Reverse projection: PipelineItem → stage dir string ─────────────────────
impl PipelineItem {
/// Convert back to the loose fields that the CRDT write path expects.
/// Returns `(stage_dir, blocked)`.
pub fn to_crdt_fields(&self) -> (&'static str, bool) {
let dir = stage_dir_name(&self.stage);
let blocked = matches!(
self.stage,
2026-04-29 22:42:59 +00:00
Stage::Blocked { .. }
| Stage::Archived {
reason: ArchiveReason::Blocked { .. },
..
}
);
2026-04-29 22:12:23 +00:00
// Frozen stories map to "7_frozen"; they are not "blocked" in the CRDT sense.
(dir, blocked)
}
}
// ── Bridge to existing CRDT reads ───────────────────────────────────────────
/// Read all pipeline items from the CRDT and project them into typed enums.
///
/// Items that fail projection (e.g. unknown stage strings from a future
/// version) are logged and skipped — they don't poison the entire read.
pub fn read_all_typed() -> Vec<PipelineItem> {
    let views = match crate::crdt_state::read_all_items() {
        Some(v) => v,
        None => return Vec::new(),
    };
    let mut items = Vec::with_capacity(views.len());
    for view in &views {
        match PipelineItem::try_from(view) {
            Ok(item) => items.push(item),
            Err(e) => crate::slog!(
                "[pipeline_state] projection error for '{}': {e}",
                view.story_id
            ),
        }
    }
    items
}
/// Read a single pipeline item by story_id and project it into the typed enum.
pub fn read_typed(story_id: &str) -> Result<Option<PipelineItem>, ProjectionError> {
    match crate::crdt_state::read_item(story_id) {
        None => Ok(None),
        Some(view) => PipelineItem::try_from(&view).map(Some),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;
    use std::num::NonZeroU32;

    // Shorthand constructors for the newtype wrappers used in the cases below.
    fn nz(n: u32) -> NonZeroU32 {
        NonZeroU32::new(n).unwrap()
    }
    fn fb(name: &str) -> BranchName {
        BranchName(name.to_string())
    }
    fn sha(s: &str) -> GitSha {
        GitSha(s.to_string())
    }
    fn sid(s: &str) -> StoryId {
        StoryId(s.to_string())
    }

    #[test]
    fn project_upcoming_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "0_upcoming".to_string(),
            name: Some("Test Story".to_string()),
            agent: None,
            retry_count: None,
            blocked: None,
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert!(matches!(item.stage, Stage::Upcoming));
    }

    #[test]
    fn project_backlog_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "1_backlog".to_string(),
            name: Some("Test Story".to_string()),
            agent: None,
            retry_count: None,
            blocked: None,
            depends_on: Some(vec![10, 20]),
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert_eq!(item.story_id, StoryId("42_story_test".to_string()));
        assert_eq!(item.name, "Test Story");
        assert!(matches!(item.stage, Stage::Backlog));
        assert_eq!(item.depends_on.len(), 2);
        assert_eq!(item.retry_count, 0);
    }

    #[test]
    fn project_current_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "2_current".to_string(),
            name: Some("Test".to_string()),
            agent: Some("coder-1".to_string()),
            retry_count: Some(2),
            blocked: None,
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert!(matches!(item.stage, Stage::Coding));
        assert_eq!(item.retry_count, 2);
    }

    #[test]
    fn project_merge_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "4_merge".to_string(),
            name: Some("Test".to_string()),
            agent: None,
            retry_count: None,
            blocked: None,
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert!(matches!(item.stage, Stage::Merge { .. }));
        if let Stage::Merge {
            feature_branch,
            commits_ahead,
        } = &item.stage
        {
            assert_eq!(feature_branch.0, "feature/story-42_story_test");
            assert_eq!(commits_ahead.get(), 1);
        }
    }

    #[test]
    fn project_blocked_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "2_blocked".to_string(),
            name: Some("Test".to_string()),
            agent: None,
            retry_count: None,
            blocked: None,
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert!(matches!(item.stage, Stage::Blocked { .. }));
    }

    #[test]
    fn project_archived_blocked_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "6_archived".to_string(),
            name: Some("Test".to_string()),
            agent: None,
            retry_count: None,
            blocked: Some(true),
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert!(matches!(
            item.stage,
            Stage::Archived {
                reason: ArchiveReason::Blocked { .. },
                ..
            }
        ));
    }

    #[test]
    fn project_archived_completed_item() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "6_archived".to_string(),
            name: Some("Test".to_string()),
            agent: None,
            retry_count: None,
            blocked: Some(false),
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let item = PipelineItem::try_from(&view).unwrap();
        assert!(matches!(
            item.stage,
            Stage::Archived {
                reason: ArchiveReason::Completed,
                ..
            }
        ));
    }

    #[test]
    fn project_unknown_stage_returns_error() {
        let view = PipelineItemView {
            story_id: "42_story_test".to_string(),
            stage: "9_invalid".to_string(),
            name: Some("Test".to_string()),
            agent: None,
            retry_count: None,
            blocked: None,
            depends_on: None,
            claimed_by: None,
            claimed_at: None,
            merged_at: None,
            qa_mode: None,
            mergemaster_attempted: None,
        };
        let result = PipelineItem::try_from(&view);
        assert!(matches!(
            result,
            Err(ProjectionError::UnknownStage(s)) if s == "9_invalid"
        ));
    }

    // ── Reverse projection tests ────────────────────────────────────────
    #[test]
    fn reverse_projection_stage_dirs() {
        let cases: Vec<(Stage, &str, bool)> = vec![
            (Stage::Upcoming, "0_upcoming", false),
            (Stage::Backlog, "1_backlog", false),
            (Stage::Coding, "2_current", false),
            (
                Stage::Blocked {
                    reason: "stuck".into(),
                },
                "2_blocked",
                true,
            ),
            (Stage::Qa, "3_qa", false),
            (
                Stage::Merge {
                    feature_branch: fb("f"),
                    commits_ahead: nz(1),
                },
                "4_merge",
                false,
            ),
            (
                Stage::Done {
                    merged_at: Utc::now(),
                    merge_commit: sha("abc"),
                },
                "5_done",
                false,
            ),
            (
                Stage::Archived {
                    archived_at: Utc::now(),
                    reason: ArchiveReason::Completed,
                },
                "6_archived",
                false,
            ),
            (
                Stage::Archived {
                    archived_at: Utc::now(),
                    reason: ArchiveReason::Blocked {
                        reason: "stuck".into(),
                    },
                },
                "6_archived",
                true,
            ),
        ];
        for (stage, expected_dir, expected_blocked) in cases {
            let item = PipelineItem {
                story_id: StoryId("test".into()),
                name: "test".into(),
                stage,
                depends_on: vec![],
                retry_count: 0,
            };
            let (dir, blocked) = item.to_crdt_fields();
            assert_eq!(dir, expected_dir);
            assert_eq!(blocked, expected_blocked);
        }
    }

    // ── ProjectionError display tests ───────────────────────────────────
    #[test]
    fn projection_error_display() {
        let err = ProjectionError::UnknownStage("9_invalid".into());
        assert_eq!(err.to_string(), "unknown stage: \"9_invalid\"");
        let err = ProjectionError::MissingField("story_id");
        assert_eq!(err.to_string(), "missing required field: story_id");
    }
}