huskies: merge 880

This commit is contained in:
dave
2026-04-29 21:41:44 +00:00
parent 4d24b5b661
commit 7e2f122d36
16 changed files with 508 additions and 40 deletions
+76
View File
@@ -0,0 +1,76 @@
//! Epic work-item creation operations.
//!
//! Epics are shared-context containers that group related stories, bugs, spikes, and
//! refactors under a common goal. They are stored in the CRDT items list with
//! `type: epic` and are not pipeline-driven (no stage advancement).
use std::path::Path;
use super::super::{next_item_number, slugify_name, write_story_content};
/// Create an epic file and store it in the database.
///
/// Builds a YAML-front-matter markdown document for the epic (goal,
/// motivation, key files, success criteria — optional sections fall back to
/// a `- TBD` placeholder so every heading is always present) and persists it
/// to the `1_backlog` stage via `write_story_content`.
///
/// Returns the epic_id (e.g. `"880"`).
///
/// # Errors
///
/// Returns `Err` when `next_item_number` fails, or when `name` contains no
/// alphanumeric characters (i.e. it slugifies to an empty string).
pub fn create_epic_file(
    root: &Path,
    name: &str,
    goal: &str,
    motivation: Option<&str>,
    key_files: Option<&str>,
    success_criteria: Option<&[String]>,
) -> Result<String, String> {
    let epic_number = next_item_number(root)?;
    // The slug is not embedded in the id, but an empty slug means the name
    // had no usable characters — reject early before building any content.
    let slug = slugify_name(name);
    if slug.is_empty() {
        return Err("Name must contain at least one alphanumeric character.".to_string());
    }
    let epic_id = format!("{epic_number}");
    // YAML double-quoted scalar: backslashes must be escaped BEFORE quotes —
    // otherwise a name containing `\` or `"` yields malformed front matter
    // (and quote-first ordering would double-escape the inserted backslashes).
    let yaml_name = name.replace('\\', "\\\\").replace('"', "\\\"");
    let mut content = String::new();
    content.push_str("---\n");
    content.push_str("type: epic\n");
    content.push_str(&format!("name: \"{yaml_name}\"\n"));
    content.push_str("---\n\n");
    content.push_str(&format!("# Epic {epic_number}: {name}\n\n"));
    content.push_str("## Goal\n\n");
    content.push_str(goal);
    content.push_str("\n\n");
    push_optional_section(&mut content, "## Motivation", motivation);
    push_optional_section(&mut content, "## Key Files", key_files);
    content.push_str("## Success Criteria\n\n");
    match success_criteria {
        Some(criteria) if !criteria.is_empty() => {
            for c in criteria {
                content.push_str(&format!("- {c}\n"));
            }
        }
        _ => {
            content.push_str("- TBD\n");
        }
    }
    // Epics are stored in backlog (no pipeline advancement).
    write_story_content(root, &epic_id, "1_backlog", &content);
    Ok(epic_id)
}

/// Append a markdown section with the given heading; the body falls back to
/// a `- TBD` placeholder when absent. Mirrors the shared Motivation/Key Files
/// layout: heading, blank line, body (or placeholder), trailing blank line.
fn push_optional_section(content: &mut String, heading: &str, body: Option<&str>) {
    content.push_str(heading);
    content.push_str("\n\n");
    if let Some(b) = body {
        content.push_str(b);
        content.push('\n');
    } else {
        content.push_str("- TBD\n");
    }
    content.push('\n');
}
+3 -1
View File
@@ -1,6 +1,7 @@
//! Bug, spike, and refactor pipeline-item operations — creation and listing.
//! Bug, spike, refactor, and epic pipeline-item operations — creation and listing.
mod bug;
mod epic;
mod refactor;
mod spike;
@@ -8,5 +9,6 @@ mod spike;
mod tests;
pub use bug::{create_bug_file, list_bug_files};
pub use epic::create_epic_file;
pub use refactor::{create_refactor_file, list_refactor_files};
pub use spike::create_spike_file;
+2 -1
View File
@@ -6,7 +6,8 @@ mod test_results;
mod utils;
pub use bug_ops::{
create_bug_file, create_refactor_file, create_spike_file, list_bug_files, list_refactor_files,
create_bug_file, create_epic_file, create_refactor_file, create_spike_file, list_bug_files,
list_refactor_files,
};
pub use pipeline::{
PipelineState, UpcomingStory, load_pipeline_state, load_upcoming_stories, validate_story_dirs,
+47 -35
View File
@@ -40,6 +40,9 @@ pub struct UpcomingStory {
/// Story numbers this story depends on.
#[serde(skip_serializing_if = "Option::is_none")]
pub depends_on: Option<Vec<u32>>,
/// Epic this item belongs to (numeric ID as string, e.g. "880").
#[serde(skip_serializing_if = "Option::is_none")]
pub epic_id: Option<String>,
}
/// Validation outcome for a single story.
@@ -92,17 +95,18 @@ pub fn load_pipeline_state(ctx: &AppContext) -> Result<PipelineState, String> {
let sid = &item.story_id.0;
let agent = agent_map.get(sid).cloned();
// Enrich with content-derived metadata (merge_failure, review_hold, qa).
let (merge_failure, review_hold, qa) = crate::db::read_content(sid)
// Enrich with content-derived metadata (merge_failure, review_hold, qa, epic_id).
let (merge_failure, review_hold, qa, epic_id) = crate::db::read_content(sid)
.and_then(|c| parse_front_matter(&c).ok())
.map(|meta| {
(
meta.merge_failure,
meta.review_hold,
meta.qa.map(|m| m.as_str().to_string()),
meta.epic,
)
})
.unwrap_or((None, None, None));
.unwrap_or((None, None, None, None));
let story = UpcomingStory {
story_id: sid.clone(),
@@ -136,6 +140,7 @@ pub fn load_pipeline_state(ctx: &AppContext) -> Result<PipelineState, String> {
.collect(),
)
},
epic_id,
};
match &item.stage {
Stage::Upcoming => state.backlog.push(story), // upcoming shown with backlog
@@ -201,38 +206,45 @@ pub fn load_upcoming_stories(_ctx: &AppContext) -> Result<Vec<UpcomingStory>, St
let mut stories: Vec<UpcomingStory> = typed_items
.into_iter()
.filter(|item| matches!(item.stage, Stage::Backlog))
.map(|item| UpcomingStory {
story_id: item.story_id.0,
name: if item.name.is_empty() {
None
} else {
Some(item.name)
},
error: None,
merge_failure: None,
agent: None,
review_hold: None,
qa: None,
retry_count: if item.retry_count > 0 {
Some(item.retry_count)
} else {
None
},
blocked: if item.stage.is_blocked() {
Some(true)
} else {
None
},
depends_on: if item.depends_on.is_empty() {
None
} else {
Some(
item.depends_on
.iter()
.filter_map(|d| d.0.split('_').next()?.parse::<u32>().ok())
.collect(),
)
},
.map(|item| {
let sid = &item.story_id.0;
let epic_id = crate::db::read_content(sid)
.and_then(|c| parse_front_matter(&c).ok())
.and_then(|meta| meta.epic);
UpcomingStory {
story_id: item.story_id.0.clone(),
name: if item.name.is_empty() {
None
} else {
Some(item.name)
},
error: None,
merge_failure: None,
agent: None,
review_hold: None,
qa: None,
retry_count: if item.retry_count > 0 {
Some(item.retry_count)
} else {
None
},
blocked: if item.stage.is_blocked() {
Some(true)
} else {
None
},
depends_on: if item.depends_on.is_empty() {
None
} else {
Some(
item.depends_on
.iter()
.filter_map(|d| d.0.split('_').next()?.parse::<u32>().ok())
.collect(),
)
},
epic_id,
}
})
.collect();
stories.sort_by(|a, b| a.story_id.cmp(&b.story_id));