huskies: merge 880

This commit is contained in:
dave
2026-04-29 21:41:44 +00:00
parent 4d24b5b661
commit 7e2f122d36
16 changed files with 508 additions and 40 deletions
+17
View File
@@ -170,6 +170,23 @@ pub(super) async fn run_agent_spawn(
prompt.push_str(&local);
}
// Prepend epic context when the story belongs to an epic (AC3, story 880).
// Read the story's front matter to find the epic ID, then load the epic's
// content and prepend it to the system prompt so the agent treats it as
// authoritative context.
if let Some(story_content) = crate::db::read_content(&sid)
&& let Ok(meta) = crate::io::story_metadata::parse_front_matter(&story_content)
&& let Some(ref epic_id) = meta.epic
&& let Some(epic_content) = crate::db::read_content(epic_id)
{
let block = format!(
"# Epic Context\n\nThis work item belongs to epic `{epic_id}`.\
The following is the authoritative epic context you must respect:\n\n\
---\n{epic_content}\n---"
);
prompt = format!("{block}\n\n{prompt}");
}
// Append a reference to the source map if the file was written.
let source_map_path = project_root_clone.join(".huskies").join("source-map.json");
if source_map_path.exists() {
+4
View File
@@ -62,6 +62,10 @@ pub async fn dispatch_tool_call(
// Refactor lifecycle tools
"create_refactor" => story_tools::tool_create_refactor(&args, ctx),
"list_refactors" => story_tools::tool_list_refactors(ctx),
// Epic lifecycle tools
"create_epic" => story_tools::tool_create_epic(&args, ctx),
"list_epics" => story_tools::tool_list_epics(ctx),
"show_epic" => story_tools::tool_show_epic(&args, ctx),
// Mergemaster tools
"merge_agent_work" => merge_tools::tool_merge_agent_work(&args, ctx).await,
"get_merge_status" => merge_tools::tool_get_merge_status(&args, ctx),
+271
View File
@@ -0,0 +1,271 @@
//! Epic work-item MCP tools — create, list, and show epics.
//!
//! Epics are shared-context containers that group related stories, bugs, spikes,
//! and refactors. They are not pipeline-driven but provide authoritative context
//! injected into agent prompts for all member work items.
use crate::http::context::AppContext;
use crate::http::workflow::create_epic_file;
use crate::io::story_metadata::parse_front_matter;
use serde_json::{Value, json};
/// Create a new epic and store it in the CRDT items list.
pub(crate) fn tool_create_epic(args: &Value, ctx: &AppContext) -> Result<String, String> {
let name = args
.get("name")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: name")?;
let goal = args
.get("goal")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: goal")?;
let motivation = args.get("motivation").and_then(|v| v.as_str());
let key_files = args.get("key_files").and_then(|v| v.as_str());
let success_criteria: Option<Vec<String>> = args
.get("success_criteria")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.filter_map(|v| v.as_str().map(str::to_string))
.collect()
});
let root = ctx.state.get_project_root()?;
let epic_id = create_epic_file(
&root,
name,
goal,
motivation,
key_files,
success_criteria.as_deref(),
)?;
Ok(format!("Created epic: {epic_id}"))
}
/// List all epics with member work item counts and `n/m done` rollup.
pub(crate) fn tool_list_epics(_ctx: &AppContext) -> Result<String, String> {
use crate::pipeline_state::Stage;
let all_items = crate::pipeline_state::read_all_typed();
// Collect epics: items with type == "epic".
let mut epics: Vec<(String, String)> = Vec::new(); // (id, name)
// Collect member items: map from epic_id → list of (story_id, is_done).
let mut members: std::collections::HashMap<String, Vec<(String, bool)>> =
std::collections::HashMap::new();
for item in &all_items {
let sid = &item.story_id.0;
let content = match crate::db::read_content(sid) {
Some(c) => c,
None => continue,
};
let meta = match parse_front_matter(&content) {
Ok(m) => m,
Err(_) => continue,
};
if meta.item_type.as_deref() == Some("epic") {
epics.push((sid.clone(), item.name.clone()));
}
if let Some(epic_id) = meta.epic {
let is_done = matches!(item.stage, Stage::Done { .. });
members
.entry(epic_id)
.or_default()
.push((sid.clone(), is_done));
}
}
epics.sort_by(|a, b| a.0.cmp(&b.0));
let result: Vec<Value> = epics
.iter()
.map(|(id, name)| {
let member_list = members.get(id).cloned().unwrap_or_default();
let total = member_list.len();
let done = member_list.iter().filter(|(_, d)| *d).count();
json!({
"epic_id": id,
"name": name,
"members_total": total,
"members_done": done,
"rollup": format!("{done}/{total} done"),
})
})
.collect();
serde_json::to_string_pretty(&result).map_err(|e| format!("Serialization error: {e}"))
}
/// Show details for a single epic: content and member work items with their stages.
pub(crate) fn tool_show_epic(args: &Value, _ctx: &AppContext) -> Result<String, String> {
use crate::pipeline_state::Stage;
let epic_id = args
.get("epic_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: epic_id")?;
let content = crate::db::read_content(epic_id)
.ok_or_else(|| format!("Epic '{epic_id}' not found in content store"))?;
let meta = parse_front_matter(&content)
.map_err(|e| format!("Failed to parse epic front matter: {e}"))?;
if meta.item_type.as_deref() != Some("epic") {
return Err(format!(
"'{epic_id}' is not an epic (type: {:?})",
meta.item_type
));
}
// Find member items.
let all_items = crate::pipeline_state::read_all_typed();
let mut member_items: Vec<Value> = Vec::new();
for item in &all_items {
let sid = &item.story_id.0;
let member_content = match crate::db::read_content(sid) {
Some(c) => c,
None => continue,
};
let member_meta = match parse_front_matter(&member_content) {
Ok(m) => m,
Err(_) => continue,
};
if member_meta.epic.as_deref() == Some(epic_id) {
let stage_name = match &item.stage {
Stage::Upcoming | Stage::Backlog => "backlog",
Stage::Coding => "current",
Stage::Qa => "qa",
Stage::Merge { .. } => "merge",
Stage::Done { .. } => "done",
Stage::Archived { .. } => "archived",
};
member_items.push(json!({
"story_id": sid,
"name": item.name,
"stage": stage_name,
}));
}
}
let total = member_items.len();
let done = member_items.iter().filter(|i| i["stage"] == "done").count();
serde_json::to_string_pretty(&json!({
"epic_id": epic_id,
"name": meta.name,
"content": content,
"members": member_items,
"rollup": format!("{done}/{total} done"),
}))
.map_err(|e| format!("Serialization error: {e}"))
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::http::test_helpers::test_ctx;
    use serde_json::json;

    /// Creating an epic with every optional field supplied succeeds and the
    /// confirmation message carries the "Created epic:" prefix.
    #[test]
    fn tool_create_epic_creates_epic_and_returns_id() {
        let tmp = tempfile::tempdir().unwrap();
        // The content store must exist before create_epic_file writes to it.
        crate::db::ensure_content_store();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_epic(
            &json!({
                "name": "My Test Epic",
                "goal": "Achieve something great",
                "motivation": "Because it matters",
                "success_criteria": ["All stories done", "Tests pass"]
            }),
            &ctx,
        );
        assert!(result.is_ok(), "expected ok: {result:?}");
        let msg = result.unwrap();
        assert!(msg.contains("Created epic:"), "unexpected msg: {msg}");
    }

    /// Omitting the required `name` argument fails before any store access,
    /// and the error mentions the missing argument.
    #[test]
    fn tool_create_epic_missing_name_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_epic(&json!({"goal": "Achieve something"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("name"));
    }

    /// Omitting the required `goal` argument likewise fails with a
    /// descriptive error.
    #[test]
    fn tool_create_epic_missing_goal_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_epic(&json!({"name": "My Epic"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("goal"));
    }

    /// An epic created via the tool shows up in the list_epics output.
    #[test]
    fn tool_list_epics_includes_created_epic() {
        let tmp = tempfile::tempdir().unwrap();
        crate::db::ensure_content_store();
        let ctx = test_ctx(tmp.path());
        // Create an epic.
        tool_create_epic(
            &json!({"name": "List Epics Test Epic", "goal": "Testing list"}),
            &ctx,
        )
        .unwrap();
        let result = tool_list_epics(&ctx);
        assert!(result.is_ok(), "expected ok: {result:?}");
        let parsed: Vec<Value> = serde_json::from_str(&result.unwrap()).unwrap();
        // NOTE: the store is shared across tests, so we only assert presence,
        // not the exact number of epics.
        assert!(
            parsed.iter().any(|e| e["name"] == "List Epics Test Epic"),
            "expected epic in list: {parsed:?}"
        );
    }

    /// Rollup counts reflect member stages: one done + one current member
    /// yields members_total=2, members_done=1, rollup "1/2 done".
    #[test]
    fn tool_list_epics_shows_member_rollup() {
        crate::db::ensure_content_store();
        // Write a fake epic directly to the store (bypasses create_epic_file).
        crate::db::write_item_with_content(
            "9990_epic_rollup",
            "1_backlog",
            "---\ntype: epic\nname: \"Rollup Epic\"\n---\n\n## Goal\n\nTest\n",
        );
        // Write two member items: one done, one current.
        crate::db::write_item_with_content(
            "9991_story_member_done",
            "5_done",
            "---\ntype: story\nname: \"Done Member\"\nepic: \"9990_epic_rollup\"\n---\n",
        );
        crate::db::write_item_with_content(
            "9992_story_member_current",
            "2_current",
            "---\ntype: story\nname: \"Current Member\"\nepic: \"9990_epic_rollup\"\n---\n",
        );
        let tmp = tempfile::tempdir().unwrap();
        let ctx = crate::http::test_helpers::test_ctx(tmp.path());
        let result = tool_list_epics(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
        let epic = parsed
            .iter()
            .find(|e| e["epic_id"] == "9990_epic_rollup")
            .expect("expected rollup epic in list");
        assert_eq!(epic["members_total"], 2, "two members expected");
        assert_eq!(epic["members_done"], 1, "one done member expected");
        assert_eq!(epic["rollup"], "1/2 done");
    }
}
+3 -1
View File
@@ -1,5 +1,5 @@
//! MCP story tools — create, update, move, and manage stories, bugs, refactors,
//! and spikes via MCP.
//! spikes, and epics via MCP.
//!
//! This module is a thin adapter: it deserialises MCP payloads, delegates to
//! `crate::service::story` and `crate::http::workflow` for business logic,
@@ -7,6 +7,7 @@
mod bug;
mod criteria;
mod epic;
mod refactor;
mod spike;
mod story;
@@ -16,6 +17,7 @@ pub(crate) use criteria::{
tool_add_criterion, tool_check_criterion, tool_edit_criterion, tool_ensure_acceptance,
tool_get_story_todos, tool_record_tests, tool_remove_criterion,
};
pub(crate) use epic::{tool_create_epic, tool_list_epics, tool_show_epic};
pub(crate) use refactor::{tool_create_refactor, tool_list_refactors};
pub(crate) use spike::tool_create_spike;
pub(crate) use story::{
+10 -1
View File
@@ -64,6 +64,9 @@ pub(crate) fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, Strin
if let Some(ref mf) = s.merge_failure {
item["merge_failure"] = json!(mf);
}
if let Some(ref epic_id) = s.epic_id {
item["epic_id"] = json!(epic_id);
}
item
})
.collect()
@@ -78,7 +81,13 @@ pub(crate) fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, Strin
let backlog: Vec<Value> = state
.backlog
.iter()
.map(|s| json!({ "story_id": s.story_id, "name": s.name }))
.map(|s| {
let mut item = json!({ "story_id": s.story_id, "name": s.name });
if let Some(ref epic_id) = s.epic_id {
item["epic_id"] = json!(epic_id);
}
item
})
.collect();
serde_json::to_string_pretty(&json!({
@@ -25,6 +25,9 @@ pub(crate) fn tool_update_story(args: &Value, ctx: &AppContext) -> Result<String
if let Some(agent) = args.get("agent").and_then(|v| v.as_str()) {
front_matter.insert("agent".to_string(), Value::String(agent.to_string()));
}
if let Some(epic) = args.get("epic").and_then(|v| v.as_str()) {
front_matter.insert("epic".to_string(), Value::String(epic.to_string()));
}
if let Some(obj) = args.get("front_matter").and_then(|v| v.as_object()) {
for (k, v) in obj {
front_matter.insert(k.clone(), v.clone());
+4 -1
View File
@@ -101,7 +101,10 @@ mod tests {
assert!(names.contains(&"mesh_status"));
assert!(names.contains(&"run_check"));
assert!(names.contains(&"cleanup_worktrees"));
assert_eq!(tools.len(), 69);
assert!(names.contains(&"create_epic"));
assert!(names.contains(&"list_epics"));
assert!(names.contains(&"show_epic"));
assert_eq!(tools.len(), 72);
}
#[test]
@@ -242,6 +242,10 @@ pub(super) fn story_tools() -> Vec<Value> {
"type": "string",
"description": "Set or change the 'agent' YAML front matter field"
},
"epic": {
"type": "string",
"description": "Set or change the 'epic' field — the numeric epic ID this item belongs to (e.g. '880'). Agents spawned for this item will have the epic context prepended to their system prompt."
},
"front_matter": {
"type": "object",
"description": "Arbitrary YAML front matter key-value pairs to set or update. Values may be strings, booleans, integers, numbers, or arrays (e.g. [490, 491]).",
@@ -369,6 +373,59 @@ pub(super) fn story_tools() -> Vec<Value> {
"properties": {}
}
}),
json!({
"name": "create_epic",
"description": "Create an epic: a shared-context container for chains of related stories. Epics are not pipeline-driven but inject their goal and context into agent prompts for all member work items. Returns the epic_id.",
"inputSchema": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Human-readable epic name"
},
"goal": {
"type": "string",
"description": "What this epic aims to achieve"
},
"motivation": {
"type": "string",
"description": "Optional: why this epic matters"
},
"key_files": {
"type": "string",
"description": "Optional: key files or modules relevant to this epic"
},
"success_criteria": {
"type": "array",
"items": { "type": "string" },
"description": "Optional: list of high-level success criteria for the epic"
}
},
"required": ["name", "goal"]
}
}),
json!({
"name": "list_epics",
"description": "List all epics with their member work item counts and n/m done rollup.",
"inputSchema": {
"type": "object",
"properties": {}
}
}),
json!({
"name": "show_epic",
"description": "Show details for a single epic: its content and all member work items with their pipeline stages.",
"inputSchema": {
"type": "object",
"properties": {
"epic_id": {
"type": "string",
"description": "Epic identifier (e.g. '880')"
}
},
"required": ["epic_id"]
}
}),
json!({
"name": "close_bug",
"description": "Archive a bug from work/2_current/ or work/1_backlog/ to work/5_done/ and auto-commit to master.",
+76
View File
@@ -0,0 +1,76 @@
//! Epic work-item creation operations.
//!
//! Epics are shared-context containers that group related stories, bugs, spikes, and
//! refactors under a common goal. They are stored in the CRDT items list with
//! `type: epic` and are not pipeline-driven (no stage advancement).
use std::path::Path;
use super::super::{next_item_number, slugify_name, write_story_content};
/// Create an epic file and store it in the database.
///
/// The markdown body is generated from the supplied fields; missing optional
/// sections are filled with `- TBD`. The `name` must contain at least one
/// alphanumeric character (slugification is used purely as a validation step —
/// the returned ID is numeric).
///
/// Returns the epic_id (e.g. `"880"`).
///
/// # Errors
///
/// Returns `Err` when the next item number cannot be determined or when the
/// name slugifies to an empty string.
pub fn create_epic_file(
    root: &Path,
    name: &str,
    goal: &str,
    motivation: Option<&str>,
    key_files: Option<&str>,
    success_criteria: Option<&[String]>,
) -> Result<String, String> {
    let epic_number = next_item_number(root)?;
    if slugify_name(name).is_empty() {
        return Err("Name must contain at least one alphanumeric character.".to_string());
    }
    let epic_id = epic_number.to_string();

    // Escape the name for a YAML double-quoted scalar. Backslashes must be
    // escaped BEFORE quotes, otherwise the quote-escape's own backslash would
    // be doubled; an unescaped backslash (e.g. in `a\b`) would otherwise be
    // parsed as a YAML escape sequence and corrupt the front matter. Newlines
    // are escaped so each front-matter key stays on one line.
    let yaml_name = name
        .replace('\\', "\\\\")
        .replace('"', "\\\"")
        .replace('\n', "\\n");

    let mut content = String::new();
    content.push_str("---\n");
    content.push_str("type: epic\n");
    content.push_str(&format!("name: \"{yaml_name}\"\n"));
    content.push_str("---\n\n");
    content.push_str(&format!("# Epic {epic_number}: {name}\n\n"));
    content.push_str("## Goal\n\n");
    content.push_str(goal);
    content.push_str("\n\n");
    content.push_str("## Motivation\n\n");
    if let Some(m) = motivation {
        content.push_str(m);
        content.push('\n');
    } else {
        content.push_str("- TBD\n");
    }
    content.push('\n');
    content.push_str("## Key Files\n\n");
    if let Some(kf) = key_files {
        content.push_str(kf);
        content.push('\n');
    } else {
        content.push_str("- TBD\n");
    }
    content.push('\n');
    content.push_str("## Success Criteria\n\n");
    match success_criteria {
        Some(criteria) if !criteria.is_empty() => {
            for c in criteria {
                content.push_str(&format!("- {c}\n"));
            }
        }
        _ => {
            content.push_str("- TBD\n");
        }
    }
    // Epics are stored in backlog (no pipeline advancement).
    write_story_content(root, &epic_id, "1_backlog", &content);
    Ok(epic_id)
}
+3 -1
View File
@@ -1,6 +1,7 @@
//! Bug, spike, and refactor pipeline-item operations — creation and listing.
//! Bug, spike, refactor, and epic pipeline-item operations — creation and listing.
mod bug;
mod epic;
mod refactor;
mod spike;
@@ -8,5 +9,6 @@ mod spike;
mod tests;
pub use bug::{create_bug_file, list_bug_files};
pub use epic::create_epic_file;
pub use refactor::{create_refactor_file, list_refactor_files};
pub use spike::create_spike_file;
+2 -1
View File
@@ -6,7 +6,8 @@ mod test_results;
mod utils;
pub use bug_ops::{
create_bug_file, create_refactor_file, create_spike_file, list_bug_files, list_refactor_files,
create_bug_file, create_epic_file, create_refactor_file, create_spike_file, list_bug_files,
list_refactor_files,
};
pub use pipeline::{
PipelineState, UpcomingStory, load_pipeline_state, load_upcoming_stories, validate_story_dirs,
+17 -5
View File
@@ -40,6 +40,9 @@ pub struct UpcomingStory {
/// Story numbers this story depends on.
#[serde(skip_serializing_if = "Option::is_none")]
pub depends_on: Option<Vec<u32>>,
/// Epic this item belongs to (numeric ID as string, e.g. "880").
#[serde(skip_serializing_if = "Option::is_none")]
pub epic_id: Option<String>,
}
/// Validation outcome for a single story.
@@ -92,17 +95,18 @@ pub fn load_pipeline_state(ctx: &AppContext) -> Result<PipelineState, String> {
let sid = &item.story_id.0;
let agent = agent_map.get(sid).cloned();
// Enrich with content-derived metadata (merge_failure, review_hold, qa).
let (merge_failure, review_hold, qa) = crate::db::read_content(sid)
// Enrich with content-derived metadata (merge_failure, review_hold, qa, epic_id).
let (merge_failure, review_hold, qa, epic_id) = crate::db::read_content(sid)
.and_then(|c| parse_front_matter(&c).ok())
.map(|meta| {
(
meta.merge_failure,
meta.review_hold,
meta.qa.map(|m| m.as_str().to_string()),
meta.epic,
)
})
.unwrap_or((None, None, None));
.unwrap_or((None, None, None, None));
let story = UpcomingStory {
story_id: sid.clone(),
@@ -136,6 +140,7 @@ pub fn load_pipeline_state(ctx: &AppContext) -> Result<PipelineState, String> {
.collect(),
)
},
epic_id,
};
match &item.stage {
Stage::Upcoming => state.backlog.push(story), // upcoming shown with backlog
@@ -201,8 +206,13 @@ pub fn load_upcoming_stories(_ctx: &AppContext) -> Result<Vec<UpcomingStory>, St
let mut stories: Vec<UpcomingStory> = typed_items
.into_iter()
.filter(|item| matches!(item.stage, Stage::Backlog))
.map(|item| UpcomingStory {
story_id: item.story_id.0,
.map(|item| {
let sid = &item.story_id.0;
let epic_id = crate::db::read_content(sid)
.and_then(|c| parse_front_matter(&c).ok())
.and_then(|meta| meta.epic);
UpcomingStory {
story_id: item.story_id.0.clone(),
name: if item.name.is_empty() {
None
} else {
@@ -233,6 +243,8 @@ pub fn load_upcoming_stories(_ctx: &AppContext) -> Result<Vec<UpcomingStory>, St
.collect(),
)
},
epic_id,
}
})
.collect();
stories.sort_by(|a, b| a.story_id.cmp(&b.story_id));
+3
View File
@@ -32,6 +32,8 @@ pub(super) struct FrontMatter {
/// Set to `true` when the auto-assigner has already spawned a mergemaster
/// session for a content-conflict failure.
pub mergemaster_attempted: Option<bool>,
/// Epic this item belongs to (numeric ID as string, e.g. "880").
pub epic: Option<String>,
}
/// Parse the YAML front matter block from a story markdown string.
@@ -77,6 +79,7 @@ fn build_metadata(front: FrontMatter) -> StoryMetadata {
run_tests_passed: front.run_tests_passed,
item_type: front.item_type,
mergemaster_attempted: front.mergemaster_attempted,
epic: front.epic,
}
}
+3
View File
@@ -71,6 +71,9 @@ pub struct StoryMetadata {
/// Set to `true` when the auto-assigner has already spawned a mergemaster
/// session for a content-conflict failure. Prevents repeated spawns.
pub mergemaster_attempted: Option<bool>,
/// Epic this item belongs to. The value is the epic's numeric ID (e.g. "880").
/// Set on story/bug/spike/refactor items to declare membership in an epic.
pub epic: Option<String>,
}
/// Errors that can occur when parsing story front-matter metadata.
+4
View File
@@ -215,6 +215,7 @@ mod tests {
retry_count: None,
blocked: None,
depends_on: None,
epic_id: None,
}],
current: vec![UpcomingStory {
story_id: "2_story_b".to_string(),
@@ -227,6 +228,7 @@ mod tests {
retry_count: None,
blocked: None,
depends_on: None,
epic_id: None,
}],
qa: vec![],
merge: vec![],
@@ -241,6 +243,7 @@ mod tests {
retry_count: None,
blocked: None,
depends_on: None,
epic_id: None,
}],
deterministic_merges_in_flight: vec![],
};
@@ -296,6 +299,7 @@ mod tests {
retry_count: None,
blocked: None,
depends_on: None,
epic_id: None,
}],
qa: vec![],
merge: vec![],
@@ -210,6 +210,7 @@ mod tests {
retry_count: None,
blocked: None,
depends_on: None,
epic_id: None,
};
let resp = WsResponse::PipelineState {
backlog: vec![story],