2026-03-22 19:07:07 +00:00
|
|
|
mod bug_ops;
|
|
|
|
|
mod story_ops;
|
|
|
|
|
mod test_results;
|
|
|
|
|
|
|
|
|
|
pub use bug_ops::{
|
|
|
|
|
create_bug_file, create_refactor_file, create_spike_file, list_bug_files, list_refactor_files,
|
|
|
|
|
};
|
|
|
|
|
pub use story_ops::{
|
|
|
|
|
add_criterion_to_file, check_criterion_in_file, create_story_file, update_story_in_file,
|
|
|
|
|
};
|
|
|
|
|
pub use test_results::{
|
|
|
|
|
read_test_results_from_story_file, write_coverage_baseline_to_story_file,
|
|
|
|
|
write_test_results_to_story_file,
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
use crate::agents::AgentStatus;
|
|
|
|
|
use crate::http::context::AppContext;
|
|
|
|
|
use crate::io::story_metadata::parse_front_matter;
|
|
|
|
|
use serde::Serialize;
|
|
|
|
|
use std::collections::HashMap;
|
2026-04-08 03:03:59 +00:00
|
|
|
use std::path::Path;
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
/// Agent assignment embedded in a pipeline stage item.
#[derive(Clone, Debug, Serialize)]
pub struct AgentAssignment {
    /// Name of the agent as configured in the project config.
    pub agent_name: String,
    /// Model configured for this agent in the project config, if any.
    pub model: Option<String>,
    /// Agent lifecycle status rendered as a string (e.g. "pending", "running").
    pub status: String,
}
|
|
|
|
|
|
|
|
|
|
/// A single work item as rendered inside a pipeline stage list.
#[derive(Clone, Debug, Serialize)]
pub struct UpcomingStory {
    /// Story identifier — the story file's stem, e.g. "10_story_upcoming".
    pub story_id: String,
    /// Human-readable name from front matter; `None` when absent or unparseable.
    pub name: Option<String>,
    /// Front-matter parse error for this item, if parsing failed.
    pub error: Option<String>,
    /// Merge failure reason persisted to front matter by the mergemaster agent.
    pub merge_failure: Option<String>,
    /// Active agent working on this item, if any.
    pub agent: Option<AgentAssignment>,
    /// True when the item is held in QA for human review.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub review_hold: Option<bool>,
    /// QA mode for this item: "human", "server", or "agent".
    #[serde(skip_serializing_if = "Option::is_none")]
    pub qa: Option<String>,
    /// Number of retries at the current pipeline stage.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub retry_count: Option<u32>,
    /// True when the story has exceeded its retry limit and will not be auto-assigned.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub blocked: Option<bool>,
    /// Story numbers this story depends on.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub depends_on: Option<Vec<u32>>,
}
|
|
|
|
|
|
|
|
|
|
/// Result of validating a single story file's front matter.
///
/// Public types should carry the common std derives; `Clone` and `Debug`
/// are added here for consistency with the sibling types in this module.
// NOTE(review): siblings also derive `Serialize` — if this type is ever
// returned over HTTP, add `Serialize` as well; confirm against callers.
#[derive(Clone, Debug)]
pub struct StoryValidationResult {
    /// Story identifier (file stem of the validated file).
    pub story_id: String,
    /// True when the front matter parsed and required fields are present.
    pub valid: bool,
    /// Validation error message(s); multiple problems are joined with "; ".
    pub error: Option<String>,
}
|
|
|
|
|
|
|
|
|
|
/// Full pipeline state across all stages.
#[derive(Clone, Debug, Serialize)]
pub struct PipelineState {
    /// Stage 1 ("1_backlog"): stories waiting to be picked up.
    pub backlog: Vec<UpcomingStory>,
    /// Stage 2 ("2_current"): stories currently being worked.
    pub current: Vec<UpcomingStory>,
    /// Stage 3 ("3_qa"): stories in QA.
    pub qa: Vec<UpcomingStory>,
    /// Stage 4 ("4_merge"): stories being merged.
    pub merge: Vec<UpcomingStory>,
    /// Stage 5 ("5_done"): completed stories.
    pub done: Vec<UpcomingStory>,
}
|
|
|
|
|
|
|
|
|
|
/// Load the full pipeline state (all 5 active stages).
///
/// Reads from the CRDT document and enriches with content from the
/// in-memory content store. Agent assignments are overlaid from the
/// in-memory agent pool. Falls back to filesystem for items not yet
/// migrated to the database.
pub fn load_pipeline_state(ctx: &AppContext) -> Result<PipelineState, String> {
    // Active (pending/running) agents, keyed by story_id, for overlay below.
    let agent_map = build_active_agent_map(ctx);

    // Try CRDT-first read via the typed projection layer.
    let typed_items = crate::pipeline_state::read_all_typed();
    if !typed_items.is_empty() {
        use crate::pipeline_state::Stage;

        let mut state = PipelineState {
            backlog: Vec::new(),
            current: Vec::new(),
            qa: Vec::new(),
            merge: Vec::new(),
            done: Vec::new(),
        };

        for item in typed_items {
            let sid = &item.story_id.0;
            let agent = agent_map.get(sid).cloned();

            // Enrich with content-derived metadata (merge_failure, review_hold, qa).
            // Any failure to read or parse content degrades to (None, None, None)
            // rather than failing the whole load.
            let (merge_failure, review_hold, qa) = crate::db::read_content(sid)
                .and_then(|c| parse_front_matter(&c).ok())
                .map(|meta| {
                    (
                        meta.merge_failure,
                        meta.review_hold,
                        meta.qa.map(|m| m.as_str().to_string()),
                    )
                })
                .unwrap_or((None, None, None));

            let story = UpcomingStory {
                story_id: sid.clone(),
                // Empty projection name means "no name"; surface as None.
                name: if item.name.is_empty() {
                    None
                } else {
                    Some(item.name.clone())
                },
                error: None,
                merge_failure,
                agent,
                review_hold,
                qa,
                // Zero retries is omitted from serialized output (None).
                retry_count: if item.retry_count > 0 {
                    Some(item.retry_count)
                } else {
                    None
                },
                blocked: if item.stage.is_blocked() {
                    Some(true)
                } else {
                    None
                },
                // Dependency ids look like "12_story_x"; keep the leading
                // numeric prefix (12). Non-numeric prefixes are dropped.
                depends_on: if item.depends_on.is_empty() {
                    None
                } else {
                    Some(
                        item.depends_on
                            .iter()
                            .filter_map(|d| d.0.split('_').next()?.parse::<u32>().ok())
                            .collect(),
                    )
                },
            };
            // Route the story into the vector for its pipeline stage.
            match &item.stage {
                Stage::Backlog => state.backlog.push(story),
                Stage::Coding => state.current.push(story),
                Stage::Qa => state.qa.push(story),
                Stage::Merge { .. } => state.merge.push(story),
                Stage::Done { .. } => state.done.push(story),
                Stage::Archived { .. } => {} // skip archived
            }
        }

        // Sort each stage for deterministic output.
        state.backlog.sort_by(|a, b| a.story_id.cmp(&b.story_id));
        state.current.sort_by(|a, b| a.story_id.cmp(&b.story_id));
        state.qa.sort_by(|a, b| a.story_id.cmp(&b.story_id));
        state.merge.sort_by(|a, b| a.story_id.cmp(&b.story_id));
        state.done.sort_by(|a, b| a.story_id.cmp(&b.story_id));

        // Merge in any filesystem-only items not yet in the CRDT (migration fallback).
        merge_filesystem_items(ctx, &mut state, &agent_map)?;

        return Ok(state);
    }

    // Fallback: filesystem-only read (CRDT not initialised).
    // Backlog/done get no agent overlay; only active stages can have agents.
    Ok(PipelineState {
        backlog: load_stage_items_from_fs(ctx, "1_backlog", &HashMap::new())?,
        current: load_stage_items_from_fs(ctx, "2_current", &agent_map)?,
        qa: load_stage_items_from_fs(ctx, "3_qa", &agent_map)?,
        merge: load_stage_items_from_fs(ctx, "4_merge", &agent_map)?,
        done: load_stage_items_from_fs(ctx, "5_done", &HashMap::new())?,
    })
}
|
|
|
|
|
|
2026-04-07 16:12:19 +00:00
|
|
|
/// Merge filesystem items that are not already present in the CRDT state.
|
|
|
|
|
fn merge_filesystem_items(
|
|
|
|
|
ctx: &AppContext,
|
|
|
|
|
state: &mut PipelineState,
|
|
|
|
|
agent_map: &HashMap<String, AgentAssignment>,
|
|
|
|
|
) -> Result<(), String> {
|
|
|
|
|
let stages = [
|
|
|
|
|
("1_backlog", &mut state.backlog),
|
|
|
|
|
("2_current", &mut state.current),
|
|
|
|
|
("3_qa", &mut state.qa),
|
|
|
|
|
("4_merge", &mut state.merge),
|
|
|
|
|
("5_done", &mut state.done),
|
|
|
|
|
];
|
|
|
|
|
|
|
|
|
|
for (stage_dir, stage_vec) in stages {
|
|
|
|
|
let empty_map = HashMap::new();
|
|
|
|
|
let map = if stage_dir == "2_current" || stage_dir == "3_qa" || stage_dir == "4_merge" {
|
|
|
|
|
agent_map
|
|
|
|
|
} else {
|
|
|
|
|
&empty_map
|
|
|
|
|
};
|
2026-04-08 03:03:59 +00:00
|
|
|
let fs_items = load_stage_items_from_fs(ctx, stage_dir, map)?;
|
2026-04-07 16:12:19 +00:00
|
|
|
for fs_item in fs_items {
|
|
|
|
|
if !stage_vec.iter().any(|s| s.story_id == fs_item.story_id) {
|
|
|
|
|
stage_vec.push(fs_item);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
stage_vec.sort_by(|a, b| a.story_id.cmp(&b.story_id));
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-22 19:07:07 +00:00
|
|
|
/// Build a map from story_id → AgentAssignment for all pending/running agents.
|
|
|
|
|
fn build_active_agent_map(ctx: &AppContext) -> HashMap<String, AgentAssignment> {
|
|
|
|
|
let agents = match ctx.agents.list_agents() {
|
|
|
|
|
Ok(a) => a,
|
|
|
|
|
Err(_) => return HashMap::new(),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let config_opt = ctx
|
|
|
|
|
.state
|
|
|
|
|
.get_project_root()
|
|
|
|
|
.ok()
|
|
|
|
|
.and_then(|root| crate::config::ProjectConfig::load(&root).ok());
|
|
|
|
|
|
|
|
|
|
let mut map = HashMap::new();
|
|
|
|
|
for agent in agents {
|
|
|
|
|
if !matches!(agent.status, AgentStatus::Pending | AgentStatus::Running) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
let model = config_opt
|
|
|
|
|
.as_ref()
|
|
|
|
|
.and_then(|cfg| cfg.find_agent(&agent.agent_name))
|
|
|
|
|
.and_then(|ac| ac.model.clone());
|
|
|
|
|
map.insert(
|
|
|
|
|
agent.story_id.clone(),
|
|
|
|
|
AgentAssignment {
|
|
|
|
|
agent_name: agent.agent_name,
|
|
|
|
|
model,
|
|
|
|
|
status: agent.status.to_string(),
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
map
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Load work items from filesystem (fallback for backwards compatibility).
|
|
|
|
|
fn load_stage_items_from_fs(
|
2026-03-22 19:07:07 +00:00
|
|
|
ctx: &AppContext,
|
|
|
|
|
stage_dir: &str,
|
|
|
|
|
agent_map: &HashMap<String, AgentAssignment>,
|
|
|
|
|
) -> Result<Vec<UpcomingStory>, String> {
|
|
|
|
|
let root = ctx.state.get_project_root()?;
|
2026-04-07 16:15:38 +00:00
|
|
|
|
2026-04-03 16:12:52 +01:00
|
|
|
let dir = root.join(".huskies").join("work").join(stage_dir);
|
2026-04-07 16:15:38 +00:00
|
|
|
let mut stories = Vec::new();
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-07 16:15:38 +00:00
|
|
|
if dir.exists() {
|
2026-04-08 03:03:59 +00:00
|
|
|
for entry in std::fs::read_dir(&dir)
|
2026-04-07 16:15:38 +00:00
|
|
|
.map_err(|e| format!("Failed to read {stage_dir} directory: {e}"))?
|
|
|
|
|
{
|
|
|
|
|
let entry = entry.map_err(|e| format!("Failed to read {stage_dir} entry: {e}"))?;
|
|
|
|
|
let path = entry.path();
|
|
|
|
|
if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
let story_id = path
|
|
|
|
|
.file_stem()
|
|
|
|
|
.and_then(|stem| stem.to_str())
|
|
|
|
|
.ok_or_else(|| "Invalid story file name.".to_string())?
|
|
|
|
|
.to_string();
|
2026-04-08 03:03:59 +00:00
|
|
|
let contents = std::fs::read_to_string(&path)
|
2026-04-07 16:15:38 +00:00
|
|
|
.map_err(|e| format!("Failed to read story file {}: {e}", path.display()))?;
|
|
|
|
|
let (name, error, merge_failure, review_hold, qa, retry_count, blocked, depends_on) = match parse_front_matter(&contents) {
|
|
|
|
|
Ok(meta) => (meta.name, None, meta.merge_failure, meta.review_hold, meta.qa.map(|m| m.as_str().to_string()), meta.retry_count, meta.blocked, meta.depends_on),
|
|
|
|
|
Err(e) => (None, Some(e.to_string()), None, None, None, None, None, None),
|
|
|
|
|
};
|
|
|
|
|
let agent = agent_map.get(&story_id).cloned();
|
|
|
|
|
stories.push(UpcomingStory { story_id, name, error, merge_failure, agent, review_hold, qa, retry_count, blocked, depends_on });
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
stories.sort_by(|a, b| a.story_id.cmp(&b.story_id));
|
|
|
|
|
Ok(stories)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn load_upcoming_stories(ctx: &AppContext) -> Result<Vec<UpcomingStory>, String> {
|
2026-04-09 21:24:11 +00:00
|
|
|
// Try typed projection first.
|
|
|
|
|
let typed_items = crate::pipeline_state::read_all_typed();
|
|
|
|
|
if !typed_items.is_empty() {
|
|
|
|
|
use crate::pipeline_state::Stage;
|
|
|
|
|
|
|
|
|
|
let mut stories: Vec<UpcomingStory> = typed_items
|
2026-04-08 03:03:59 +00:00
|
|
|
.into_iter()
|
2026-04-09 21:24:11 +00:00
|
|
|
.filter(|item| matches!(item.stage, Stage::Backlog))
|
2026-04-08 03:03:59 +00:00
|
|
|
.map(|item| UpcomingStory {
|
2026-04-09 21:24:11 +00:00
|
|
|
story_id: item.story_id.0,
|
|
|
|
|
name: if item.name.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(item.name)
|
|
|
|
|
},
|
2026-04-08 03:03:59 +00:00
|
|
|
error: None,
|
|
|
|
|
merge_failure: None,
|
|
|
|
|
agent: None,
|
|
|
|
|
review_hold: None,
|
|
|
|
|
qa: None,
|
2026-04-09 21:24:11 +00:00
|
|
|
retry_count: if item.retry_count > 0 {
|
|
|
|
|
Some(item.retry_count)
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
},
|
|
|
|
|
blocked: if item.stage.is_blocked() {
|
|
|
|
|
Some(true)
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
},
|
|
|
|
|
depends_on: if item.depends_on.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(
|
|
|
|
|
item.depends_on
|
|
|
|
|
.iter()
|
|
|
|
|
.filter_map(|d| d.0.split('_').next()?.parse::<u32>().ok())
|
|
|
|
|
.collect(),
|
|
|
|
|
)
|
|
|
|
|
},
|
2026-04-08 03:03:59 +00:00
|
|
|
})
|
|
|
|
|
.collect();
|
|
|
|
|
stories.sort_by(|a, b| a.story_id.cmp(&b.story_id));
|
|
|
|
|
|
|
|
|
|
// Merge filesystem fallback.
|
|
|
|
|
let fs_stories = load_stage_items_from_fs(ctx, "1_backlog", &HashMap::new())?;
|
|
|
|
|
for fs_item in fs_stories {
|
|
|
|
|
if !stories.iter().any(|s| s.story_id == fs_item.story_id) {
|
|
|
|
|
stories.push(fs_item);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
stories.sort_by(|a, b| a.story_id.cmp(&b.story_id));
|
|
|
|
|
return Ok(stories);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
load_stage_items_from_fs(ctx, "1_backlog", &HashMap::new())
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn validate_story_dirs(
|
|
|
|
|
root: &std::path::Path,
|
|
|
|
|
) -> Result<Vec<StoryValidationResult>, String> {
|
|
|
|
|
let mut results = Vec::new();
|
|
|
|
|
|
2026-04-10 10:52:42 +01:00
|
|
|
// Validate from filesystem shadows under the given root.
|
|
|
|
|
// NOTE: We intentionally read the filesystem here (not the global CRDT
|
|
|
|
|
// singleton) so that tests can pass an isolated tempdir and get
|
|
|
|
|
// deterministic results. See bug 525.
|
2026-04-08 03:03:59 +00:00
|
|
|
let dirs_to_validate = vec![
|
2026-04-03 16:12:52 +01:00
|
|
|
root.join(".huskies").join("work").join("2_current"),
|
|
|
|
|
root.join(".huskies").join("work").join("1_backlog"),
|
2026-03-22 19:07:07 +00:00
|
|
|
];
|
|
|
|
|
|
|
|
|
|
for dir in &dirs_to_validate {
|
|
|
|
|
let subdir = dir.file_name().map(|n| n.to_string_lossy().into_owned()).unwrap_or_default();
|
|
|
|
|
if !dir.exists() {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
for entry in
|
2026-04-08 03:03:59 +00:00
|
|
|
std::fs::read_dir(dir).map_err(|e| format!("Failed to read {subdir} directory: {e}"))?
|
2026-03-22 19:07:07 +00:00
|
|
|
{
|
|
|
|
|
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
|
|
|
|
|
let path = entry.path();
|
|
|
|
|
if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
let story_id = path
|
|
|
|
|
.file_stem()
|
|
|
|
|
.and_then(|stem| stem.to_str())
|
|
|
|
|
.unwrap_or_default()
|
|
|
|
|
.to_string();
|
2026-04-08 03:03:59 +00:00
|
|
|
|
|
|
|
|
let contents = std::fs::read_to_string(&path)
|
2026-03-22 19:07:07 +00:00
|
|
|
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
|
|
|
|
|
match parse_front_matter(&contents) {
|
|
|
|
|
Ok(meta) => {
|
|
|
|
|
let mut errors = Vec::new();
|
|
|
|
|
if meta.name.is_none() {
|
|
|
|
|
errors.push("Missing 'name' field".to_string());
|
|
|
|
|
}
|
|
|
|
|
if errors.is_empty() {
|
|
|
|
|
results.push(StoryValidationResult {
|
|
|
|
|
story_id,
|
|
|
|
|
valid: true,
|
|
|
|
|
error: None,
|
|
|
|
|
});
|
|
|
|
|
} else {
|
|
|
|
|
results.push(StoryValidationResult {
|
|
|
|
|
story_id,
|
|
|
|
|
valid: false,
|
|
|
|
|
error: Some(errors.join("; ")),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Err(e) => results.push(StoryValidationResult {
|
|
|
|
|
story_id,
|
|
|
|
|
valid: false,
|
|
|
|
|
error: Some(e.to_string()),
|
|
|
|
|
}),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
results.sort_by(|a, b| a.story_id.cmp(&b.story_id));
|
|
|
|
|
Ok(results)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ── Shared utilities used by submodules ──────────────────────────
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Read story content from the database content store, falling back to
|
|
|
|
|
/// the filesystem if not yet migrated.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Returns the story content or an error if not found.
|
|
|
|
|
pub(super) fn read_story_content(project_root: &Path, story_id: &str) -> Result<String, String> {
|
|
|
|
|
// Try content store first.
|
|
|
|
|
if let Some(content) = crate::db::read_content(story_id) {
|
|
|
|
|
return Ok(content);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Filesystem fallback.
|
|
|
|
|
let path = find_story_file_on_disk(project_root, story_id)?;
|
|
|
|
|
let content = std::fs::read_to_string(&path)
|
|
|
|
|
.map_err(|e| format!("Failed to read story file: {e}"))?;
|
|
|
|
|
|
|
|
|
|
// Import into content store for future reads.
|
|
|
|
|
crate::db::write_content(story_id, &content);
|
|
|
|
|
|
|
|
|
|
Ok(content)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Write story content to both DB and filesystem (backwards compat).
|
|
|
|
|
///
|
|
|
|
|
/// Use this variant when a project_root is available to keep the filesystem
|
|
|
|
|
/// in sync during the migration period.
|
|
|
|
|
pub(super) fn write_story_content_with_fs(project_root: &Path, story_id: &str, stage: &str, content: &str) {
|
|
|
|
|
crate::db::write_item_with_content(story_id, stage, content);
|
|
|
|
|
|
|
|
|
|
// Also write to filesystem if the file exists.
|
|
|
|
|
if let Ok(path) = find_story_file_on_disk(project_root, story_id) {
|
|
|
|
|
let _ = std::fs::write(&path, content);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Determine what stage a story is in (from CRDT).
|
|
|
|
|
pub(super) fn story_stage(story_id: &str) -> Option<String> {
|
2026-04-09 21:24:11 +00:00
|
|
|
crate::pipeline_state::read_typed(story_id)
|
|
|
|
|
.ok()
|
|
|
|
|
.flatten()
|
|
|
|
|
.map(|item| item.stage.dir_name().to_string())
|
2026-04-08 03:03:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Locate a work item file by searching all active pipeline stages on disk.
///
/// This is a filesystem fallback used during migration. Stages are probed
/// in a fixed order (current first, archived last); the first existing
/// `{story_id}.md` wins.
pub(crate) fn find_story_file_on_disk(project_root: &Path, story_id: &str) -> Result<std::path::PathBuf, String> {
    let filename = format!("{story_id}.md");
    let work = project_root.join(".huskies").join("work");

    ["2_current", "1_backlog", "3_qa", "4_merge", "5_done", "6_archived"]
        .iter()
        .map(|stage| work.join(stage).join(&filename))
        .find(|candidate| candidate.exists())
        .ok_or_else(|| format!("Story '{story_id}' not found in any pipeline stage."))
}
|
|
|
|
|
|
|
|
|
|
/// Replace the content of a named `## Section` in a story file.
|
|
|
|
|
///
|
|
|
|
|
/// Finds the first occurrence of `## {section_name}` and replaces everything
|
|
|
|
|
/// until the next `##` heading (or end of file) with the provided text.
|
|
|
|
|
/// Returns an error if the section is not found.
|
|
|
|
|
pub(super) fn replace_section_content(content: &str, section_name: &str, new_text: &str) -> Result<String, String> {
|
|
|
|
|
let lines: Vec<&str> = content.lines().collect();
|
|
|
|
|
let heading = format!("## {section_name}");
|
|
|
|
|
|
|
|
|
|
let mut section_start: Option<usize> = None;
|
|
|
|
|
let mut section_end: Option<usize> = None;
|
|
|
|
|
|
|
|
|
|
for (i, line) in lines.iter().enumerate() {
|
|
|
|
|
let trimmed = line.trim();
|
|
|
|
|
if trimmed == heading {
|
|
|
|
|
section_start = Some(i);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
if section_start.is_some() && trimmed.starts_with("## ") {
|
|
|
|
|
section_end = Some(i);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let section_start =
|
|
|
|
|
section_start.ok_or_else(|| format!("Section '{heading}' not found in story file."))?;
|
|
|
|
|
|
|
|
|
|
let mut new_lines: Vec<String> = Vec::new();
|
|
|
|
|
// Keep everything up to and including the section heading.
|
|
|
|
|
for line in lines.iter().take(section_start + 1) {
|
|
|
|
|
new_lines.push(line.to_string());
|
|
|
|
|
}
|
|
|
|
|
// Blank line, new content, blank line.
|
|
|
|
|
new_lines.push(String::new());
|
|
|
|
|
new_lines.push(new_text.to_string());
|
|
|
|
|
new_lines.push(String::new());
|
|
|
|
|
// Resume from the next section heading (or EOF).
|
|
|
|
|
let resume_from = section_end.unwrap_or(lines.len());
|
|
|
|
|
for line in lines.iter().skip(resume_from) {
|
|
|
|
|
new_lines.push(line.to_string());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let mut new_str = new_lines.join("\n");
|
|
|
|
|
if content.ends_with('\n') {
|
|
|
|
|
new_str.push('\n');
|
|
|
|
|
}
|
|
|
|
|
Ok(new_str)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Replace the `## Test Results` section in `contents` with `new_section`,
|
|
|
|
|
/// or append it if not present.
|
|
|
|
|
pub(super) fn replace_or_append_section(contents: &str, header: &str, new_section: &str) -> String {
|
|
|
|
|
let lines: Vec<&str> = contents.lines().collect();
|
|
|
|
|
let header_trimmed = header.trim();
|
|
|
|
|
|
|
|
|
|
// Find the start of the existing section
|
|
|
|
|
let section_start = lines.iter().position(|l| l.trim() == header_trimmed);
|
|
|
|
|
|
|
|
|
|
if let Some(start) = section_start {
|
|
|
|
|
// Find the next `##` heading after the section start (the end of this section)
|
|
|
|
|
let section_end = lines[start + 1..]
|
|
|
|
|
.iter()
|
|
|
|
|
.position(|l| {
|
|
|
|
|
let t = l.trim();
|
|
|
|
|
t.starts_with("## ") && t != header_trimmed
|
|
|
|
|
})
|
|
|
|
|
.map(|i| start + 1 + i)
|
|
|
|
|
.unwrap_or(lines.len());
|
|
|
|
|
|
|
|
|
|
let mut result = lines[..start].join("\n");
|
|
|
|
|
if !result.is_empty() {
|
|
|
|
|
result.push('\n');
|
|
|
|
|
}
|
|
|
|
|
result.push_str(new_section);
|
|
|
|
|
if section_end < lines.len() {
|
|
|
|
|
result.push('\n');
|
|
|
|
|
result.push_str(&lines[section_end..].join("\n"));
|
|
|
|
|
}
|
|
|
|
|
if contents.ends_with('\n') {
|
|
|
|
|
result.push('\n');
|
|
|
|
|
}
|
|
|
|
|
result
|
|
|
|
|
} else {
|
|
|
|
|
// Append at the end
|
|
|
|
|
let mut result = contents.trim_end_matches('\n').to_string();
|
|
|
|
|
result.push_str("\n\n");
|
|
|
|
|
result.push_str(new_section);
|
|
|
|
|
if !result.ends_with('\n') {
|
|
|
|
|
result.push('\n');
|
|
|
|
|
}
|
|
|
|
|
result
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub(super) fn slugify_name(name: &str) -> String {
|
|
|
|
|
let slug: String = name
|
|
|
|
|
.chars()
|
|
|
|
|
.map(|c| {
|
|
|
|
|
if c.is_ascii_alphanumeric() {
|
|
|
|
|
c.to_ascii_lowercase()
|
|
|
|
|
} else {
|
|
|
|
|
'_'
|
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
.collect();
|
|
|
|
|
// Collapse consecutive underscores and trim edges
|
|
|
|
|
let mut result = String::new();
|
|
|
|
|
let mut prev_underscore = true; // start true to trim leading _
|
|
|
|
|
for ch in slug.chars() {
|
|
|
|
|
if ch == '_' {
|
|
|
|
|
if !prev_underscore {
|
|
|
|
|
result.push('_');
|
|
|
|
|
}
|
|
|
|
|
prev_underscore = true;
|
|
|
|
|
} else {
|
|
|
|
|
result.push(ch);
|
|
|
|
|
prev_underscore = false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// Trim trailing underscore
|
|
|
|
|
if result.ends_with('_') {
|
|
|
|
|
result.pop();
|
|
|
|
|
}
|
|
|
|
|
result
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Get the next available item number by scanning both the database and filesystem.
|
2026-03-22 19:07:07 +00:00
|
|
|
pub(super) fn next_item_number(root: &std::path::Path) -> Result<u32, String> {
|
2026-04-08 03:03:59 +00:00
|
|
|
let mut max_num = crate::db::next_item_number().saturating_sub(1); // db returns next, we want max
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
// Also scan filesystem for backwards compatibility.
|
|
|
|
|
let work_base = root.join(".huskies").join("work");
|
2026-03-22 19:07:07 +00:00
|
|
|
for subdir in &["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
|
|
|
|
|
let dir = work_base.join(subdir);
|
|
|
|
|
if !dir.exists() {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
for entry in
|
2026-04-08 03:03:59 +00:00
|
|
|
std::fs::read_dir(&dir).map_err(|e| format!("Failed to read {subdir} directory: {e}"))?
|
2026-03-22 19:07:07 +00:00
|
|
|
{
|
|
|
|
|
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
|
|
|
|
|
let name = entry.file_name();
|
|
|
|
|
let name_str = name.to_string_lossy();
|
|
|
|
|
let num_str: String = name_str.chars().take_while(|c| c.is_ascii_digit()).collect();
|
|
|
|
|
if let Ok(n) = num_str.parse::<u32>()
|
|
|
|
|
&& n > max_num
|
|
|
|
|
{
|
|
|
|
|
max_num = n;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(max_num + 1)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
mod tests {
|
|
|
|
|
use super::*;
|
2026-04-08 03:03:59 +00:00
|
|
|
use std::fs;
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn load_pipeline_state_loads_all_stages() {
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path().to_path_buf();
|
|
|
|
|
|
|
|
|
|
for (stage, id) in &[
|
|
|
|
|
("1_backlog", "10_story_upcoming"),
|
|
|
|
|
("2_current", "20_story_current"),
|
|
|
|
|
("3_qa", "30_story_qa"),
|
|
|
|
|
("4_merge", "40_story_merge"),
|
|
|
|
|
("5_done", "50_story_done"),
|
|
|
|
|
] {
|
2026-04-03 16:12:52 +01:00
|
|
|
let dir = root.join(".huskies").join("work").join(stage);
|
2026-03-22 19:07:07 +00:00
|
|
|
fs::create_dir_all(&dir).unwrap();
|
|
|
|
|
fs::write(
|
|
|
|
|
dir.join(format!("{id}.md")),
|
|
|
|
|
format!("---\nname: {id}\n---\n"),
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let ctx = crate::http::context::AppContext::new_test(root);
|
|
|
|
|
let state = load_pipeline_state(&ctx).unwrap();
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.backlog.len(), 1);
|
|
|
|
|
assert_eq!(state.backlog[0].story_id, "10_story_upcoming");
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.current.len(), 1);
|
|
|
|
|
assert_eq!(state.current[0].story_id, "20_story_current");
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.qa.len(), 1);
|
|
|
|
|
assert_eq!(state.qa[0].story_id, "30_story_qa");
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.merge.len(), 1);
|
|
|
|
|
assert_eq!(state.merge[0].story_id, "40_story_merge");
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.done.len(), 1);
|
|
|
|
|
assert_eq!(state.done[0].story_id, "50_story_done");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn load_upcoming_returns_empty_when_no_dir() {
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path().to_path_buf();
|
2026-04-03 16:12:52 +01:00
|
|
|
// No .huskies directory at all
|
2026-03-22 19:07:07 +00:00
|
|
|
let ctx = crate::http::context::AppContext::new_test(root);
|
|
|
|
|
let result = load_upcoming_stories(&ctx).unwrap();
|
|
|
|
|
assert!(result.is_empty());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn pipeline_state_includes_agent_for_running_story() {
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path().to_path_buf();
|
|
|
|
|
|
2026-04-03 16:12:52 +01:00
|
|
|
let current = root.join(".huskies/work/2_current");
|
2026-03-22 19:07:07 +00:00
|
|
|
fs::create_dir_all(¤t).unwrap();
|
|
|
|
|
fs::write(
|
|
|
|
|
current.join("10_story_test.md"),
|
|
|
|
|
"---\nname: Test Story\n---\n# Story\n",
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let ctx = crate::http::context::AppContext::new_test(root);
|
|
|
|
|
ctx.agents.inject_test_agent("10_story_test", "coder-1", crate::agents::AgentStatus::Running);
|
|
|
|
|
|
|
|
|
|
let state = load_pipeline_state(&ctx).unwrap();
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.current.len(), 1);
|
|
|
|
|
let item = &state.current[0];
|
|
|
|
|
assert!(item.agent.is_some(), "running agent should appear on work item");
|
|
|
|
|
let agent = item.agent.as_ref().unwrap();
|
|
|
|
|
assert_eq!(agent.agent_name, "coder-1");
|
|
|
|
|
assert_eq!(agent.status, "running");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn pipeline_state_no_agent_for_completed_story() {
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path().to_path_buf();
|
|
|
|
|
|
2026-04-03 16:12:52 +01:00
|
|
|
let current = root.join(".huskies/work/2_current");
|
2026-03-22 19:07:07 +00:00
|
|
|
fs::create_dir_all(¤t).unwrap();
|
|
|
|
|
fs::write(
|
|
|
|
|
current.join("11_story_done.md"),
|
|
|
|
|
"---\nname: Done Story\n---\n# Story\n",
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let ctx = crate::http::context::AppContext::new_test(root);
|
|
|
|
|
ctx.agents.inject_test_agent("11_story_done", "coder-1", crate::agents::AgentStatus::Completed);
|
|
|
|
|
|
|
|
|
|
let state = load_pipeline_state(&ctx).unwrap();
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.current.len(), 1);
|
|
|
|
|
assert!(
|
|
|
|
|
state.current[0].agent.is_none(),
|
|
|
|
|
"completed agent should not appear on work item"
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn pipeline_state_pending_agent_included() {
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path().to_path_buf();
|
|
|
|
|
|
2026-04-03 16:12:52 +01:00
|
|
|
let current = root.join(".huskies/work/2_current");
|
2026-03-22 19:07:07 +00:00
|
|
|
fs::create_dir_all(¤t).unwrap();
|
|
|
|
|
fs::write(
|
|
|
|
|
current.join("12_story_pending.md"),
|
|
|
|
|
"---\nname: Pending Story\n---\n# Story\n",
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let ctx = crate::http::context::AppContext::new_test(root);
|
|
|
|
|
ctx.agents.inject_test_agent("12_story_pending", "coder-1", crate::agents::AgentStatus::Pending);
|
|
|
|
|
|
|
|
|
|
let state = load_pipeline_state(&ctx).unwrap();
|
|
|
|
|
|
|
|
|
|
assert_eq!(state.current.len(), 1);
|
|
|
|
|
let item = &state.current[0];
|
|
|
|
|
assert!(item.agent.is_some(), "pending agent should appear on work item");
|
|
|
|
|
assert_eq!(item.agent.as_ref().unwrap().status, "pending");
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-07 11:46:25 +00:00
|
|
|
#[test]
|
|
|
|
|
fn pipeline_state_includes_depends_on() {
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let backlog = tmp.path().join(".huskies/work/1_backlog");
|
|
|
|
|
fs::create_dir_all(&backlog).unwrap();
|
|
|
|
|
fs::write(
|
|
|
|
|
backlog.join("20_story_dependent.md"),
|
|
|
|
|
"---\nname: Dependent Story\ndepends_on: [10, 11]\n---\n",
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
|
|
|
|
fs::write(
|
|
|
|
|
backlog.join("21_story_independent.md"),
|
|
|
|
|
"---\nname: Independent Story\n---\n",
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let ctx = crate::http::context::AppContext::new_test(tmp.path().to_path_buf());
|
|
|
|
|
let state = load_pipeline_state(&ctx).unwrap();
|
|
|
|
|
|
|
|
|
|
let dependent = state.backlog.iter().find(|s| s.story_id == "20_story_dependent").unwrap();
|
|
|
|
|
assert_eq!(dependent.depends_on, Some(vec![10, 11]));
|
|
|
|
|
|
|
|
|
|
let independent = state.backlog.iter().find(|s| s.story_id == "21_story_independent").unwrap();
|
|
|
|
|
assert_eq!(independent.depends_on, None);
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-22 19:07:07 +00:00
|
|
|
#[test]
fn load_upcoming_parses_metadata() {
    // Backlog files should be returned in order with their `name` front-matter
    // field parsed into the story metadata.
    let dir = tempfile::tempdir().unwrap();
    let backlog_dir = dir.path().join(".huskies/work/1_backlog");
    fs::create_dir_all(&backlog_dir).unwrap();

    let fixtures = [
        ("31_story_view_upcoming.md", "---\nname: View Upcoming\n---\n# Story\n"),
        ("32_story_worktree.md", "---\nname: Worktree Orchestration\n---\n# Story\n"),
    ];
    for (file, body) in fixtures {
        fs::write(backlog_dir.join(file), body).unwrap();
    }

    let ctx = crate::http::context::AppContext::new_test(dir.path().to_path_buf());
    let stories = load_upcoming_stories(&ctx).unwrap();

    assert_eq!(stories.len(), 2);
    assert_eq!(stories[0].story_id, "31_story_view_upcoming");
    assert_eq!(stories[0].name.as_deref(), Some("View Upcoming"));
    assert_eq!(stories[1].story_id, "32_story_worktree");
    assert_eq!(stories[1].name.as_deref(), Some("Worktree Orchestration"));
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn load_upcoming_skips_non_md_files() {
    // Non-markdown entries (e.g. `.gitkeep`) in the backlog directory must be
    // ignored; only `.md` stories are returned.
    let dir = tempfile::tempdir().unwrap();
    let backlog_dir = dir.path().join(".huskies/work/1_backlog");
    fs::create_dir_all(&backlog_dir).unwrap();

    fs::write(backlog_dir.join(".gitkeep"), "").unwrap();
    fs::write(backlog_dir.join("31_story_example.md"), "---\nname: A Story\n---\n").unwrap();

    let ctx = crate::http::context::AppContext::new_test(dir.path().to_path_buf());
    let stories = load_upcoming_stories(&ctx).unwrap();

    assert_eq!(stories.len(), 1);
    assert_eq!(stories[0].story_id, "31_story_example");
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn validate_story_dirs_valid_files() {
    // Well-formed stories (front matter with a `name`) in both the current and
    // backlog directories should all validate with no error message.
    let dir = tempfile::tempdir().unwrap();
    let current_dir = dir.path().join(".huskies/work/2_current");
    let backlog_dir = dir.path().join(".huskies/work/1_backlog");
    for d in [&current_dir, &backlog_dir] {
        fs::create_dir_all(d).unwrap();
    }

    fs::write(
        current_dir.join("28_story_todos.md"),
        "---\nname: Show TODOs\n---\n# Story\n",
    )
    .unwrap();
    fs::write(
        backlog_dir.join("36_story_front_matter.md"),
        "---\nname: Enforce Front Matter\n---\n# Story\n",
    )
    .unwrap();

    let results = validate_story_dirs(dir.path()).unwrap();

    assert_eq!(results.len(), 2);
    for r in &results {
        assert!(r.valid);
        assert!(r.error.is_none());
    }
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn validate_story_dirs_missing_front_matter() {
    // A story file with no `---` front-matter delimiters must be flagged
    // invalid with the dedicated error message.
    let dir = tempfile::tempdir().unwrap();
    let current_dir = dir.path().join(".huskies/work/2_current");
    fs::create_dir_all(&current_dir).unwrap();
    fs::write(current_dir.join("28_story_todos.md"), "# No front matter\n").unwrap();

    let results = validate_story_dirs(dir.path()).unwrap();

    assert_eq!(results.len(), 1);
    let first = &results[0];
    assert!(!first.valid);
    assert_eq!(first.error.as_deref(), Some("Missing front matter"));
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn validate_story_dirs_missing_required_fields() {
    // Front matter that is present but empty must fail validation and the
    // error should call out the missing `name` field.
    let dir = tempfile::tempdir().unwrap();
    let current_dir = dir.path().join(".huskies/work/2_current");
    fs::create_dir_all(&current_dir).unwrap();
    fs::write(current_dir.join("28_story_todos.md"), "---\n---\n# Story\n").unwrap();

    let results = validate_story_dirs(dir.path()).unwrap();

    assert_eq!(results.len(), 1);
    let first = &results[0];
    assert!(!first.valid);
    let message = first.error.as_deref().unwrap();
    assert!(message.contains("Missing 'name' field"));
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn validate_story_dirs_empty_when_no_dirs() {
    // With no `.huskies` tree at all, validation succeeds with zero results
    // rather than erroring on the missing directories.
    let dir = tempfile::tempdir().unwrap();
    let results = validate_story_dirs(dir.path()).unwrap();
    assert_eq!(results.len(), 0);
}
|
|
|
|
|
|
|
|
|
|
// --- slugify_name tests ---
|
|
|
|
|
|
|
|
|
|
#[test]
fn slugify_simple_name() {
    // Plain words separated by spaces become lowercase snake_case.
    let slug = slugify_name("Enforce Front Matter on All Story Files");
    assert_eq!(slug, "enforce_front_matter_on_all_story_files");
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn slugify_with_special_chars() {
    // Punctuation is dropped rather than turned into separators.
    let slug = slugify_name("Hello, World! (v2)");
    assert_eq!(slug, "hello_world_v2");
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn slugify_leading_trailing_underscores() {
    // Leading/trailing whitespace maps to separators that are then stripped,
    // so no underscores remain at either edge.
    // NOTE(review): the fixture uses spaces, not literal underscores — the
    // name-to-underscore stripping is exercised indirectly; confirm intent.
    let slug = slugify_name("  spaces  ");
    assert_eq!(slug, "spaces");
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn slugify_consecutive_separators() {
    // Runs of mixed separators (hyphens, underscores, spaces) collapse to a
    // single underscore.
    let slug = slugify_name("a--b__c  d");
    assert_eq!(slug, "a_b_c_d");
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn slugify_empty_after_strip() {
    // Input that is all punctuation slugifies to the empty string.
    let slug = slugify_name("!!!");
    assert_eq!(slug, "");
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn slugify_already_snake_case() {
    // An already-valid snake_case name passes through unchanged.
    let slug = slugify_name("my_story_name");
    assert_eq!(slug, "my_story_name");
}
|
|
|
|
|
|
|
|
|
|
// --- next_item_number tests ---
|
|
|
|
|
|
|
|
|
|
#[test]
fn next_item_number_empty_dirs() {
    // An existing but empty backlog directory yields a number no lower than 1.
    let dir = tempfile::tempdir().unwrap();
    let backlog_dir = dir.path().join(".huskies/work/1_backlog");
    fs::create_dir_all(&backlog_dir).unwrap();

    // At least 1; may be higher due to shared global CRDT state in tests.
    let n = next_item_number(dir.path()).unwrap();
    assert!(n >= 1);
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn next_item_number_scans_all_dirs() {
    // The counter must consider backlog, current, AND done directories, so the
    // result exceeds the highest prefix found in any of them.
    let dir = tempfile::tempdir().unwrap();
    let work = dir.path().join(".huskies/work");

    let seed = [
        ("1_backlog", "10_story_foo.md"),
        ("2_current", "20_story_bar.md"),
        ("5_done", "15_story_baz.md"),
    ];
    for (subdir, file) in seed {
        let d = work.join(subdir);
        fs::create_dir_all(&d).unwrap();
        fs::write(d.join(file), "").unwrap();
    }

    // At least 21 (filesystem max is 20); may be higher due to shared CRDT state.
    let n = next_item_number(dir.path()).unwrap();
    assert!(n >= 21);
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn next_item_number_no_work_dirs() {
    // A repo with no `.huskies` tree at all still produces a valid number.
    let dir = tempfile::tempdir().unwrap();
    // No .huskies at all — at least 1.
    let n = next_item_number(dir.path()).unwrap();
    assert!(n >= 1);
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
// --- read_story_content tests ---
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
#[test]
fn read_story_content_from_filesystem_fallback() {
    // When a story exists only on disk, its bytes are returned verbatim.
    let dir = tempfile::tempdir().unwrap();
    let current_dir = dir.path().join(".huskies/work/2_current");
    fs::create_dir_all(&current_dir).unwrap();

    let body = "---\nname: Test\n---\n# Story\n";
    fs::write(current_dir.join("6_test.md"), body).unwrap();

    let loaded = read_story_content(dir.path(), "6_test").unwrap();
    assert_eq!(loaded, body);
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn read_story_content_not_found_returns_error() {
    // A missing story id must produce an Err whose message mentions "not found".
    let dir = tempfile::tempdir().unwrap();
    let outcome = read_story_content(dir.path(), "99_missing");
    match outcome {
        Ok(_) => panic!("expected an error for a missing story"),
        Err(msg) => assert!(msg.contains("not found")),
    }
}
|
|
|
|
|
|
|
|
|
|
// --- replace_or_append_section tests ---
|
|
|
|
|
|
|
|
|
|
#[test]
fn replace_or_append_section_appends_when_absent() {
    // When the heading is not present, the section is appended and the
    // existing document body is preserved.
    let original = "---\nname: T\n---\n# Story\n";
    let updated = replace_or_append_section(original, "## Test Results", "## Test Results\n\nfoo\n");

    for expected in ["## Test Results", "foo", "# Story"] {
        assert!(updated.contains(expected));
    }
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn replace_or_append_section_replaces_existing() {
    // When the heading already exists, only that section's body is swapped;
    // unrelated sections stay intact.
    let original = "# Story\n\n## Test Results\n\nold content\n\n## Other\n\nother content\n";
    let updated =
        replace_or_append_section(original, "## Test Results", "## Test Results\n\nnew content\n");

    assert!(updated.contains("new content"));
    assert!(!updated.contains("old content"));
    assert!(updated.contains("## Other"));
}
|
|
|
|
|
}
|