//! Bug operations — creates bug, refactor, and spike files in the pipeline.

use crate::io::story_metadata::parse_front_matter;
use std::path::Path;

use super::{next_item_number, slugify_name, write_story_content};
/// Create a bug file and store it in the database.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Also writes to the filesystem for backwards compatibility during migration.
|
2026-04-27 20:17:03 +00:00
|
|
|
/// Returns the bug_id (e.g. `"4"`).
|
2026-04-25 19:33:38 +00:00
|
|
|
#[allow(clippy::too_many_arguments)]
|
2026-03-22 19:07:07 +00:00
|
|
|
pub fn create_bug_file(
|
|
|
|
|
root: &Path,
|
|
|
|
|
name: &str,
|
|
|
|
|
description: &str,
|
|
|
|
|
steps_to_reproduce: &str,
|
|
|
|
|
actual_result: &str,
|
|
|
|
|
expected_result: &str,
|
|
|
|
|
acceptance_criteria: Option<&[String]>,
|
2026-04-25 19:33:38 +00:00
|
|
|
depends_on: Option<&[u32]>,
|
2026-03-22 19:07:07 +00:00
|
|
|
) -> Result<String, String> {
|
|
|
|
|
let bug_number = next_item_number(root)?;
|
|
|
|
|
let slug = slugify_name(name);
|
|
|
|
|
|
|
|
|
|
if slug.is_empty() {
|
|
|
|
|
return Err("Name must contain at least one alphanumeric character.".to_string());
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-27 20:17:03 +00:00
|
|
|
let bug_id = format!("{bug_number}");
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
let mut content = String::new();
|
|
|
|
|
content.push_str("---\n");
|
2026-04-27 20:17:03 +00:00
|
|
|
content.push_str("type: bug\n");
|
2026-03-22 19:07:07 +00:00
|
|
|
content.push_str(&format!("name: \"{}\"\n", name.replace('"', "\\\"")));
|
2026-04-25 19:33:38 +00:00
|
|
|
if let Some(deps) = depends_on.filter(|d| !d.is_empty()) {
|
|
|
|
|
let nums: Vec<String> = deps.iter().map(|n| n.to_string()).collect();
|
|
|
|
|
content.push_str(&format!("depends_on: [{}]\n", nums.join(", ")));
|
|
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
content.push_str("---\n\n");
|
|
|
|
|
content.push_str(&format!("# Bug {bug_number}: {name}\n\n"));
|
|
|
|
|
content.push_str("## Description\n\n");
|
|
|
|
|
content.push_str(description);
|
|
|
|
|
content.push_str("\n\n");
|
|
|
|
|
content.push_str("## How to Reproduce\n\n");
|
|
|
|
|
content.push_str(steps_to_reproduce);
|
|
|
|
|
content.push_str("\n\n");
|
|
|
|
|
content.push_str("## Actual Result\n\n");
|
|
|
|
|
content.push_str(actual_result);
|
|
|
|
|
content.push_str("\n\n");
|
|
|
|
|
content.push_str("## Expected Result\n\n");
|
|
|
|
|
content.push_str(expected_result);
|
|
|
|
|
content.push_str("\n\n");
|
|
|
|
|
content.push_str("## Acceptance Criteria\n\n");
|
|
|
|
|
if let Some(criteria) = acceptance_criteria {
|
|
|
|
|
for criterion in criteria {
|
|
|
|
|
content.push_str(&format!("- [ ] {criterion}\n"));
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
content.push_str("- [ ] Bug is fixed and verified\n");
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-10 14:56:13 +00:00
|
|
|
// Write to database content store and CRDT.
|
|
|
|
|
write_story_content(root, &bug_id, "1_backlog", &content);
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
Ok(bug_id)
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Create a spike file and store it in the database.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-27 20:17:03 +00:00
|
|
|
/// Returns the spike_id (e.g. `"4"`).
|
2026-03-22 19:07:07 +00:00
|
|
|
pub fn create_spike_file(
|
|
|
|
|
root: &Path,
|
|
|
|
|
name: &str,
|
|
|
|
|
description: Option<&str>,
|
2026-04-27 16:26:15 +00:00
|
|
|
acceptance_criteria: &[String],
|
2026-03-22 19:07:07 +00:00
|
|
|
) -> Result<String, String> {
|
|
|
|
|
let spike_number = next_item_number(root)?;
|
|
|
|
|
let slug = slugify_name(name);
|
|
|
|
|
|
|
|
|
|
if slug.is_empty() {
|
|
|
|
|
return Err("Name must contain at least one alphanumeric character.".to_string());
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-27 20:17:03 +00:00
|
|
|
let spike_id = format!("{spike_number}");
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
let mut content = String::new();
|
|
|
|
|
content.push_str("---\n");
|
2026-04-27 20:17:03 +00:00
|
|
|
content.push_str("type: spike\n");
|
2026-03-22 19:07:07 +00:00
|
|
|
content.push_str(&format!("name: \"{}\"\n", name.replace('"', "\\\"")));
|
|
|
|
|
content.push_str("---\n\n");
|
|
|
|
|
content.push_str(&format!("# Spike {spike_number}: {name}\n\n"));
|
|
|
|
|
content.push_str("## Question\n\n");
|
|
|
|
|
if let Some(desc) = description {
|
|
|
|
|
content.push_str(desc);
|
|
|
|
|
content.push('\n');
|
|
|
|
|
} else {
|
|
|
|
|
content.push_str("- TBD\n");
|
|
|
|
|
}
|
|
|
|
|
content.push('\n');
|
|
|
|
|
content.push_str("## Hypothesis\n\n");
|
|
|
|
|
content.push_str("- TBD\n\n");
|
|
|
|
|
content.push_str("## Timebox\n\n");
|
|
|
|
|
content.push_str("- TBD\n\n");
|
|
|
|
|
content.push_str("## Investigation Plan\n\n");
|
|
|
|
|
content.push_str("- TBD\n\n");
|
|
|
|
|
content.push_str("## Findings\n\n");
|
|
|
|
|
content.push_str("- TBD\n\n");
|
|
|
|
|
content.push_str("## Recommendation\n\n");
|
2026-04-27 16:26:15 +00:00
|
|
|
content.push_str("- TBD\n\n");
|
|
|
|
|
content.push_str("## Acceptance Criteria\n\n");
|
|
|
|
|
if acceptance_criteria.is_empty() {
|
|
|
|
|
content.push_str("- [ ] TBD\n");
|
|
|
|
|
} else {
|
|
|
|
|
for criterion in acceptance_criteria {
|
|
|
|
|
content.push_str(&format!("- [ ] {criterion}\n"));
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-10 14:56:13 +00:00
|
|
|
// Write to database content store and CRDT.
|
|
|
|
|
write_story_content(root, &spike_id, "1_backlog", &content);
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
Ok(spike_id)
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Create a refactor work item and store it in the database.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-27 20:17:03 +00:00
|
|
|
/// Returns the refactor_id (e.g. `"5"`).
|
2026-03-22 19:07:07 +00:00
|
|
|
pub fn create_refactor_file(
|
|
|
|
|
root: &Path,
|
|
|
|
|
name: &str,
|
|
|
|
|
description: Option<&str>,
|
|
|
|
|
acceptance_criteria: Option<&[String]>,
|
2026-04-25 19:33:38 +00:00
|
|
|
depends_on: Option<&[u32]>,
|
2026-03-22 19:07:07 +00:00
|
|
|
) -> Result<String, String> {
|
|
|
|
|
let refactor_number = next_item_number(root)?;
|
|
|
|
|
let slug = slugify_name(name);
|
|
|
|
|
|
|
|
|
|
if slug.is_empty() {
|
|
|
|
|
return Err("Name must contain at least one alphanumeric character.".to_string());
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-27 20:17:03 +00:00
|
|
|
let refactor_id = format!("{refactor_number}");
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
let mut content = String::new();
|
|
|
|
|
content.push_str("---\n");
|
2026-04-27 20:17:03 +00:00
|
|
|
content.push_str("type: refactor\n");
|
2026-03-22 19:07:07 +00:00
|
|
|
content.push_str(&format!("name: \"{}\"\n", name.replace('"', "\\\"")));
|
2026-04-25 19:33:38 +00:00
|
|
|
if let Some(deps) = depends_on.filter(|d| !d.is_empty()) {
|
|
|
|
|
let nums: Vec<String> = deps.iter().map(|n| n.to_string()).collect();
|
|
|
|
|
content.push_str(&format!("depends_on: [{}]\n", nums.join(", ")));
|
|
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
content.push_str("---\n\n");
|
|
|
|
|
content.push_str(&format!("# Refactor {refactor_number}: {name}\n\n"));
|
|
|
|
|
content.push_str("## Current State\n\n");
|
|
|
|
|
content.push_str("- TBD\n\n");
|
|
|
|
|
content.push_str("## Desired State\n\n");
|
|
|
|
|
if let Some(desc) = description {
|
|
|
|
|
content.push_str(desc);
|
|
|
|
|
content.push('\n');
|
|
|
|
|
} else {
|
|
|
|
|
content.push_str("- TBD\n");
|
|
|
|
|
}
|
|
|
|
|
content.push('\n');
|
|
|
|
|
content.push_str("## Acceptance Criteria\n\n");
|
|
|
|
|
if let Some(criteria) = acceptance_criteria {
|
|
|
|
|
for criterion in criteria {
|
|
|
|
|
content.push_str(&format!("- [ ] {criterion}\n"));
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
content.push_str("- [ ] Refactoring complete and all tests pass\n");
|
|
|
|
|
}
|
|
|
|
|
content.push('\n');
|
|
|
|
|
content.push_str("## Out of Scope\n\n");
|
|
|
|
|
content.push_str("- TBD\n");
|
|
|
|
|
|
2026-04-10 14:56:13 +00:00
|
|
|
// Write to database content store and CRDT.
|
|
|
|
|
write_story_content(root, &refactor_id, "1_backlog", &content);
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
Ok(refactor_id)
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-27 20:17:03 +00:00
|
|
|
/// Returns true if the item stem is a bug item.
|
|
|
|
|
///
|
|
|
|
|
/// Checks the slug-based ID format first (e.g. `"4_bug_login_crash"`), then
|
|
|
|
|
/// falls back to reading `type: bug` from the content store for numeric-only IDs.
|
2026-03-22 19:07:07 +00:00
|
|
|
fn is_bug_item(stem: &str) -> bool {
|
|
|
|
|
let after_num = stem.trim_start_matches(|c: char| c.is_ascii_digit());
|
2026-04-27 20:17:03 +00:00
|
|
|
if after_num.starts_with("_bug_") {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
// Numeric-only ID: check content store front matter.
|
|
|
|
|
if after_num.is_empty() {
|
|
|
|
|
return crate::db::read_content(stem)
|
|
|
|
|
.and_then(|c| parse_front_matter(&c).ok())
|
|
|
|
|
.and_then(|m| m.item_type)
|
|
|
|
|
.map(|t| t == "bug")
|
|
|
|
|
.unwrap_or(false);
|
|
|
|
|
}
|
|
|
|
|
false
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Extract bug name from content (heading or front matter).
|
|
|
|
|
fn extract_bug_name_from_content(content: &str) -> Option<String> {
|
|
|
|
|
// Try front matter first.
|
2026-04-13 14:07:08 +00:00
|
|
|
if let Ok(meta) = parse_front_matter(content)
|
|
|
|
|
&& let Some(name) = meta.name
|
|
|
|
|
{
|
2026-04-08 03:03:59 +00:00
|
|
|
return Some(name);
|
|
|
|
|
}
|
|
|
|
|
// Fallback: heading.
|
|
|
|
|
for line in content.lines() {
|
2026-04-13 14:07:08 +00:00
|
|
|
if let Some(rest) = line.strip_prefix("# Bug ")
|
|
|
|
|
&& let Some(colon_pos) = rest.find(": ")
|
|
|
|
|
{
|
2026-04-08 03:03:59 +00:00
|
|
|
return Some(rest[colon_pos + 2..].to_string());
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
None
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-10 14:56:13 +00:00
|
|
|
/// List all open bugs from CRDT + content store.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
|
|
|
|
/// Returns a sorted list of `(bug_id, name)` pairs.
|
2026-04-10 14:56:13 +00:00
|
|
|
pub fn list_bug_files(_root: &Path) -> Result<Vec<(String, String)>, String> {
|
2026-03-22 19:07:07 +00:00
|
|
|
let mut bugs = Vec::new();
|
|
|
|
|
|
2026-04-09 21:24:11 +00:00
|
|
|
for item in crate::pipeline_state::read_all_typed() {
|
2026-04-13 14:07:08 +00:00
|
|
|
if !matches!(item.stage, crate::pipeline_state::Stage::Backlog)
|
|
|
|
|
|| !is_bug_item(&item.story_id.0)
|
|
|
|
|
{
|
2026-04-09 21:24:11 +00:00
|
|
|
continue;
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
2026-04-09 21:24:11 +00:00
|
|
|
let sid = item.story_id.0;
|
2026-04-13 14:07:08 +00:00
|
|
|
let name = if item.name.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(item.name)
|
|
|
|
|
}
|
|
|
|
|
.or_else(|| crate::db::read_content(&sid).and_then(|c| extract_bug_name_from_content(&c)))
|
|
|
|
|
.unwrap_or_else(|| sid.clone());
|
2026-04-09 21:24:11 +00:00
|
|
|
bugs.push((sid, name));
|
2026-04-08 03:03:59 +00:00
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
bugs.sort_by(|a, b| a.0.cmp(&b.0));
|
|
|
|
|
Ok(bugs)
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Returns true if the item stem is a refactor item.
|
2026-04-27 20:17:03 +00:00
|
|
|
///
|
|
|
|
|
/// Checks the slug-based ID format first (e.g. `"5_refactor_split_agents_rs"`), then
|
|
|
|
|
/// falls back to reading `type: refactor` from the content store for numeric-only IDs.
|
2026-03-22 19:07:07 +00:00
|
|
|
fn is_refactor_item(stem: &str) -> bool {
|
|
|
|
|
let after_num = stem.trim_start_matches(|c: char| c.is_ascii_digit());
|
2026-04-27 20:17:03 +00:00
|
|
|
if after_num.starts_with("_refactor_") {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
// Numeric-only ID: check content store front matter.
|
|
|
|
|
if after_num.is_empty() {
|
|
|
|
|
return crate::db::read_content(stem)
|
|
|
|
|
.and_then(|c| parse_front_matter(&c).ok())
|
|
|
|
|
.and_then(|m| m.item_type)
|
|
|
|
|
.map(|t| t == "refactor")
|
|
|
|
|
.unwrap_or(false);
|
|
|
|
|
}
|
|
|
|
|
false
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
2026-04-10 14:56:13 +00:00
|
|
|
/// List all open refactors from CRDT + content store.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
|
|
|
|
/// Returns a sorted list of `(refactor_id, name)` pairs.
|
2026-04-10 14:56:13 +00:00
|
|
|
pub fn list_refactor_files(_root: &Path) -> Result<Vec<(String, String)>, String> {
|
2026-03-22 19:07:07 +00:00
|
|
|
let mut refactors = Vec::new();
|
|
|
|
|
|
2026-04-09 21:24:11 +00:00
|
|
|
for item in crate::pipeline_state::read_all_typed() {
|
2026-04-13 14:07:08 +00:00
|
|
|
if !matches!(item.stage, crate::pipeline_state::Stage::Backlog)
|
|
|
|
|
|| !is_refactor_item(&item.story_id.0)
|
|
|
|
|
{
|
2026-04-09 21:24:11 +00:00
|
|
|
continue;
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
2026-04-09 21:24:11 +00:00
|
|
|
let sid = item.story_id.0;
|
2026-04-13 14:07:08 +00:00
|
|
|
let name = if item.name.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(item.name)
|
|
|
|
|
}
|
|
|
|
|
.or_else(|| {
|
|
|
|
|
crate::db::read_content(&sid)
|
|
|
|
|
.and_then(|c| parse_front_matter(&c).ok())
|
|
|
|
|
.and_then(|m| m.name)
|
|
|
|
|
})
|
|
|
|
|
.unwrap_or_else(|| sid.clone());
|
2026-04-09 21:24:11 +00:00
|
|
|
refactors.push((sid, name));
|
2026-04-08 03:03:59 +00:00
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
|
|
|
|
refactors.sort_by(|a, b| a.0.cmp(&b.0));
|
|
|
|
|
Ok(refactors)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;

    /// Initialise a throwaway git repository in `root` with a test identity
    /// and one empty commit, for create_* helpers that need a repo present.
    fn setup_git_repo(root: &std::path::Path) {
        std::process::Command::new("git")
            .args(["init"])
            .current_dir(root)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(root)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(root)
            .output()
            .unwrap();
        std::process::Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(root)
            .output()
            .unwrap();
    }

    // ── Bug file helper tests ──────────────────────────────────────────────────

    #[test]
    fn next_item_number_starts_at_1_when_empty_bugs() {
        let tmp = tempfile::tempdir().unwrap();
        // NOTE(review): ">= 1" rather than "== 1" — the content store appears
        // to be shared across tests, so earlier tests may have bumped the counter.
        assert!(super::super::next_item_number(tmp.path()).unwrap() >= 1);
    }

    #[test]
    fn next_item_number_increments_from_existing_bugs() {
        crate::db::ensure_content_store();
        let tmp = tempfile::tempdir().unwrap();
        let backlog = tmp.path().join(".huskies/work/1_backlog");
        fs::create_dir_all(&backlog).unwrap();
        fs::write(backlog.join("1_bug_crash.md"), "").unwrap();
        fs::write(backlog.join("3_bug_another.md"), "").unwrap();
        // Also write to content store so next_item_number sees them.
        crate::db::write_item_with_content("1_bug_crash", "1_backlog", "---\nname: Crash\n---\n");
        crate::db::write_item_with_content(
            "3_bug_another",
            "1_backlog",
            "---\nname: Another\n---\n",
        );
        assert!(super::super::next_item_number(tmp.path()).unwrap() >= 4);
    }

    #[test]
    fn next_item_number_scans_archived_too() {
        crate::db::ensure_content_store();
        let tmp = tempfile::tempdir().unwrap();
        let backlog = tmp.path().join(".huskies/work/1_backlog");
        let archived = tmp.path().join(".huskies/work/5_done");
        fs::create_dir_all(&backlog).unwrap();
        fs::create_dir_all(&archived).unwrap();
        fs::write(archived.join("5_bug_old.md"), "").unwrap();
        // Also write to content store so next_item_number sees it.
        crate::db::write_item_with_content("5_bug_old", "5_done", "---\nname: Old Bug\n---\n");
        assert!(super::super::next_item_number(tmp.path()).unwrap() >= 6);
    }

    #[test]
    fn list_bug_files_no_crash_on_missing_dir() {
        // list_bug_files now reads from the global CRDT, not the filesystem.
        // Verify it does not panic when called with a non-existent project root.
        let tmp = tempfile::tempdir().unwrap();
        let result = list_bug_files(tmp.path());
        assert!(result.is_ok());
    }

    #[test]
    fn list_bug_files_excludes_archive_subdir() {
        let tmp = tempfile::tempdir().unwrap();
        crate::db::ensure_content_store();
        // Bug in backlog (should appear).
        crate::db::write_item_with_content(
            "7001_bug_open",
            "1_backlog",
            "---\nname: Open Bug\n---\n# Bug 7001: Open Bug\n",
        );
        // Bug in done (should NOT appear — list_bug_files only returns Backlog).
        crate::db::write_item_with_content(
            "7002_bug_closed",
            "5_done",
            "---\nname: Closed Bug\n---\n# Bug 7002: Closed Bug\n",
        );

        let result = list_bug_files(tmp.path()).unwrap();
        assert!(
            result
                .iter()
                .any(|(id, name)| id == "7001_bug_open" && name == "Open Bug")
        );
        assert!(!result.iter().any(|(id, _)| id == "7002_bug_closed"));
    }

    #[test]
    fn list_bug_files_sorted_by_id() {
        let tmp = tempfile::tempdir().unwrap();
        crate::db::ensure_content_store();
        // Insert deliberately out of order; the listing must come back sorted.
        crate::db::write_item_with_content(
            "7013_bug_third",
            "1_backlog",
            "---\nname: Third\n---\n# Bug 7013: Third\n",
        );
        crate::db::write_item_with_content(
            "7011_bug_first",
            "1_backlog",
            "---\nname: First\n---\n# Bug 7011: First\n",
        );
        crate::db::write_item_with_content(
            "7012_bug_second",
            "1_backlog",
            "---\nname: Second\n---\n# Bug 7012: Second\n",
        );

        let result = list_bug_files(tmp.path()).unwrap();
        // Find positions of our three bugs in the sorted result.
        let pos_first = result
            .iter()
            .position(|(id, _)| id == "7011_bug_first")
            .unwrap();
        let pos_second = result
            .iter()
            .position(|(id, _)| id == "7012_bug_second")
            .unwrap();
        let pos_third = result
            .iter()
            .position(|(id, _)| id == "7013_bug_third")
            .unwrap();
        assert!(pos_first < pos_second);
        assert!(pos_second < pos_third);
    }

    #[test]
    fn extract_bug_name_from_content_parses_heading() {
        let content = "# Bug 1: Login page crashes\n\n## Description\n";
        let name = extract_bug_name_from_content(content).unwrap();
        assert_eq!(name, "Login page crashes");
    }

    #[test]
    fn create_bug_file_writes_correct_content() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo(tmp.path());

        let bug_id = create_bug_file(
            tmp.path(),
            "Login Crash",
            "The login page crashes on submit.",
            "1. Go to /login\n2. Click submit",
            "Page crashes with 500 error",
            "Login succeeds",
            Some(&["Login form submits without error".to_string()]),
            None,
        )
        .unwrap();

        assert!(
            bug_id.chars().all(|c| c.is_ascii_digit()),
            "bug ID must be numeric-only, got: {bug_id}"
        );

        // Check content exists (either in DB or filesystem).
        let contents = crate::db::read_content(&bug_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{bug_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("bug content should exist");

        assert!(
            contents.starts_with("---\ntype: bug\nname: \"Login Crash\"\n---"),
            "bug file must start with YAML front matter including type field"
        );
        assert!(
            contents.contains("Login Crash"),
            "content should mention bug name"
        );
        assert!(contents.contains("## Description"));
        assert!(contents.contains("The login page crashes on submit."));
        assert!(contents.contains("## How to Reproduce"));
        assert!(contents.contains("1. Go to /login"));
        assert!(contents.contains("## Actual Result"));
        assert!(contents.contains("Page crashes with 500 error"));
        assert!(contents.contains("## Expected Result"));
        assert!(contents.contains("Login succeeds"));
        assert!(contents.contains("## Acceptance Criteria"));
        assert!(contents.contains("- [ ] Login form submits without error"));
    }

    #[test]
    fn create_bug_file_rejects_empty_name() {
        let tmp = tempfile::tempdir().unwrap();
        // "!!!" slugifies to an empty string, so creation must fail.
        let result = create_bug_file(
            tmp.path(),
            "!!!",
            "desc",
            "steps",
            "actual",
            "expected",
            None,
            None,
        );
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("alphanumeric"));
    }

    #[test]
    fn create_bug_file_uses_default_acceptance_criterion() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo(tmp.path());

        let bug_id = create_bug_file(
            tmp.path(),
            "Some Bug",
            "desc",
            "steps",
            "actual",
            "expected",
            None,
            None,
        )
        .unwrap();

        let contents = crate::db::read_content(&bug_id).expect("bug content should exist");

        assert!(
            contents.starts_with("---\ntype: bug\nname: \"Some Bug\"\n---"),
            "bug file must have YAML front matter with type field"
        );
        assert!(contents.contains("- [ ] Bug is fixed and verified"));
    }

    // ── create_spike_file tests ────────────────────────────────────────────────

    #[test]
    fn create_spike_file_writes_correct_content() {
        let tmp = tempfile::tempdir().unwrap();

        let spike_id =
            create_spike_file(tmp.path(), "Filesystem Watcher Architecture", None, &[]).unwrap();

        assert!(
            spike_id.chars().all(|c| c.is_ascii_digit()),
            "spike ID must be numeric-only, got: {spike_id}"
        );

        let contents = crate::db::read_content(&spike_id).expect("spike content should exist");

        assert!(
            contents
                .starts_with("---\ntype: spike\nname: \"Filesystem Watcher Architecture\"\n---"),
            "spike file must start with YAML front matter including type field"
        );
        assert!(
            contents.contains("Filesystem Watcher Architecture"),
            "content should mention spike name"
        );
        assert!(contents.contains("## Question"));
        assert!(contents.contains("## Hypothesis"));
        assert!(contents.contains("## Timebox"));
        assert!(contents.contains("## Investigation Plan"));
        assert!(contents.contains("## Findings"));
        assert!(contents.contains("## Recommendation"));
    }

    #[test]
    fn create_spike_file_uses_description_when_provided() {
        let tmp = tempfile::tempdir().unwrap();
        let description = "What is the best approach for watching filesystem events?";

        let spike_id =
            create_spike_file(tmp.path(), "FS Watcher Spike", Some(description), &[]).unwrap();

        let contents = crate::db::read_content(&spike_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{spike_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("spike content should exist");
        assert!(contents.contains(description));
    }

    #[test]
    fn create_spike_file_uses_placeholder_when_no_description() {
        let tmp = tempfile::tempdir().unwrap();
        let spike_id = create_spike_file(tmp.path(), "My Spike", None, &[]).unwrap();

        let contents = crate::db::read_content(&spike_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{spike_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("spike content should exist");
        assert!(contents.contains("## Question\n\n- TBD\n"));
    }

    #[test]
    fn create_spike_file_rejects_empty_name() {
        let tmp = tempfile::tempdir().unwrap();
        let result = create_spike_file(tmp.path(), "!!!", None, &[]);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("alphanumeric"));
    }

    #[test]
    fn create_spike_file_with_special_chars_in_name_produces_valid_yaml() {
        let tmp = tempfile::tempdir().unwrap();
        // Embedded double quotes must be escaped in the front matter so it
        // round-trips through the YAML parser.
        let name = "Spike: compare \"fast\" vs slow encoders";
        let result = create_spike_file(tmp.path(), name, None, &[]);
        assert!(result.is_ok(), "create_spike_file failed: {result:?}");

        let spike_id = result.unwrap();
        let contents = crate::db::read_content(&spike_id)
            .or_else(|| {
                let backlog = tmp.path().join(".huskies/work/1_backlog");
                fs::read_to_string(backlog.join(format!("{spike_id}.md"))).ok()
            })
            .expect("spike content should exist");

        let meta = parse_front_matter(&contents).expect("front matter should be valid YAML");
        assert_eq!(meta.name.as_deref(), Some(name));
    }

    #[test]
    fn create_spike_file_increments_from_existing_items() {
        let tmp = tempfile::tempdir().unwrap();
        crate::db::ensure_content_store();
        // Seed a high-numbered item into the CRDT so next_item_number goes beyond it.
        crate::db::write_item_with_content(
            "7050_story_existing",
            "1_backlog",
            "---\nname: Existing\n---\n",
        );

        let spike_id = create_spike_file(tmp.path(), "My Spike", None, &[]).unwrap();
        assert!(
            spike_id.chars().all(|c| c.is_ascii_digit()),
            "spike ID must be numeric-only, got: {spike_id}"
        );
        let num: u32 = spike_id.parse().unwrap();
        assert!(
            num >= 7051,
            "expected spike number >= 7051, got: {spike_id}"
        );
    }

    // ── Bug 640: create_bug_file / create_refactor_file depends_on tests ────────

    #[test]
    fn create_bug_file_with_depends_on_writes_front_matter_array() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo(tmp.path());

        let bug_id = create_bug_file(
            tmp.path(),
            "Dep Bug",
            "desc",
            "steps",
            "actual",
            "expected",
            None,
            Some(&[42, 43]),
        )
        .unwrap();

        let contents = crate::db::read_content(&bug_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{bug_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("bug content should exist");

        // depends_on must serialize as a bare YAML array, not a quoted string.
        assert!(
            contents.contains("depends_on: [42, 43]"),
            "front matter should contain depends_on array: {contents}"
        );
        assert!(
            !contents.contains("depends_on: \"["),
            "depends_on must not be quoted string: {contents}"
        );

        let meta = parse_front_matter(&contents).expect("front matter should be valid YAML");
        assert_eq!(meta.depends_on, Some(vec![42, 43]));
    }

    #[test]
    fn create_bug_file_without_depends_on_omits_field() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo(tmp.path());

        let bug_id = create_bug_file(
            tmp.path(),
            "No Dep Bug",
            "desc",
            "steps",
            "actual",
            "expected",
            None,
            None,
        )
        .unwrap();

        let contents = crate::db::read_content(&bug_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{bug_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("bug content should exist");

        assert!(
            !contents.contains("depends_on"),
            "front matter must not contain depends_on when not provided: {contents}"
        );
    }

    #[test]
    fn create_refactor_file_with_depends_on_writes_front_matter_array() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo(tmp.path());

        let refactor_id =
            create_refactor_file(tmp.path(), "Dep Refactor", None, None, Some(&[99])).unwrap();

        let contents = crate::db::read_content(&refactor_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{refactor_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("refactor content should exist");

        assert!(
            contents.contains("depends_on: [99]"),
            "front matter should contain depends_on array: {contents}"
        );

        let meta = parse_front_matter(&contents).expect("front matter should be valid YAML");
        assert_eq!(meta.depends_on, Some(vec![99]));
    }

    #[test]
    fn create_refactor_file_without_depends_on_omits_field() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo(tmp.path());

        let refactor_id =
            create_refactor_file(tmp.path(), "No Dep Refactor", None, None, None).unwrap();

        let contents = crate::db::read_content(&refactor_id)
            .or_else(|| {
                let filepath = tmp
                    .path()
                    .join(format!(".huskies/work/1_backlog/{refactor_id}.md"));
                fs::read_to_string(filepath).ok()
            })
            .expect("refactor content should exist");

        assert!(
            !contents.contains("depends_on"),
            "front matter must not contain depends_on when not provided: {contents}"
        );
    }
}
|