huskies: merge 530_story_eliminate_filesystem_markdown_shadows_entirely_crdt_db_is_the_only_story_store

This commit is contained in:
dave
2026-04-10 14:56:13 +00:00
parent 1dd675796b
commit 11d19d8902
26 changed files with 966 additions and 1668 deletions
+62 -127
View File
@@ -1,8 +1,7 @@
use crate::io::story_metadata::parse_front_matter;
use std::fs;
use std::path::Path;
use super::{next_item_number, slugify_name, write_story_content_with_fs};
use super::{next_item_number, slugify_name, write_story_content};
/// Create a bug file and store it in the database.
///
@@ -52,14 +51,8 @@ pub fn create_bug_file(
content.push_str("- [ ] Bug is fixed and verified\n");
}
// Write to database content store.
write_story_content_with_fs(root, &bug_id, "1_backlog", &content);
// Also write to filesystem for backwards compatibility.
let bugs_dir = root.join(".huskies").join("work").join("1_backlog");
if let Ok(()) = fs::create_dir_all(&bugs_dir) {
let _ = fs::write(bugs_dir.join(format!("{bug_id}.md")), &content);
}
// Write to database content store and CRDT.
write_story_content(root, &bug_id, "1_backlog", &content);
Ok(bug_id)
}
@@ -105,14 +98,8 @@ pub fn create_spike_file(
content.push_str("## Recommendation\n\n");
content.push_str("- TBD\n");
// Write to database content store.
write_story_content_with_fs(root, &spike_id, "1_backlog", &content);
// Also write to filesystem for backwards compatibility.
let backlog_dir = root.join(".huskies").join("work").join("1_backlog");
if let Ok(()) = fs::create_dir_all(&backlog_dir) {
let _ = fs::write(backlog_dir.join(format!("{spike_id}.md")), &content);
}
// Write to database content store and CRDT.
write_story_content(root, &spike_id, "1_backlog", &content);
Ok(spike_id)
}
@@ -162,14 +149,8 @@ pub fn create_refactor_file(
content.push_str("## Out of Scope\n\n");
content.push_str("- TBD\n");
// Write to database content store.
write_story_content_with_fs(root, &refactor_id, "1_backlog", &content);
// Also write to filesystem for backwards compatibility.
let backlog_dir = root.join(".huskies").join("work").join("1_backlog");
if let Ok(()) = fs::create_dir_all(&backlog_dir) {
let _ = fs::write(backlog_dir.join(format!("{refactor_id}.md")), &content);
}
// Write to database content store and CRDT.
write_story_content(root, &refactor_id, "1_backlog", &content);
Ok(refactor_id)
}
@@ -195,14 +176,12 @@ fn extract_bug_name_from_content(content: &str) -> Option<String> {
None
}
/// List all open bugs from CRDT + content store, falling back to filesystem.
/// List all open bugs from CRDT + content store.
///
/// Returns a sorted list of `(bug_id, name)` pairs.
pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
pub fn list_bug_files(_root: &Path) -> Result<Vec<(String, String)>, String> {
let mut bugs = Vec::new();
let mut seen = std::collections::HashSet::new();
// First: typed projection items in backlog that are bugs.
for item in crate::pipeline_state::read_all_typed() {
if !matches!(item.stage, crate::pipeline_state::Stage::Backlog) || !is_bug_item(&item.story_id.0) {
continue;
@@ -214,41 +193,9 @@ pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
.and_then(|c| extract_bug_name_from_content(&c))
})
.unwrap_or_else(|| sid.clone());
seen.insert(sid.clone());
bugs.push((sid, name));
}
// Then: filesystem fallback.
let backlog_dir = root.join(".huskies").join("work").join("1_backlog");
if backlog_dir.exists() {
for entry in
fs::read_dir(&backlog_dir).map_err(|e| format!("Failed to read backlog directory: {e}"))?
{
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
let path = entry.path();
if path.is_dir() || path.extension().and_then(|ext| ext.to_str()) != Some("md") {
continue;
}
let stem = path
.file_stem()
.and_then(|s| s.to_str())
.ok_or_else(|| "Invalid file name.".to_string())?;
if !is_bug_item(stem) || seen.contains(stem) {
continue;
}
let bug_id = stem.to_string();
let name = fs::read_to_string(&path)
.ok()
.and_then(|c| extract_bug_name_from_content(&c))
.unwrap_or_else(|| bug_id.clone());
bugs.push((bug_id, name));
}
}
bugs.sort_by(|a, b| a.0.cmp(&b.0));
Ok(bugs)
}
@@ -259,14 +206,12 @@ fn is_refactor_item(stem: &str) -> bool {
after_num.starts_with("_refactor_")
}
/// List all open refactors from CRDT + content store, falling back to filesystem.
/// List all open refactors from CRDT + content store.
///
/// Returns a sorted list of `(refactor_id, name)` pairs.
pub fn list_refactor_files(root: &Path) -> Result<Vec<(String, String)>, String> {
pub fn list_refactor_files(_root: &Path) -> Result<Vec<(String, String)>, String> {
let mut refactors = Vec::new();
let mut seen = std::collections::HashSet::new();
// First: typed projection items.
for item in crate::pipeline_state::read_all_typed() {
if !matches!(item.stage, crate::pipeline_state::Stage::Backlog) || !is_refactor_item(&item.story_id.0) {
continue;
@@ -279,42 +224,9 @@ pub fn list_refactor_files(root: &Path) -> Result<Vec<(String, String)>, String>
.and_then(|m| m.name)
})
.unwrap_or_else(|| sid.clone());
seen.insert(sid.clone());
refactors.push((sid, name));
}
// Then: filesystem fallback.
let backlog_dir = root.join(".huskies").join("work").join("1_backlog");
if backlog_dir.exists() {
for entry in fs::read_dir(&backlog_dir)
.map_err(|e| format!("Failed to read backlog directory: {e}"))?
{
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
let path = entry.path();
if path.is_dir() || path.extension().and_then(|ext| ext.to_str()) != Some("md") {
continue;
}
let stem = path
.file_stem()
.and_then(|s| s.to_str())
.ok_or_else(|| "Invalid file name.".to_string())?;
if !is_refactor_item(stem) || seen.contains(stem) {
continue;
}
let refactor_id = stem.to_string();
let name = fs::read_to_string(&path)
.ok()
.and_then(|contents| parse_front_matter(&contents).ok())
.and_then(|m| m.name)
.unwrap_or_else(|| refactor_id.clone());
refactors.push((refactor_id, name));
}
}
refactors.sort_by(|a, b| a.0.cmp(&b.0));
Ok(refactors)
}
@@ -322,6 +234,7 @@ pub fn list_refactor_files(root: &Path) -> Result<Vec<(String, String)>, String>
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
fn setup_git_repo(root: &std::path::Path) {
std::process::Command::new("git")
@@ -376,42 +289,63 @@ mod tests {
}
#[test]
fn list_bug_files_empty_when_no_bugs_dir() {
fn list_bug_files_no_crash_on_missing_dir() {
// list_bug_files now reads from the global CRDT, not the filesystem.
// Verify it does not panic when called with a non-existent project root.
let tmp = tempfile::tempdir().unwrap();
let result = list_bug_files(tmp.path()).unwrap();
assert!(result.is_empty());
let result = list_bug_files(tmp.path());
assert!(result.is_ok());
}
#[test]
fn list_bug_files_excludes_archive_subdir() {
let tmp = tempfile::tempdir().unwrap();
let backlog_dir = tmp.path().join(".huskies/work/1_backlog");
let archived_dir = tmp.path().join(".huskies/work/5_done");
fs::create_dir_all(&backlog_dir).unwrap();
fs::create_dir_all(&archived_dir).unwrap();
fs::write(backlog_dir.join("1_bug_open.md"), "# Bug 1: Open Bug\n").unwrap();
fs::write(archived_dir.join("2_bug_closed.md"), "# Bug 2: Closed Bug\n").unwrap();
crate::db::ensure_content_store();
// Bug in backlog (should appear).
crate::db::write_item_with_content(
"7001_bug_open",
"1_backlog",
"---\nname: Open Bug\n---\n# Bug 7001: Open Bug\n",
);
// Bug in done (should NOT appear — list_bug_files only returns Backlog).
crate::db::write_item_with_content(
"7002_bug_closed",
"5_done",
"---\nname: Closed Bug\n---\n# Bug 7002: Closed Bug\n",
);
let result = list_bug_files(tmp.path()).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].0, "1_bug_open");
assert_eq!(result[0].1, "Open Bug");
assert!(result.iter().any(|(id, name)| id == "7001_bug_open" && name == "Open Bug"));
assert!(!result.iter().any(|(id, _)| id == "7002_bug_closed"));
}
#[test]
fn list_bug_files_sorted_by_id() {
let tmp = tempfile::tempdir().unwrap();
let backlog_dir = tmp.path().join(".huskies/work/1_backlog");
fs::create_dir_all(&backlog_dir).unwrap();
fs::write(backlog_dir.join("3_bug_third.md"), "# Bug 3: Third\n").unwrap();
fs::write(backlog_dir.join("1_bug_first.md"), "# Bug 1: First\n").unwrap();
fs::write(backlog_dir.join("2_bug_second.md"), "# Bug 2: Second\n").unwrap();
crate::db::ensure_content_store();
crate::db::write_item_with_content(
"7013_bug_third",
"1_backlog",
"---\nname: Third\n---\n# Bug 7013: Third\n",
);
crate::db::write_item_with_content(
"7011_bug_first",
"1_backlog",
"---\nname: First\n---\n# Bug 7011: First\n",
);
crate::db::write_item_with_content(
"7012_bug_second",
"1_backlog",
"---\nname: Second\n---\n# Bug 7012: Second\n",
);
let result = list_bug_files(tmp.path()).unwrap();
assert_eq!(result.len(), 3);
assert_eq!(result[0].0, "1_bug_first");
assert_eq!(result[1].0, "2_bug_second");
assert_eq!(result[2].0, "3_bug_third");
// Find positions of our three bugs in the sorted result.
let pos_first = result.iter().position(|(id, _)| id == "7011_bug_first").unwrap();
let pos_second = result.iter().position(|(id, _)| id == "7012_bug_second").unwrap();
let pos_third = result.iter().position(|(id, _)| id == "7013_bug_third").unwrap();
assert!(pos_first < pos_second);
assert!(pos_second < pos_third);
}
#[test]
@@ -593,16 +527,17 @@ mod tests {
#[test]
fn create_spike_file_increments_from_existing_items() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".huskies/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join("5_story_existing.md"), "").unwrap();
crate::db::ensure_content_store();
// Seed a high-numbered item into the CRDT so next_item_number goes beyond it.
crate::db::write_item_with_content(
"7050_story_existing",
"1_backlog",
"---\nname: Existing\n---\n",
);
let spike_id = create_spike_file(tmp.path(), "My Spike", None).unwrap();
// The seeded CRDT item is numbered 7050, so the new spike must be assigned a
// higher number. The global content store may also hold higher-numbered items
// from parallel tests, so we only assert the suffix and that the prefix is >= 7051.
assert!(spike_id.ends_with("_spike_my_spike"), "expected ID to end with _spike_my_spike, got: {spike_id}");
let num: u32 = spike_id.chars().take_while(|c| c.is_ascii_digit()).collect::<String>().parse().unwrap();
assert!(num >= 6, "expected spike number >= 6, got: {spike_id}");
assert!(num >= 7051, "expected spike number >= 7051, got: {spike_id}");
}
}