huskies: merge 530_story_eliminate_filesystem_markdown_shadows_entirely_crdt_db_is_the_only_story_store
This commit is contained in:
+4
-88
@@ -73,6 +73,10 @@ pub fn delete_content(story_id: &str) {
|
||||
/// Safe to call multiple times — the `OnceLock` is set at most once.
|
||||
pub fn ensure_content_store() {
|
||||
let _ = CONTENT_STORE.set(Mutex::new(HashMap::new()));
|
||||
// In tests, also initialise the in-memory CRDT state so that
|
||||
// write_item_with_content() and read_all_typed() work without async SQLite.
|
||||
#[cfg(test)]
|
||||
crate::crdt_state::init_for_test();
|
||||
}
|
||||
|
||||
/// Return all story IDs present in the content store.
|
||||
@@ -333,73 +337,6 @@ pub fn next_item_number() -> u32 {
|
||||
max_num + 1
|
||||
}
|
||||
|
||||
// ── Filesystem migration ────────────────────────────────────────────
|
||||
|
||||
/// Import stories from `.huskies/work/` stage directories into the database.
|
||||
///
|
||||
/// For each `.md` file found in any stage directory, if it's not already in
|
||||
/// the content store, reads the file, stores it in the DB, and writes the
|
||||
/// CRDT state. After importing, renames the stage directories to
|
||||
/// `.huskies/work_archived/` so they are no longer used.
|
||||
pub fn import_from_filesystem(project_root: &Path) {
|
||||
let work_dir = project_root.join(".huskies").join("work");
|
||||
if !work_dir.exists() {
|
||||
return;
|
||||
}
|
||||
|
||||
let stages = [
|
||||
"1_backlog",
|
||||
"2_current",
|
||||
"3_qa",
|
||||
"4_merge",
|
||||
"5_done",
|
||||
"6_archived",
|
||||
];
|
||||
|
||||
let mut imported = 0u32;
|
||||
for stage in &stages {
|
||||
let stage_dir = work_dir.join(stage);
|
||||
if !stage_dir.exists() {
|
||||
continue;
|
||||
}
|
||||
let entries = match std::fs::read_dir(&stage_dir) {
|
||||
Ok(e) => e,
|
||||
Err(_) => continue,
|
||||
};
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
|
||||
continue;
|
||||
}
|
||||
let story_id = match path.file_stem().and_then(|s| s.to_str()) {
|
||||
Some(s) => s.to_string(),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
// Skip if already in the content store.
|
||||
if read_content(&story_id).is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let content = match std::fs::read_to_string(&path) {
|
||||
Ok(c) => c,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
write_item_with_content(&story_id, stage, &content);
|
||||
imported += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if imported > 0 {
|
||||
slog!("[db] Imported {imported} stories from filesystem into database");
|
||||
}
|
||||
|
||||
// Note: .huskies/work/ directories are kept in place during the migration
|
||||
// period to provide filesystem fallback for any code paths not yet fully
|
||||
// migrated to the DB content store. A future story will archive them once
|
||||
// all consumers are converted.
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
@@ -645,25 +582,4 @@ mod tests {
|
||||
assert!(n >= 1);
|
||||
}
|
||||
|
||||
#[test]
fn import_from_filesystem_imports_stories() {
    ensure_content_store();

    // Build a fake project root with two populated stage directories.
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let backlog = root.join(".huskies/work/1_backlog");
    let current = root.join(".huskies/work/2_current");
    fs::create_dir_all(&backlog).unwrap();
    // BUGFIX: the argument had been mangled to `¤t` (HTML-entity corruption
    // of `&current`), which does not compile. Restored the intended borrow.
    fs::create_dir_all(&current).unwrap();

    let content1 = "---\nname: Story One\n---\n# Story 1\n";
    let content2 = "---\nname: Story Two\n---\n# Story 2\n";
    fs::write(backlog.join("10_story_one.md"), content1).unwrap();
    fs::write(current.join("20_story_two.md"), content2).unwrap();

    import_from_filesystem(root);

    // Both stories should now be readable from the content store, keyed by
    // their file stems, with content byte-identical to the source files.
    assert_eq!(read_content("10_story_one").as_deref(), Some(content1));
    assert_eq!(read_content("20_story_two").as_deref(), Some(content2));
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user