rename .story_kit directory to .storkit and update all references

Renames the config directory and updates 514 references across 42 Rust
source files, plus CLAUDE.md, .gitignore, Makefile, script/release,
and .mcp.json files. All 1205 tests pass.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Dave
2026-03-20 11:34:53 +00:00
parent 375277f86e
commit 9581e5d51a
406 changed files with 531 additions and 530 deletions

View File

@@ -17,7 +17,7 @@ pub struct LogEntry {
/// Writes agent events to a persistent log file (JSONL format).
///
/// Each agent session gets its own log file at:
/// `.story_kit/logs/{story_id}/{agent_name}-{session_id}.log`
/// `.storkit/logs/{story_id}/{agent_name}-{session_id}.log`
pub struct AgentLogWriter {
file: File,
}
@@ -72,7 +72,7 @@ impl AgentLogWriter {
/// Return the log directory for a story.
fn log_dir(project_root: &Path, story_id: &str) -> PathBuf {
project_root
.join(".story_kit")
.join(".storkit")
.join("logs")
.join(story_id)
}
@@ -110,7 +110,7 @@ pub fn read_log(path: &Path) -> Result<Vec<LogEntry>, String> {
/// Find the most recent log file for a given story+agent combination.
///
/// Scans `.story_kit/logs/{story_id}/` for files matching `{agent_name}-*.log`
/// Scans `.storkit/logs/{story_id}/` for files matching `{agent_name}-*.log`
/// and returns the one with the most recent modification time.
pub fn find_latest_log(
project_root: &Path,
@@ -162,7 +162,7 @@ mod tests {
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-abc123").unwrap();
let expected_path = root
.join(".story_kit")
.join(".storkit")
.join("logs")
.join("42_story_foo")
.join("coder-1-sess-abc123.log");

View File

@@ -18,12 +18,12 @@ pub(super) fn item_type_from_id(item_id: &str) -> &'static str {
/// Return the source directory path for a work item (always work/1_backlog/).
fn item_source_dir(project_root: &Path, _item_id: &str) -> PathBuf {
project_root.join(".story_kit").join("work").join("1_backlog")
project_root.join(".storkit").join("work").join("1_backlog")
}
/// Return the done directory path for a work item (always work/5_done/).
fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf {
project_root.join(".story_kit").join("work").join("5_done")
project_root.join(".storkit").join("work").join("5_done")
}
/// Move a work item (story, bug, or spike) from `work/1_backlog/` to `work/2_current/`.
@@ -31,7 +31,7 @@ fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf {
/// Idempotent: if the item is already in `2_current/`, returns Ok without committing.
/// If the item is not found in `1_backlog/`, logs a warning and returns Ok.
pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let current_dir = sk.join("2_current");
let current_path = current_dir.join(format!("{story_id}.md"));
@@ -103,7 +103,7 @@ pub fn feature_branch_has_unmerged_changes(project_root: &Path, story_id: &str)
/// * If the story is already in `5_done/` or `6_archived/`, this is a no-op (idempotent).
/// * If the story is not found in `2_current/`, `4_merge/`, `5_done/`, or `6_archived/`, an error is returned.
pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let current_path = sk.join("2_current").join(format!("{story_id}.md"));
let merge_path = sk.join("4_merge").join(format!("{story_id}.md"));
let done_dir = sk.join("5_done");
@@ -153,7 +153,7 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
/// This stages a work item as ready for the mergemaster to pick up and merge into master.
/// Idempotent: if already in `4_merge/`, returns Ok without committing.
pub fn move_story_to_merge(project_root: &Path, story_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let current_path = sk.join("2_current").join(format!("{story_id}.md"));
let qa_path = sk.join("3_qa").join(format!("{story_id}.md"));
let merge_dir = sk.join("4_merge");
@@ -203,7 +203,7 @@ pub fn move_story_to_merge(project_root: &Path, story_id: &str) -> Result<(), St
/// This stages a work item for QA review before merging to master.
/// Idempotent: if already in `3_qa/`, returns Ok without committing.
pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let current_path = sk.join("2_current").join(format!("{story_id}.md"));
let qa_dir = sk.join("3_qa");
let qa_path = qa_dir.join(format!("{story_id}.md"));
@@ -246,7 +246,7 @@ pub fn reject_story_from_qa(
story_id: &str,
notes: &str,
) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let qa_path = sk.join("3_qa").join(format!("{story_id}.md"));
let current_dir = sk.join("2_current");
let current_path = current_dir.join(format!("{story_id}.md"));
@@ -311,7 +311,7 @@ pub fn move_story_to_stage(
)
})?;
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let target_dir = sk.join(target_dir_name);
let target_path = target_dir.join(format!("{story_id}.md"));
@@ -362,7 +362,7 @@ pub fn move_story_to_stage(
/// * If the bug is already in `5_done/`, this is a no-op (idempotent).
/// * If the bug is not found anywhere, an error is returned.
pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> {
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let current_path = sk.join("2_current").join(format!("{bug_id}.md"));
let backlog_path = sk.join("1_backlog").join(format!("{bug_id}.md"));
let archive_dir = item_archive_dir(project_root, bug_id);
@@ -405,8 +405,8 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let backlog = root.join(".story_kit/work/1_backlog");
let current = root.join(".story_kit/work/2_current");
let backlog = root.join(".storkit/work/1_backlog");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&backlog).unwrap();
fs::create_dir_all(&current).unwrap();
fs::write(backlog.join("10_story_foo.md"), "test").unwrap();
@@ -422,7 +422,7 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("11_story_foo.md"), "test").unwrap();
@@ -441,8 +441,8 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let backlog = root.join(".story_kit/work/1_backlog");
let current = root.join(".story_kit/work/2_current");
let backlog = root.join(".storkit/work/1_backlog");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&backlog).unwrap();
fs::create_dir_all(&current).unwrap();
fs::write(backlog.join("1_bug_test.md"), "# Bug 1\n").unwrap();
@@ -460,14 +460,14 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("2_bug_test.md"), "# Bug 2\n").unwrap();
close_bug_to_archive(root, "2_bug_test").unwrap();
assert!(!current.join("2_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_done/2_bug_test.md").exists());
assert!(root.join(".storkit/work/5_done/2_bug_test.md").exists());
}
#[test]
@@ -475,14 +475,14 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let backlog = root.join(".story_kit/work/1_backlog");
let backlog = root.join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join("3_bug_test.md"), "# Bug 3\n").unwrap();
close_bug_to_archive(root, "3_bug_test").unwrap();
assert!(!backlog.join("3_bug_test.md").exists());
assert!(root.join(".story_kit/work/5_done/3_bug_test.md").exists());
assert!(root.join(".storkit/work/5_done/3_bug_test.md").exists());
}
// ── item_type_from_id tests ────────────────────────────────────────────────
@@ -502,14 +502,14 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("20_story_foo.md"), "test").unwrap();
move_story_to_merge(root, "20_story_foo").unwrap();
assert!(!current.join("20_story_foo.md").exists());
assert!(root.join(".story_kit/work/4_merge/20_story_foo.md").exists());
assert!(root.join(".storkit/work/4_merge/20_story_foo.md").exists());
}
#[test]
@@ -517,14 +517,14 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let qa_dir = root.join(".story_kit/work/3_qa");
let qa_dir = root.join(".storkit/work/3_qa");
fs::create_dir_all(&qa_dir).unwrap();
fs::write(qa_dir.join("40_story_test.md"), "test").unwrap();
move_story_to_merge(root, "40_story_test").unwrap();
assert!(!qa_dir.join("40_story_test.md").exists());
assert!(root.join(".story_kit/work/4_merge/40_story_test.md").exists());
assert!(root.join(".storkit/work/4_merge/40_story_test.md").exists());
}
#[test]
@@ -532,7 +532,7 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let merge_dir = root.join(".story_kit/work/4_merge");
let merge_dir = root.join(".storkit/work/4_merge");
fs::create_dir_all(&merge_dir).unwrap();
fs::write(merge_dir.join("21_story_test.md"), "test").unwrap();
@@ -554,14 +554,14 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("30_story_qa.md"), "test").unwrap();
move_story_to_qa(root, "30_story_qa").unwrap();
assert!(!current.join("30_story_qa.md").exists());
assert!(root.join(".story_kit/work/3_qa/30_story_qa.md").exists());
assert!(root.join(".storkit/work/3_qa/30_story_qa.md").exists());
}
#[test]
@@ -569,7 +569,7 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let qa_dir = root.join(".story_kit/work/3_qa");
let qa_dir = root.join(".storkit/work/3_qa");
fs::create_dir_all(&qa_dir).unwrap();
fs::write(qa_dir.join("31_story_test.md"), "test").unwrap();
@@ -591,14 +591,14 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let merge_dir = root.join(".story_kit/work/4_merge");
let merge_dir = root.join(".storkit/work/4_merge");
fs::create_dir_all(&merge_dir).unwrap();
fs::write(merge_dir.join("22_story_test.md"), "test").unwrap();
move_story_to_archived(root, "22_story_test").unwrap();
assert!(!merge_dir.join("22_story_test.md").exists());
assert!(root.join(".story_kit/work/5_done/22_story_test.md").exists());
assert!(root.join(".storkit/work/5_done/22_story_test.md").exists());
}
#[test]
@@ -696,8 +696,8 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let qa_dir = root.join(".story_kit/work/3_qa");
let current_dir = root.join(".story_kit/work/2_current");
let qa_dir = root.join(".storkit/work/3_qa");
let current_dir = root.join(".storkit/work/2_current");
fs::create_dir_all(&qa_dir).unwrap();
fs::create_dir_all(&current_dir).unwrap();
fs::write(
@@ -728,7 +728,7 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current_dir = root.join(".story_kit/work/2_current");
let current_dir = root.join(".storkit/work/2_current");
fs::create_dir_all(&current_dir).unwrap();
fs::write(current_dir.join("51_story_test.md"), "---\nname: Test\n---\n# Story\n").unwrap();
@@ -743,8 +743,8 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let backlog = root.join(".story_kit/work/1_backlog");
let current = root.join(".story_kit/work/2_current");
let backlog = root.join(".storkit/work/1_backlog");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&backlog).unwrap();
fs::create_dir_all(&current).unwrap();
fs::write(backlog.join("60_story_move.md"), "test").unwrap();
@@ -762,8 +762,8 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let backlog = root.join(".story_kit/work/1_backlog");
let current = root.join(".storkit/work/2_current");
let backlog = root.join(".storkit/work/1_backlog");
fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&backlog).unwrap();
fs::write(current.join("61_story_back.md"), "test").unwrap();
@@ -781,7 +781,7 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("62_story_idem.md"), "test").unwrap();
@@ -813,8 +813,8 @@ mod tests {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let qa_dir = root.join(".story_kit/work/3_qa");
let backlog = root.join(".story_kit/work/1_backlog");
let qa_dir = root.join(".storkit/work/3_qa");
let backlog = root.join(".storkit/work/1_backlog");
fs::create_dir_all(&qa_dir).unwrap();
fs::create_dir_all(&backlog).unwrap();
fs::write(qa_dir.join("63_story_qa.md"), "test").unwrap();

View File

@@ -10,7 +10,7 @@ use super::gates::run_project_tests;
/// Global lock ensuring only one squash-merge runs at a time.
///
/// The merge pipeline uses a shared `.story_kit/merge_workspace` directory and
/// The merge pipeline uses a shared `.storkit/merge_workspace` directory and
/// temporary `merge-queue/{story_id}` branches. If two merges run concurrently,
/// the second call's initial cleanup destroys the first call's branch mid-flight,
/// causing `git cherry-pick merge-queue/…` to fail with "bad revision".
@@ -89,7 +89,7 @@ pub(crate) fn run_squash_merge(
let mut all_output = String::new();
let merge_branch = format!("merge-queue/{story_id}");
let merge_wt_path = project_root
.join(".story_kit")
.join(".storkit")
.join("merge_workspace");
// Ensure we start clean: remove any leftover merge workspace.
@@ -250,7 +250,7 @@ pub(crate) fn run_squash_merge(
}
// ── Bug 226: Verify the commit contains real code changes ─────
// If the merge only brought in .story_kit/ files (pipeline file moves),
// If the merge only brought in .storkit/ files (pipeline file moves),
// there are no actual code changes to land on master. Abort.
{
let diff_check = Command::new("git")
@@ -261,10 +261,10 @@ pub(crate) fn run_squash_merge(
let changed_files = String::from_utf8_lossy(&diff_check.stdout);
let has_code_changes = changed_files
.lines()
.any(|f| !f.starts_with(".story_kit/"));
.any(|f| !f.starts_with(".storkit/"));
if !has_code_changes {
all_output.push_str(
"=== Merge commit contains only .story_kit/ file moves, no code changes ===\n",
"=== Merge commit contains only .storkit/ file moves, no code changes ===\n",
);
cleanup_merge_workspace(project_root, &merge_wt_path, &merge_branch);
return Ok(SquashMergeResult {
@@ -272,7 +272,7 @@ pub(crate) fn run_squash_merge(
had_conflicts,
conflicts_resolved,
conflict_details: Some(
"Feature branch has no code changes outside .story_kit/ — only \
"Feature branch has no code changes outside .storkit/ — only \
pipeline file moves were found."
.to_string(),
),
@@ -960,7 +960,7 @@ after\n";
// Verify no leftover merge workspace directory.
assert!(
!repo.join(".story_kit/merge_workspace").exists(),
!repo.join(".storkit/merge_workspace").exists(),
"merge workspace should be cleaned up"
);
}
@@ -1076,7 +1076,7 @@ after\n";
.current_dir(repo)
.output()
.unwrap();
let sk_dir = repo.join(".story_kit/work/4_merge");
let sk_dir = repo.join(".storkit/work/4_merge");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("diverge_test.md"),
@@ -1135,7 +1135,7 @@ after\n";
"merge-queue branch should be cleaned up, got: {branch_list}"
);
assert!(
!repo.join(".story_kit/merge_workspace").exists(),
!repo.join(".storkit/merge_workspace").exists(),
"merge workspace should be cleaned up"
);
}
@@ -1188,13 +1188,13 @@ after\n";
// Cleanup should still happen.
assert!(
!repo.join(".story_kit/merge_workspace").exists(),
!repo.join(".storkit/merge_workspace").exists(),
"merge workspace should be cleaned up"
);
}
/// Bug 226: Verifies that `run_squash_merge` fails when the feature branch
/// only contains .story_kit/ file moves with no real code changes.
/// only contains .storkit/ file moves with no real code changes.
#[tokio::test]
async fn squash_merge_md_only_changes_fails() {
use std::fs;
@@ -1204,13 +1204,13 @@ after\n";
let repo = tmp.path();
init_git_repo(repo);
// Create a feature branch that only moves a .story_kit/ file.
// Create a feature branch that only moves a .storkit/ file.
Command::new("git")
.args(["checkout", "-b", "feature/story-md_only_test"])
.current_dir(repo)
.output()
.unwrap();
let sk_dir = repo.join(".story_kit/work/2_current");
let sk_dir = repo.join(".storkit/work/2_current");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("md_only_test.md"),
@@ -1236,17 +1236,17 @@ after\n";
let result =
run_squash_merge(repo, "feature/story-md_only_test", "md_only_test").unwrap();
// The squash merge will commit the .story_kit/ file, but should fail because
// there are no code changes outside .story_kit/.
// The squash merge will commit the .storkit/ file, but should fail because
// there are no code changes outside .storkit/.
assert!(
!result.success,
"merge with only .story_kit/ changes must fail: {}",
"merge with only .storkit/ changes must fail: {}",
result.output
);
// Cleanup should still happen.
assert!(
!repo.join(".story_kit/merge_workspace").exists(),
!repo.join(".storkit/merge_workspace").exists(),
"merge workspace should be cleaned up"
);
}
@@ -1359,7 +1359,7 @@ after\n";
"merge-queue branch must be cleaned up"
);
assert!(
!repo.join(".story_kit/merge_workspace").exists(),
!repo.join(".storkit/merge_workspace").exists(),
"merge workspace must be cleaned up"
);
}
@@ -1463,7 +1463,7 @@ after\n";
// Cleanup must still happen.
assert!(
!repo.join(".story_kit/merge_workspace").exists(),
!repo.join(".storkit/merge_workspace").exists(),
"merge workspace must be cleaned up even on gate failure"
);
}
@@ -1503,7 +1503,7 @@ after\n";
.unwrap();
// Simulate a stale merge workspace left from a previous failed merge.
let stale_ws = repo.join(".story_kit/merge_workspace");
let stale_ws = repo.join(".storkit/merge_workspace");
fs::create_dir_all(&stale_ws).unwrap();
fs::write(stale_ws.join("leftover.txt"), "stale").unwrap();
@@ -1524,7 +1524,7 @@ after\n";
// ── story 216: merge worktree uses project.toml component setup ───────────
/// When the project has `[[component]]` entries in `.story_kit/project.toml`,
/// When the project has `[[component]]` entries in `.storkit/project.toml`,
/// `run_squash_merge` must run their setup commands in the merge worktree
/// before quality gates — matching the behaviour of `create_worktree`.
#[cfg(unix)]
@@ -1537,9 +1537,9 @@ after\n";
let repo = tmp.path();
init_git_repo(repo);
// Add a .story_kit/project.toml with a component whose setup writes a
// Add a .storkit/project.toml with a component whose setup writes a
// sentinel file so we can confirm the command ran.
let sk_dir = repo.join(".story_kit");
let sk_dir = repo.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1612,7 +1612,7 @@ after\n";
let repo = tmp.path();
init_git_repo(repo);
// No .story_kit/project.toml — no component setup.
// No .storkit/project.toml — no component setup.
fs::write(repo.join("file.txt"), "initial").unwrap();
Command::new("git")
.args(["add", "."])

View File

@@ -72,7 +72,7 @@ impl AgentPool {
on feature branch. Writing merge_failure and blocking."
);
let story_path = project_root
.join(".story_kit/work")
.join(".storkit/work")
.join(stage_dir)
.join(format!("{story_id}.md"));
let _ = crate::io::story_metadata::write_merge_failure(
@@ -209,7 +209,7 @@ impl AgentPool {
/// (called immediately after) picks up the right next-stage agents.
///
/// Algorithm:
/// 1. List all worktree directories under `{project_root}/.story_kit/worktrees/`.
/// 1. List all worktree directories under `{project_root}/.storkit/worktrees/`.
/// 2. For each worktree, check whether its feature branch has commits ahead of the
/// base branch (`master` / `main`).
/// 3. If committed work is found AND the story is in `2_current/` or `3_qa/`:
@@ -344,7 +344,7 @@ impl AgentPool {
.unwrap_or_default()
.default_qa_mode();
let story_path = project_root
.join(".story_kit/work/2_current")
.join(".storkit/work/2_current")
.join(format!("{story_id}.md"));
crate::io::story_metadata::resolve_qa_mode(&story_path, default_qa)
}
@@ -395,7 +395,7 @@ impl AgentPool {
});
} else {
let story_path = project_root
.join(".story_kit/work/3_qa")
.join(".storkit/work/3_qa")
.join(format!("{story_id}.md"));
if let Err(e) = crate::io::story_metadata::write_review_hold(&story_path) {
eprintln!(
@@ -451,7 +451,7 @@ impl AgentPool {
true
} else {
let story_path = project_root
.join(".story_kit/work/3_qa")
.join(".storkit/work/3_qa")
.join(format!("{story_id}.md"));
let default_qa = crate::config::ProjectConfig::load(project_root)
.unwrap_or_default()
@@ -465,7 +465,7 @@ impl AgentPool {
if needs_human_review {
let story_path = project_root
.join(".story_kit/work/3_qa")
.join(".storkit/work/3_qa")
.join(format!("{story_id}.md"));
if let Err(e) = crate::io::story_metadata::write_review_hold(&story_path) {
eprintln!(
@@ -567,7 +567,7 @@ fn read_story_front_matter_agent(
) -> Option<String> {
use crate::io::story_metadata::parse_front_matter;
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage_dir)
.join(format!("{story_id}.md"));
@@ -579,7 +579,7 @@ fn read_story_front_matter_agent(
fn has_review_hold(project_root: &Path, stage_dir: &str, story_id: &str) -> bool {
use crate::io::story_metadata::parse_front_matter;
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage_dir)
.join(format!("{story_id}.md"));
@@ -597,7 +597,7 @@ fn has_review_hold(project_root: &Path, stage_dir: &str, story_id: &str) -> bool
fn is_story_blocked(project_root: &Path, stage_dir: &str, story_id: &str) -> bool {
use crate::io::story_metadata::parse_front_matter;
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage_dir)
.join(format!("{story_id}.md"));
@@ -615,7 +615,7 @@ fn is_story_blocked(project_root: &Path, stage_dir: &str, story_id: &str) -> boo
fn has_merge_failure(project_root: &Path, stage_dir: &str, story_id: &str) -> bool {
use crate::io::story_metadata::parse_front_matter;
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage_dir)
.join(format!("{story_id}.md"));
@@ -638,7 +638,7 @@ pub(super) fn is_agent_free(agents: &HashMap<String, StoryAgent>, agent_name: &s
}
fn scan_stage_items(project_root: &Path, stage_dir: &str) -> Vec<String> {
let dir = project_root.join(".story_kit").join("work").join(stage_dir);
let dir = project_root.join(".storkit").join("work").join(stage_dir);
if !dir.is_dir() {
return Vec::new();
}
@@ -875,7 +875,7 @@ mod tests {
fn scan_stage_items_returns_sorted_story_ids() {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let stage_dir = tmp.path().join(".story_kit").join("work").join("2_current");
let stage_dir = tmp.path().join(".storkit").join("work").join("2_current");
fs::create_dir_all(&stage_dir).unwrap();
fs::write(stage_dir.join("42_story_foo.md"), "---\nname: foo\n---").unwrap();
fs::write(stage_dir.join("10_story_bar.md"), "---\nname: bar\n---").unwrap();
@@ -1199,7 +1199,7 @@ stage = "coder"
#[tokio::test]
async fn auto_assign_picks_up_story_queued_in_current() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
let current = sk.join("work/2_current");
std::fs::create_dir_all(&current).unwrap();
std::fs::write(
@@ -1238,7 +1238,7 @@ stage = "coder"
let root = tmp.path();
// Create project.toml with a QA agent.
let sk = root.join(".story_kit");
let sk = root.join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),
@@ -1247,7 +1247,7 @@ stage = "coder"
.unwrap();
// Put a spike in 3_qa/ with review_hold: true.
let qa_dir = root.join(".story_kit/work/3_qa");
let qa_dir = root.join(".storkit/work/3_qa");
std::fs::create_dir_all(&qa_dir).unwrap();
std::fs::write(
qa_dir.join("20_spike_test.md"),
@@ -1276,7 +1276,7 @@ stage = "coder"
#[tokio::test]
async fn auto_assign_ignores_coder_preference_when_story_is_in_qa_stage() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
let qa_dir = sk.join("work/3_qa");
std::fs::create_dir_all(&qa_dir).unwrap();
std::fs::write(
@@ -1323,7 +1323,7 @@ stage = "coder"
#[tokio::test]
async fn auto_assign_respects_coder_preference_when_story_is_in_current_stage() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
let current_dir = sk.join("work/2_current");
std::fs::create_dir_all(&current_dir).unwrap();
std::fs::write(
@@ -1370,7 +1370,7 @@ stage = "coder"
#[tokio::test]
async fn auto_assign_stage_mismatch_with_no_fallback_starts_no_agent() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
let qa_dir = sk.join("work/3_qa");
std::fs::create_dir_all(&qa_dir).unwrap();
// Only a coder agent is configured — no QA agent exists.
@@ -1409,7 +1409,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
// Two stories waiting in 2_current, one coder agent.
fs::create_dir_all(sk_dir.join("work/2_current")).unwrap();
fs::write(
@@ -1463,7 +1463,7 @@ stage = "coder"
#[test]
fn has_review_hold_returns_true_when_set() {
let tmp = tempfile::tempdir().unwrap();
let qa_dir = tmp.path().join(".story_kit/work/3_qa");
let qa_dir = tmp.path().join(".storkit/work/3_qa");
std::fs::create_dir_all(&qa_dir).unwrap();
let spike_path = qa_dir.join("10_spike_research.md");
std::fs::write(
@@ -1477,7 +1477,7 @@ stage = "coder"
#[test]
fn has_review_hold_returns_false_when_not_set() {
let tmp = tempfile::tempdir().unwrap();
let qa_dir = tmp.path().join(".story_kit/work/3_qa");
let qa_dir = tmp.path().join(".storkit/work/3_qa");
std::fs::create_dir_all(&qa_dir).unwrap();
let spike_path = qa_dir.join("10_spike_research.md");
std::fs::write(&spike_path, "---\nname: Research spike\n---\n# Spike\n").unwrap();
@@ -1702,13 +1702,13 @@ stage = "coder"
let root = tmp.path();
// Set up story in 2_current/.
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("60_story_test.md"), "test").unwrap();
// Create a worktree directory that is a fresh git repo with no commits
// ahead of its own base branch (simulates a worktree where no work was done).
let wt_dir = root.join(".story_kit/worktrees/60_story_test");
let wt_dir = root.join(".storkit/worktrees/60_story_test");
fs::create_dir_all(&wt_dir).unwrap();
init_git_repo(&wt_dir);
@@ -1733,7 +1733,7 @@ stage = "coder"
init_git_repo(root);
// Set up story in 2_current/ and commit it so the project root is clean.
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("61_story_test.md"), "test").unwrap();
Command::new("git")
@@ -1756,7 +1756,7 @@ stage = "coder"
.unwrap();
// Create a real git worktree for the story.
let wt_dir = root.join(".story_kit/worktrees/61_story_test");
let wt_dir = root.join(".storkit/worktrees/61_story_test");
fs::create_dir_all(wt_dir.parent().unwrap()).unwrap();
Command::new("git")
.args([
@@ -1804,7 +1804,7 @@ stage = "coder"
// and the story stays in 2_current/. The important assertion is that
// reconcile ran without panicking and the story is in a consistent state.
let in_current = current.join("61_story_test.md").exists();
let in_qa = root.join(".story_kit/work/3_qa/61_story_test.md").exists();
let in_qa = root.join(".storkit/work/3_qa/61_story_test.md").exists();
assert!(
in_current || in_qa,
"story should be in 2_current/ or 3_qa/ after reconciliation"

View File

@@ -1037,7 +1037,7 @@ fn find_active_story_stage(project_root: &Path, story_id: &str) -> Option<&'stat
const STAGES: [&str; 3] = ["2_current", "3_qa", "4_merge"];
for stage in &STAGES {
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage)
.join(format!("{story_id}.md"));
@@ -1301,7 +1301,7 @@ stage = "coder"
#[tokio::test]
async fn start_agent_auto_selects_second_coder_when_first_busy() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),
@@ -1347,7 +1347,7 @@ stage = "coder"
#[tokio::test]
async fn start_agent_returns_busy_when_all_coders_occupied() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),
@@ -1379,7 +1379,7 @@ stage = "coder"
#[tokio::test]
async fn start_agent_moves_story_to_current_when_coders_busy() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
let backlog = sk.join("work/1_backlog");
std::fs::create_dir_all(&backlog).unwrap();
std::fs::write(
@@ -1424,7 +1424,7 @@ stage = "coder"
#[tokio::test]
async fn start_agent_story_already_in_current_is_noop() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
let current = sk.join("work/2_current");
std::fs::create_dir_all(&current).unwrap();
std::fs::write(
@@ -1451,7 +1451,7 @@ stage = "coder"
#[tokio::test]
async fn start_agent_explicit_name_unchanged_when_busy() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),
@@ -1490,7 +1490,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(sk_dir.join("project.toml"), "[[agent]]\nname = \"qa\"\n").unwrap();
@@ -1517,7 +1517,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(sk_dir.join("project.toml"), "[[agent]]\nname = \"qa\"\n").unwrap();
@@ -1543,7 +1543,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1551,7 +1551,7 @@ stage = "coder"
)
.unwrap();
let upcoming = root.join(".story_kit/work/1_backlog");
let upcoming = root.join(".storkit/work/1_backlog");
fs::create_dir_all(&upcoming).unwrap();
fs::write(upcoming.join("50_story_test.md"), "---\nname: Test\n---\n").unwrap();
@@ -1609,7 +1609,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(sk_dir.join("project.toml"), "[[agent]]\nname = \"qa\"\n").unwrap();
@@ -1635,7 +1635,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1666,20 +1666,20 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/1_backlog")).unwrap();
fs::write(
root.join(".story_kit/project.toml"),
root.join(".storkit/project.toml"),
"[[agent]]\nname = \"coder-1\"\n",
)
.unwrap();
fs::write(
root.join(".story_kit/work/1_backlog/86_story_foo.md"),
root.join(".storkit/work/1_backlog/86_story_foo.md"),
"---\nname: Foo\n---\n",
)
.unwrap();
fs::write(
root.join(".story_kit/work/1_backlog/130_story_bar.md"),
root.join(".storkit/work/1_backlog/130_story_bar.md"),
"---\nname: Bar\n---\n",
)
.unwrap();
@@ -1731,7 +1731,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1768,7 +1768,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1800,15 +1800,15 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/2_current")).unwrap();
fs::write(
root.join(".story_kit/project.toml"),
root.join(".storkit/project.toml"),
"[[agent]]\nname = \"coder-1\"\n\n[[agent]]\nname = \"coder-2\"\n",
)
.unwrap();
fs::write(
root.join(".story_kit/work/2_current/42_story_foo.md"),
root.join(".storkit/work/2_current/42_story_foo.md"),
"---\nname: Foo\n---\n",
)
.unwrap();
@@ -1854,15 +1854,15 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/1_backlog")).unwrap();
fs::write(
root.join(".story_kit/project.toml"),
root.join(".storkit/project.toml"),
"[[agent]]\nname = \"coder-1\"\n\n[[agent]]\nname = \"coder-2\"\n",
)
.unwrap();
fs::write(
root.join(".story_kit/work/1_backlog/99_story_baz.md"),
root.join(".storkit/work/1_backlog/99_story_baz.md"),
"---\nname: Baz\n---\n",
)
.unwrap();
@@ -1892,7 +1892,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/2_current")).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1929,7 +1929,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/3_qa")).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -1966,7 +1966,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/4_merge")).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -2003,7 +2003,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/2_current")).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -2039,7 +2039,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
fs::create_dir_all(sk_dir.join("work/4_merge")).unwrap();
fs::write(
sk_dir.join("project.toml"),
@@ -2075,7 +2075,7 @@ stage = "coder"
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("10_story_test.md"), "test").unwrap();
@@ -2090,7 +2090,7 @@ stage = "coder"
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let qa = root.join(".story_kit/work/3_qa");
let qa = root.join(".storkit/work/3_qa");
fs::create_dir_all(&qa).unwrap();
fs::write(qa.join("11_story_test.md"), "test").unwrap();
@@ -2102,7 +2102,7 @@ stage = "coder"
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let merge = root.join(".story_kit/work/4_merge");
let merge = root.join(".storkit/work/4_merge");
fs::create_dir_all(&merge).unwrap();
fs::write(merge.join("12_story_test.md"), "test").unwrap();
@@ -2157,7 +2157,7 @@ stage = "coder"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("60_story_cleanup.md"), "test").unwrap();
@@ -2180,7 +2180,7 @@ stage = "coder"
assert_eq!(remaining[0].story_id, "61_story_other");
assert!(
root.join(".story_kit/work/5_done/60_story_cleanup.md")
root.join(".storkit/work/5_done/60_story_cleanup.md")
.exists()
);
}

View File

@@ -56,7 +56,7 @@ impl AgentPool {
let default_qa = config.default_qa_mode();
// Story is in 2_current/ when a coder completes.
let story_path = project_root
.join(".story_kit/work/2_current")
.join(".storkit/work/2_current")
.join(format!("{story_id}.md"));
crate::io::story_metadata::resolve_qa_mode(&story_path, default_qa)
}
@@ -105,7 +105,7 @@ impl AgentPool {
if let Err(e) = super::super::lifecycle::move_story_to_qa(&project_root, story_id) {
slog_error!("[pipeline] Failed to move '{story_id}' to 3_qa/: {e}");
} else {
let qa_dir = project_root.join(".story_kit/work/3_qa");
let qa_dir = project_root.join(".storkit/work/3_qa");
let story_path = qa_dir.join(format!("{story_id}.md"));
if let Err(e) =
crate::io::story_metadata::write_review_hold(&story_path)
@@ -120,7 +120,7 @@ impl AgentPool {
} else {
// Increment retry count and check if blocked.
let story_path = project_root
.join(".story_kit/work/2_current")
.join(".storkit/work/2_current")
.join(format!("{story_id}.md"));
if should_block_story(&story_path, config.max_retries, story_id, "coder") {
// Story has exceeded retry limit — do not restart.
@@ -171,7 +171,7 @@ impl AgentPool {
if item_type == "spike" {
true // Spikes always need human review.
} else {
let qa_dir = project_root.join(".story_kit/work/3_qa");
let qa_dir = project_root.join(".storkit/work/3_qa");
let story_path = qa_dir.join(format!("{story_id}.md"));
let default_qa = config.default_qa_mode();
matches!(
@@ -183,7 +183,7 @@ impl AgentPool {
if needs_human_review {
// Hold in 3_qa/ for human review.
let qa_dir = project_root.join(".story_kit/work/3_qa");
let qa_dir = project_root.join(".storkit/work/3_qa");
let story_path = qa_dir.join(format!("{story_id}.md"));
if let Err(e) =
crate::io::story_metadata::write_review_hold(&story_path)
@@ -219,7 +219,7 @@ impl AgentPool {
}
} else {
let story_path = project_root
.join(".story_kit/work/3_qa")
.join(".storkit/work/3_qa")
.join(format!("{story_id}.md"));
if should_block_story(&story_path, config.max_retries, story_id, "qa-coverage") {
// Story has exceeded retry limit — do not restart.
@@ -243,7 +243,7 @@ impl AgentPool {
}
} else {
let story_path = project_root
.join(".story_kit/work/3_qa")
.join(".storkit/work/3_qa")
.join(format!("{story_id}.md"));
if should_block_story(&story_path, config.max_retries, story_id, "qa") {
// Story has exceeded retry limit — do not restart.
@@ -319,7 +319,7 @@ impl AgentPool {
);
} else {
let story_path = project_root
.join(".story_kit/work/4_merge")
.join(".storkit/work/4_merge")
.join(format!("{story_id}.md"));
if should_block_story(&story_path, config.max_retries, story_id, "mergemaster") {
// Story has exceeded retry limit — do not restart.
@@ -1125,7 +1125,7 @@ mod tests {
let root = tmp.path();
// Set up story in 2_current/ (no qa frontmatter → uses project default "server")
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("50_story_test.md"), "test").unwrap();
@@ -1146,7 +1146,7 @@ mod tests {
// With default qa: server, story skips QA and goes straight to 4_merge/
assert!(
root.join(".story_kit/work/4_merge/50_story_test.md")
root.join(".storkit/work/4_merge/50_story_test.md")
.exists(),
"story should be in 4_merge/"
);
@@ -1163,7 +1163,7 @@ mod tests {
let root = tmp.path();
// Set up story in 2_current/ with qa: agent frontmatter
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(
current.join("50_story_test.md"),
@@ -1188,7 +1188,7 @@ mod tests {
// With qa: agent, story should move to 3_qa/
assert!(
root.join(".story_kit/work/3_qa/50_story_test.md").exists(),
root.join(".storkit/work/3_qa/50_story_test.md").exists(),
"story should be in 3_qa/"
);
assert!(
@@ -1204,7 +1204,7 @@ mod tests {
let root = tmp.path();
// Set up story in 3_qa/
let qa_dir = root.join(".story_kit/work/3_qa");
let qa_dir = root.join(".storkit/work/3_qa");
fs::create_dir_all(&qa_dir).unwrap();
// qa: server so the story skips human review and goes straight to merge.
fs::write(
@@ -1230,7 +1230,7 @@ mod tests {
// Story should have moved to 4_merge/
assert!(
root.join(".story_kit/work/4_merge/51_story_test.md")
root.join(".storkit/work/4_merge/51_story_test.md")
.exists(),
"story should be in 4_merge/"
);
@@ -1246,7 +1246,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("52_story_test.md"), "test").unwrap();
@@ -1280,18 +1280,18 @@ mod tests {
let root = tmp.path();
// Set up story in 2_current/
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("173_story_test.md"), "test").unwrap();
// Ensure 3_qa/ exists for the move target
fs::create_dir_all(root.join(".story_kit/work/3_qa")).unwrap();
fs::create_dir_all(root.join(".storkit/work/3_qa")).unwrap();
// Ensure 1_backlog/ exists (start_agent calls move_story_to_current)
fs::create_dir_all(root.join(".story_kit/work/1_backlog")).unwrap();
fs::create_dir_all(root.join(".storkit/work/1_backlog")).unwrap();
// Write a project.toml with a qa agent so start_agent can resolve it.
fs::create_dir_all(root.join(".story_kit")).unwrap();
fs::create_dir_all(root.join(".storkit")).unwrap();
fs::write(
root.join(".story_kit/project.toml"),
root.join(".storkit/project.toml"),
r#"
default_qa = "agent"
@@ -1426,7 +1426,7 @@ stage = "qa"
.unwrap();
// Create the story file in 4_merge/ so we can test archival
let merge_dir = repo.join(".story_kit/work/4_merge");
let merge_dir = repo.join(".storkit/work/4_merge");
fs::create_dir_all(&merge_dir).unwrap();
let story_file = merge_dir.join("23_test.md");
fs::write(&story_file, "---\nname: Test\n---\n").unwrap();
@@ -1454,7 +1454,7 @@ stage = "qa"
"report should be coherent: {report:?}"
);
if report.story_archived {
let done = repo.join(".story_kit/work/5_done/23_test.md");
let done = repo.join(".storkit/work/5_done/23_test.md");
assert!(done.exists(), "done file should exist");
}
}
@@ -1639,7 +1639,7 @@ stage = "qa"
.unwrap();
// Create story file in 4_merge.
let merge_dir = repo.join(".story_kit/work/4_merge");
let merge_dir = repo.join(".storkit/work/4_merge");
fs::create_dir_all(&merge_dir).unwrap();
fs::write(merge_dir.join("42_story_foo.md"), "---\nname: Test\n---\n").unwrap();
Command::new("git")
@@ -1689,7 +1689,7 @@ stage = "qa"
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let sk = root.join(".story_kit");
let sk = root.join(".storkit");
let qa_dir = sk.join("work/3_qa");
fs::create_dir_all(&qa_dir).unwrap();

View File

@@ -20,7 +20,7 @@ pub struct TokenUsageRecord {
/// Append a token usage record to the persistent JSONL file.
///
/// Each line is a self-contained JSON object, making appends atomic and
/// reads simple. The file lives at `.story_kit/token_usage.jsonl`.
/// reads simple. The file lives at `.storkit/token_usage.jsonl`.
pub fn append_record(project_root: &Path, record: &TokenUsageRecord) -> Result<(), String> {
let path = token_usage_path(project_root);
if let Some(parent) = path.parent() {
@@ -87,7 +87,7 @@ pub fn build_record(
}
fn token_usage_path(project_root: &Path) -> std::path::PathBuf {
project_root.join(".story_kit").join("token_usage.jsonl")
project_root.join(".storkit").join("token_usage.jsonl")
}
#[cfg(test)]
@@ -147,7 +147,7 @@ mod tests {
fn malformed_lines_are_skipped() {
let dir = TempDir::new().unwrap();
let root = dir.path();
let path = root.join(".story_kit").join("token_usage.jsonl");
let path = root.join(".storkit").join("token_usage.jsonl");
fs::create_dir_all(path.parent().unwrap()).unwrap();
fs::write(&path, "not json\n{\"bad\":true}\n").unwrap();

View File

@@ -137,7 +137,7 @@ fn default_agent_command() -> String {
fn default_agent_prompt() -> String {
"You are working in a git worktree on story {{story_id}}. \
Read .story_kit/README.md to understand the dev process, then pick up the story. \
Read .storkit/README.md to understand the dev process, then pick up the story. \
Commit all your work when done — the server will automatically run acceptance \
gates (cargo clippy + tests) when your process exits."
.to_string()
@@ -189,13 +189,13 @@ impl Default for ProjectConfig {
}
impl ProjectConfig {
/// Load from `.story_kit/project.toml` relative to the given root.
/// Load from `.storkit/project.toml` relative to the given root.
/// Falls back to sensible defaults if the file doesn't exist.
///
/// Supports both the new `[[agent]]` array format and the legacy
/// `[agent]` single-table format (with a deprecation warning).
pub fn load(project_root: &Path) -> Result<Self, String> {
let config_path = project_root.join(".story_kit/project.toml");
let config_path = project_root.join(".storkit/project.toml");
if !config_path.exists() {
return Ok(Self::default());
}
@@ -582,7 +582,7 @@ name = "second"
#[test]
fn parse_project_toml_from_file() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("project.toml"),
@@ -663,7 +663,7 @@ name = "coder"
#[test]
fn watcher_config_from_file() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("project.toml"),

View File

@@ -158,7 +158,7 @@ struct AllTokenUsageResponse {
/// response so the agents panel is not cluttered with old completed items on
/// frontend startup.
pub fn story_is_archived(project_root: &path::Path, story_id: &str) -> bool {
let work = project_root.join(".story_kit").join("work");
let work = project_root.join(".storkit").join("work");
let filename = format!("{story_id}.md");
work.join("5_done").join(&filename).exists()
|| work.join("6_archived").join(&filename).exists()
@@ -316,7 +316,7 @@ impl AgentsApi {
))
}
/// Create a git worktree for a story under .story_kit/worktrees/{story_id}.
/// Create a git worktree for a story under .storkit/worktrees/{story_id}.
#[oai(path = "/agents/worktrees", method = "post")]
async fn create_worktree(
&self,
@@ -343,7 +343,7 @@ impl AgentsApi {
}))
}
/// List all worktrees under .story_kit/worktrees/.
/// List all worktrees under .storkit/worktrees/.
#[oai(path = "/agents/worktrees", method = "get")]
async fn list_worktrees(&self) -> OpenApiResult<Json<Vec<WorktreeListEntry>>> {
let project_root = self
@@ -389,7 +389,7 @@ impl AgentsApi {
("6_archived", "archived"),
];
let work_dir = project_root.join(".story_kit").join("work");
let work_dir = project_root.join(".storkit").join("work");
let filename = format!("{}.md", story_id.0);
for (stage_dir, stage_name) in &stages {
@@ -604,7 +604,7 @@ mod tests {
fn make_work_dirs(tmp: &TempDir) -> path::PathBuf {
let root = tmp.path().to_path_buf();
for stage in &["5_done", "6_archived"] {
std::fs::create_dir_all(root.join(".story_kit").join("work").join(stage)).unwrap();
std::fs::create_dir_all(root.join(".storkit").join("work").join(stage)).unwrap();
}
root
}
@@ -621,7 +621,7 @@ mod tests {
let tmp = TempDir::new().unwrap();
let root = make_work_dirs(&tmp);
std::fs::write(
root.join(".story_kit/work/5_done/79_story_foo.md"),
root.join(".storkit/work/5_done/79_story_foo.md"),
"---\nname: test\n---\n",
)
.unwrap();
@@ -633,7 +633,7 @@ mod tests {
let tmp = TempDir::new().unwrap();
let root = make_work_dirs(&tmp);
std::fs::write(
root.join(".story_kit/work/6_archived/79_story_foo.md"),
root.join(".storkit/work/6_archived/79_story_foo.md"),
"---\nname: test\n---\n",
)
.unwrap();
@@ -647,7 +647,7 @@ mod tests {
// Place an archived story file in 6_archived
std::fs::write(
root.join(".story_kit/work/6_archived/79_story_archived.md"),
root.join(".storkit/work/6_archived/79_story_archived.md"),
"---\nname: archived story\n---\n",
)
.unwrap();
@@ -694,7 +694,7 @@ mod tests {
}
fn make_project_toml(root: &path::Path, content: &str) {
let sk_dir = root.join(".story_kit");
let sk_dir = root.join(".storkit");
std::fs::create_dir_all(&sk_dir).unwrap();
std::fs::write(sk_dir.join("project.toml"), content).unwrap();
}
@@ -830,7 +830,7 @@ allowed_tools = ["Read", "Bash"]
#[tokio::test]
async fn list_worktrees_returns_entries_from_dir() {
let tmp = TempDir::new().unwrap();
let worktrees_dir = tmp.path().join(".story_kit").join("worktrees");
let worktrees_dir = tmp.path().join(".storkit").join("worktrees");
std::fs::create_dir_all(worktrees_dir.join("42_story_foo")).unwrap();
std::fs::create_dir_all(worktrees_dir.join("43_story_bar")).unwrap();
@@ -935,7 +935,7 @@ allowed_tools = ["Read", "Bash"]
// --- get_work_item_content tests ---
fn make_stage_dir(root: &path::Path, stage: &str) {
std::fs::create_dir_all(root.join(".story_kit").join("work").join(stage)).unwrap();
std::fs::create_dir_all(root.join(".storkit").join("work").join(stage)).unwrap();
}
#[tokio::test]
@@ -944,7 +944,7 @@ allowed_tools = ["Read", "Bash"]
let root = tmp.path();
make_stage_dir(root, "1_backlog");
std::fs::write(
root.join(".story_kit/work/1_backlog/42_story_foo.md"),
root.join(".storkit/work/1_backlog/42_story_foo.md"),
"---\nname: \"Foo Story\"\n---\n\n# Story 42: Foo Story\n\nSome content.",
)
.unwrap();
@@ -968,7 +968,7 @@ allowed_tools = ["Read", "Bash"]
let root = tmp.path();
make_stage_dir(root, "2_current");
std::fs::write(
root.join(".story_kit/work/2_current/43_story_bar.md"),
root.join(".storkit/work/2_current/43_story_bar.md"),
"---\nname: \"Bar Story\"\n---\n\nBar content.",
)
.unwrap();
@@ -1244,7 +1244,7 @@ allowed_tools = ["Read", "Bash"]
let root = tmp.path().to_path_buf();
// Create work dirs including 2_current for the story file.
for stage in &["1_backlog", "2_current", "5_done", "6_archived"] {
std::fs::create_dir_all(root.join(".story_kit").join("work").join(stage)).unwrap();
std::fs::create_dir_all(root.join(".storkit").join("work").join(stage)).unwrap();
}
// Write a story file with persisted test results.
@@ -1258,7 +1258,7 @@ name: "Test story"
<!-- story-kit-test-results: {"unit":[{"name":"from_file","status":"pass","details":null}],"integration":[]} -->
"#;
std::fs::write(
root.join(".story_kit/work/2_current/42_story_foo.md"),
root.join(".storkit/work/2_current/42_story_foo.md"),
story_content,
)
.unwrap();

View File

@@ -59,7 +59,7 @@ impl AppContext {
pub fn new_test(project_root: std::path::PathBuf) -> Self {
let state = SessionState::default();
*state.project_root.lock().unwrap() = Some(project_root.clone());
let store_path = project_root.join(".story_kit_store.json");
let store_path = project_root.join(".storkit_store.json");
let (watcher_tx, _) = broadcast::channel(64);
let (reconciliation_tx, _) = broadcast::channel(64);
let (perm_tx, perm_rx) = mpsc::unbounded_channel();

View File

@@ -42,11 +42,11 @@ pub(super) async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<S
/// Try to read the overall line coverage percentage from the llvm-cov JSON report.
///
/// Expects the file at `{project_root}/.story_kit/coverage/server.json`.
/// Expects the file at `{project_root}/.storkit/coverage/server.json`.
/// Returns `None` if the file is absent, unreadable, or cannot be parsed.
pub(super) fn read_coverage_percent_from_json(project_root: &std::path::Path) -> Option<f64> {
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("coverage")
.join("server.json");
let contents = std::fs::read_to_string(&path).ok()?;
@@ -489,7 +489,7 @@ mod tests {
// Config has only a supervisor — start_agent without agent_name should
// refuse rather than silently assigning supervisor.
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),
@@ -517,7 +517,7 @@ stage = "other"
// missing git repo / worktree, but the error must NOT be about
// "No coder agent configured".
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),
@@ -713,7 +713,7 @@ stage = "coder"
fn read_coverage_percent_from_json_parses_llvm_cov_format() {
use std::fs;
let tmp = tempfile::tempdir().unwrap();
let cov_dir = tmp.path().join(".story_kit/coverage");
let cov_dir = tmp.path().join(".storkit/coverage");
fs::create_dir_all(&cov_dir).unwrap();
let json_content = r#"{"data":[{"totals":{"lines":{"count":100,"covered":78,"percent":78.0}}}]}"#;
fs::write(cov_dir.join("server.json"), json_content).unwrap();

View File

@@ -98,13 +98,13 @@ pub(super) async fn tool_rebuild_and_restart(ctx: &AppContext) -> Result<String,
// Remove the port file before re-exec so the new process can write its own.
if let Ok(root) = ctx.state.get_project_root() {
let port_file = root.join(".story_kit_port");
let port_file = root.join(".storkit_port");
if port_file.exists() {
let _ = std::fs::remove_file(&port_file);
}
}
// Also check cwd for port file.
let cwd_port_file = std::path::Path::new(".story_kit_port");
let cwd_port_file = std::path::Path::new(".storkit_port");
if cwd_port_file.exists() {
let _ = std::fs::remove_file(cwd_port_file);
}
@@ -723,7 +723,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
// Seed project root in state so get_project_root works
let backlog = root.join(".story_kit/work/1_backlog");
let backlog = root.join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join("1_story_test.md"), "---\nname: Test\n---\n").unwrap();
let ctx = test_ctx(root);
@@ -739,8 +739,8 @@ mod tests {
fn tool_move_story_moves_from_backlog_to_current() {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let backlog = root.join(".story_kit/work/1_backlog");
let current = root.join(".story_kit/work/2_current");
let backlog = root.join(".storkit/work/1_backlog");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&backlog).unwrap();
fs::create_dir_all(&current).unwrap();
fs::write(backlog.join("5_story_test.md"), "---\nname: Test\n---\n").unwrap();
@@ -764,8 +764,8 @@ mod tests {
fn tool_move_story_moves_from_current_to_backlog() {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let backlog = root.join(".story_kit/work/1_backlog");
let current = root.join(".storkit/work/2_current");
let backlog = root.join(".storkit/work/1_backlog");
fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&backlog).unwrap();
fs::write(current.join("6_story_back.md"), "---\nname: Back\n---\n").unwrap();
@@ -788,7 +788,7 @@ mod tests {
fn tool_move_story_idempotent_when_already_in_target() {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("7_story_idem.md"), "---\nname: Idem\n---\n").unwrap();

View File

@@ -138,7 +138,7 @@ pub(super) fn tool_report_merge_failure(args: &Value, ctx: &AppContext) -> Resul
// survives server restarts and is visible in the web UI.
if let Ok(project_root) = ctx.state.get_project_root() {
let story_file = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join("4_merge")
.join(format!("{story_id}.md"));
@@ -247,7 +247,7 @@ mod tests {
async fn tool_move_story_to_merge_moves_file() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo_in(tmp.path());
let current_dir = tmp.path().join(".story_kit/work/2_current");
let current_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&current_dir).unwrap();
let story_file = current_dir.join("24_story_test.md");
std::fs::write(&story_file, "---\nname: Test\n---\n").unwrap();
@@ -268,7 +268,7 @@ mod tests {
// File should have been moved regardless of agent start outcome
assert!(!story_file.exists(), "2_current file should be gone");
assert!(
tmp.path().join(".story_kit/work/4_merge/24_story_test.md").exists(),
tmp.path().join(".storkit/work/4_merge/24_story_test.md").exists(),
"4_merge file should exist"
);
// Result is either Ok (agent started) or Err (agent failed - acceptable in tests)

View File

@@ -534,7 +534,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "create_worktree",
"description": "Create a git worktree for a story under .story_kit/worktrees/{story_id} with deterministic naming. Writes .mcp.json and runs component setup. Returns the worktree path.",
"description": "Create a git worktree for a story under .storkit/worktrees/{story_id} with deterministic naming. Writes .mcp.json and runs component setup. Returns the worktree path.",
"inputSchema": {
"type": "object",
"properties": {
@@ -548,7 +548,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "list_worktrees",
"description": "List all worktrees under .story_kit/worktrees/ for the current project.",
"description": "List all worktrees under .storkit/worktrees/ for the current project.",
"inputSchema": {
"type": "object",
"properties": {}
@@ -667,7 +667,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "create_spike",
"description": "Create a spike file in .story_kit/work/1_backlog/ with a deterministic filename and YAML front matter. Returns the spike_id.",
"description": "Create a spike file in .storkit/work/1_backlog/ with a deterministic filename and YAML front matter. Returns the spike_id.",
"inputSchema": {
"type": "object",
"properties": {
@@ -891,7 +891,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "launch_qa_app",
"description": "Launch the app from a story's worktree for manual QA testing. Automatically assigns a free port, writes it to .story_kit_port, and starts the backend server. Only one QA app instance runs at a time.",
"description": "Launch the app from a story's worktree for manual QA testing. Automatically assigns a free port, writes it to .storkit_port, and starts the backend server. Only one QA app instance runs at a time.",
"inputSchema": {
"type": "object",
"properties": {
@@ -1006,7 +1006,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "run_command",
"description": "Execute a shell command in an agent's worktree directory. The working_dir must be inside .story_kit/worktrees/. Returns stdout, stderr, exit_code, and timed_out. Supports SSE streaming (send Accept: text/event-stream) for long-running commands. Dangerous commands (rm -rf /, sudo, etc.) are blocked.",
"description": "Execute a shell command in an agent's worktree directory. The working_dir must be inside .storkit/worktrees/. Returns stdout, stderr, exit_code, and timed_out. Supports SSE streaming (send Accept: text/event-stream) for long-running commands. Dangerous commands (rm -rf /, sudo, etc.) are blocked.",
"inputSchema": {
"type": "object",
"properties": {
@@ -1016,7 +1016,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
"working_dir": {
"type": "string",
"description": "Absolute path to the worktree directory to run the command in. Must be inside .story_kit/worktrees/."
"description": "Absolute path to the worktree directory to run the command in. Must be inside .storkit/worktrees/."
},
"timeout": {
"type": "integer",

View File

@@ -48,7 +48,7 @@ pub(super) async fn tool_approve_qa(args: &Value, ctx: &AppContext) -> Result<St
// Clear review_hold before moving
let qa_path = project_root
.join(".story_kit/work/3_qa")
.join(".storkit/work/3_qa")
.join(format!("{story_id}.md"));
if qa_path.exists() {
let _ = crate::io::story_metadata::clear_front_matter_field(&qa_path, "review_hold");
@@ -92,7 +92,7 @@ pub(super) async fn tool_reject_qa(args: &Value, ctx: &AppContext) -> Result<Str
// Restart the coder agent with rejection context
let story_path = project_root
.join(".story_kit/work/2_current")
.join(".storkit/work/2_current")
.join(format!("{story_id}.md"));
let agent_name = if story_path.exists() {
let contents = std::fs::read_to_string(&story_path).unwrap_or_default();
@@ -152,10 +152,10 @@ pub(super) async fn tool_launch_qa_app(args: &Value, ctx: &AppContext) -> Result
// Find a free port starting from 3100
let port = find_free_port(3100);
// Write .story_kit_port so the frontend dev server knows where to connect
let port_file = wt_path.join(".story_kit_port");
// Write .storkit_port so the frontend dev server knows where to connect
let port_file = wt_path.join(".storkit_port");
std::fs::write(&port_file, port.to_string())
.map_err(|e| format!("Failed to write .story_kit_port: {e}"))?;
.map_err(|e| format!("Failed to write .storkit_port: {e}"))?;
// Launch the server from the worktree
let child = std::process::Command::new("cargo")

View File

@@ -59,7 +59,7 @@ fn is_dangerous(command: &str) -> Option<String> {
}
/// Validates that `working_dir` exists and is inside the project's
/// `.story_kit/worktrees/` directory. Returns the canonicalized path.
/// `.storkit/worktrees/` directory. Returns the canonicalized path.
fn validate_working_dir(working_dir: &str, ctx: &AppContext) -> Result<PathBuf, String> {
let wd = PathBuf::from(working_dir);
@@ -71,7 +71,7 @@ fn validate_working_dir(working_dir: &str, ctx: &AppContext) -> Result<PathBuf,
}
let project_root = ctx.agents.get_project_root(&ctx.state)?;
let worktrees_root = project_root.join(".story_kit").join("worktrees");
let worktrees_root = project_root.join(".storkit").join("worktrees");
let canonical_wd = wd
.canonicalize()
@@ -88,7 +88,7 @@ fn validate_working_dir(working_dir: &str, ctx: &AppContext) -> Result<PathBuf,
if !canonical_wd.starts_with(&canonical_wt) {
return Err(format!(
"working_dir must be inside .story_kit/worktrees/. Got: {working_dir}"
"working_dir must be inside .storkit/worktrees/. Got: {working_dir}"
));
}
@@ -410,14 +410,14 @@ mod tests {
fn validate_working_dir_rejects_path_outside_worktrees() {
let tmp = tempfile::tempdir().unwrap();
// Create the worktrees dir so it exists
let wt_dir = tmp.path().join(".story_kit").join("worktrees");
let wt_dir = tmp.path().join(".storkit").join("worktrees");
std::fs::create_dir_all(&wt_dir).unwrap();
let ctx = test_ctx(tmp.path());
// Try to use /tmp (outside worktrees)
let result = validate_working_dir(tmp.path().to_str().unwrap(), &ctx);
assert!(result.is_err());
assert!(
result.unwrap_err().contains("inside .story_kit/worktrees"),
result.unwrap_err().contains("inside .storkit/worktrees"),
"expected sandbox error"
);
}
@@ -427,7 +427,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let story_wt = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join("42_test_story");
std::fs::create_dir_all(&story_wt).unwrap();
@@ -481,7 +481,7 @@ mod tests {
#[tokio::test]
async fn tool_run_command_rejects_path_outside_worktrees() {
let tmp = tempfile::tempdir().unwrap();
let wt_dir = tmp.path().join(".story_kit").join("worktrees");
let wt_dir = tmp.path().join(".storkit").join("worktrees");
std::fs::create_dir_all(&wt_dir).unwrap();
let ctx = test_ctx(tmp.path());
let result = tool_run_command(
@@ -504,7 +504,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let story_wt = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join("42_test");
std::fs::create_dir_all(&story_wt).unwrap();
@@ -532,7 +532,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let story_wt = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join("43_test");
std::fs::create_dir_all(&story_wt).unwrap();
@@ -558,7 +558,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let story_wt = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join("44_test");
std::fs::create_dir_all(&story_wt).unwrap();
@@ -584,7 +584,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let story_wt = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join("45_test");
std::fs::create_dir_all(&story_wt).unwrap();

View File

@@ -124,7 +124,7 @@ pub(super) fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result<Str
.ok_or("Missing required argument: story_id")?;
let root = ctx.state.get_project_root()?;
let current_dir = root.join(".story_kit").join("work").join("2_current");
let current_dir = root.join(".storkit").join("work").join("2_current");
let filepath = current_dir.join(format!("{story_id}.md"));
if !filepath.exists() {
@@ -414,7 +414,7 @@ pub(super) async fn tool_delete_story(args: &Value, ctx: &AppContext) -> Result<
}
// 4. Find and delete the story file from any pipeline stage
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
let stage_dirs = ["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"];
let mut deleted = false;
for stage in &stage_dirs {
@@ -626,7 +626,7 @@ mod tests {
("4_merge", "40_story_merge", "Merge Story"),
("5_done", "50_story_done", "Done Story"),
] {
let dir = root.join(".story_kit/work").join(stage);
let dir = root.join(".storkit/work").join(stage);
std::fs::create_dir_all(&dir).unwrap();
std::fs::write(
dir.join(format!("{id}.md")),
@@ -661,7 +661,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
std::fs::create_dir_all(&current).unwrap();
std::fs::write(
current.join("20_story_active.md"),
@@ -701,7 +701,7 @@ mod tests {
#[test]
fn tool_get_story_todos_returns_unchecked() {
let tmp = tempfile::tempdir().unwrap();
let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
let current_dir = tmp.path().join(".storkit").join("work").join("2_current");
fs::create_dir_all(&current_dir).unwrap();
fs::write(
current_dir.join("1_test.md"),
@@ -795,8 +795,8 @@ mod tests {
"create_bug description should reference work/1_backlog/, got: {desc}"
);
assert!(
!desc.contains(".story_kit/bugs"),
"create_bug description should not reference nonexistent .story_kit/bugs/, got: {desc}"
!desc.contains(".storkit/bugs"),
"create_bug description should not reference nonexistent .storkit/bugs/, got: {desc}"
);
let required = t["inputSchema"]["required"].as_array().unwrap();
let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
@@ -821,8 +821,8 @@ mod tests {
"list_bugs description should reference work/1_backlog/, got: {desc}"
);
assert!(
!desc.contains(".story_kit/bugs"),
"list_bugs description should not reference nonexistent .story_kit/bugs/, got: {desc}"
!desc.contains(".storkit/bugs"),
"list_bugs description should not reference nonexistent .storkit/bugs/, got: {desc}"
);
}
@@ -836,8 +836,8 @@ mod tests {
let t = tool.unwrap();
let desc = t["description"].as_str().unwrap();
assert!(
!desc.contains(".story_kit/bugs"),
"close_bug description should not reference nonexistent .story_kit/bugs/, got: {desc}"
!desc.contains(".storkit/bugs"),
"close_bug description should not reference nonexistent .storkit/bugs/, got: {desc}"
);
assert!(
desc.contains("work/5_done/"),
@@ -903,7 +903,7 @@ mod tests {
assert!(result.contains("1_bug_login_crash"));
let bug_file = tmp
.path()
.join(".story_kit/work/1_backlog/1_bug_login_crash.md");
.join(".storkit/work/1_backlog/1_bug_login_crash.md");
assert!(bug_file.exists());
}
@@ -919,7 +919,7 @@ mod tests {
#[test]
fn tool_list_bugs_returns_open_bugs() {
let tmp = tempfile::tempdir().unwrap();
let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
let backlog_dir = tmp.path().join(".storkit/work/1_backlog");
std::fs::create_dir_all(&backlog_dir).unwrap();
std::fs::write(
backlog_dir.join("1_bug_crash.md"),
@@ -955,7 +955,7 @@ mod tests {
fn tool_close_bug_moves_to_archive() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo_in(tmp.path());
let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
let backlog_dir = tmp.path().join(".storkit/work/1_backlog");
std::fs::create_dir_all(&backlog_dir).unwrap();
let bug_file = backlog_dir.join("1_bug_crash.md");
std::fs::write(&bug_file, "# Bug 1: Crash\n").unwrap();
@@ -975,7 +975,7 @@ mod tests {
let result = tool_close_bug(&json!({"bug_id": "1_bug_crash"}), &ctx).unwrap();
assert!(result.contains("1_bug_crash"));
assert!(!bug_file.exists());
assert!(tmp.path().join(".story_kit/work/5_done/1_bug_crash.md").exists());
assert!(tmp.path().join(".storkit/work/5_done/1_bug_crash.md").exists());
}
#[test]
@@ -1026,7 +1026,7 @@ mod tests {
assert!(result.contains("1_spike_compare_encoders"));
let spike_file = tmp
.path()
.join(".story_kit/work/1_backlog/1_spike_compare_encoders.md");
.join(".storkit/work/1_backlog/1_spike_compare_encoders.md");
assert!(spike_file.exists());
let contents = std::fs::read_to_string(&spike_file).unwrap();
assert!(contents.starts_with("---\nname: \"Compare Encoders\"\n---"));
@@ -1041,7 +1041,7 @@ mod tests {
let result = tool_create_spike(&json!({"name": "My Spike"}), &ctx).unwrap();
assert!(result.contains("1_spike_my_spike"));
let spike_file = tmp.path().join(".story_kit/work/1_backlog/1_spike_my_spike.md");
let spike_file = tmp.path().join(".storkit/work/1_backlog/1_spike_my_spike.md");
assert!(spike_file.exists());
let contents = std::fs::read_to_string(&spike_file).unwrap();
assert!(contents.starts_with("---\nname: \"My Spike\"\n---"));
@@ -1087,7 +1087,7 @@ mod tests {
#[test]
fn tool_validate_stories_with_valid_story() {
let tmp = tempfile::tempdir().unwrap();
let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
let current_dir = tmp.path().join(".storkit").join("work").join("2_current");
fs::create_dir_all(&current_dir).unwrap();
fs::write(
current_dir.join("1_test.md"),
@@ -1104,7 +1104,7 @@ mod tests {
#[test]
fn tool_validate_stories_with_invalid_front_matter() {
let tmp = tempfile::tempdir().unwrap();
let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
let current_dir = tmp.path().join(".storkit").join("work").join("2_current");
fs::create_dir_all(&current_dir).unwrap();
fs::write(
current_dir.join("1_test.md"),
@@ -1121,7 +1121,7 @@ mod tests {
#[test]
fn record_tests_persists_to_story_file() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("1_story_persist.md"), "---\nname: Persist\n---\n# Story\n").unwrap();
@@ -1145,7 +1145,7 @@ mod tests {
#[test]
fn ensure_acceptance_reads_from_file_when_not_in_memory() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
// Write a story file with a pre-populated Test Results section (simulating a restart)
@@ -1164,7 +1164,7 @@ mod tests {
#[test]
fn ensure_acceptance_file_with_failures_still_blocks() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let story_content = "---\nname: Fail\n---\n# Story\n\n## Test Results\n\n<!-- story-kit-test-results: {\"unit\":[{\"name\":\"u1\",\"status\":\"fail\",\"details\":\"error\"}],\"integration\":[]} -->\n";
@@ -1197,7 +1197,7 @@ mod tests {
#[tokio::test]
async fn tool_delete_story_deletes_file_from_backlog() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
let story_file = backlog.join("10_story_cleanup.md");
fs::write(&story_file, "---\nname: Cleanup\n---\n").unwrap();
@@ -1211,7 +1211,7 @@ mod tests {
#[tokio::test]
async fn tool_delete_story_deletes_file_from_current() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let story_file = current.join("11_story_active.md");
fs::write(&story_file, "---\nname: Active\n---\n").unwrap();
@@ -1271,7 +1271,7 @@ mod tests {
.unwrap();
// Create story file in current/ so move_story_to_archived would work.
let current_dir = tmp.path().join(".story_kit/work/2_current");
let current_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&current_dir).unwrap();
std::fs::write(
current_dir.join("50_story_test.md"),
@@ -1297,7 +1297,7 @@ mod tests {
setup_git_repo_in(tmp.path());
// Create story file in current/ (no feature branch).
let current_dir = tmp.path().join(".story_kit/work/2_current");
let current_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&current_dir).unwrap();
std::fs::write(
current_dir.join("51_story_no_branch.md"),
@@ -1333,7 +1333,7 @@ mod tests {
fn tool_check_criterion_marks_unchecked_item() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo_in(tmp.path());
let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
let current_dir = tmp.path().join(".storkit").join("work").join("2_current");
fs::create_dir_all(&current_dir).unwrap();
fs::write(
current_dir.join("1_test.md"),

View File

@@ -45,7 +45,7 @@ pub fn resolve_port() -> u16 {
}
pub fn write_port_file(dir: &Path, port: u16) -> Option<PathBuf> {
let path = dir.join(".story_kit_port");
let path = dir.join(".storkit_port");
std::fs::write(&path, port.to_string()).ok()?;
Some(path)
}

View File

@@ -241,7 +241,7 @@ mod tests {
#[test]
fn editor_command_survives_reload() {
let dir = TempDir::new().unwrap();
let store_path = dir.path().join(".story_kit_store.json");
let store_path = dir.path().join(".storkit_store.json");
{
let ctx = AppContext::new_test(dir.path().to_path_buf());

View File

@@ -24,7 +24,7 @@ pub fn create_bug_file(
}
let filename = format!("{bug_number}_bug_{slug}.md");
let bugs_dir = root.join(".story_kit").join("work").join("1_backlog");
let bugs_dir = root.join(".storkit").join("work").join("1_backlog");
fs::create_dir_all(&bugs_dir)
.map_err(|e| format!("Failed to create backlog directory: {e}"))?;
@@ -88,7 +88,7 @@ pub fn create_spike_file(
}
let filename = format!("{spike_number}_spike_{slug}.md");
let backlog_dir = root.join(".story_kit").join("work").join("1_backlog");
let backlog_dir = root.join(".storkit").join("work").join("1_backlog");
fs::create_dir_all(&backlog_dir)
.map_err(|e| format!("Failed to create backlog directory: {e}"))?;
@@ -151,7 +151,7 @@ pub fn create_refactor_file(
}
let filename = format!("{refactor_number}_refactor_{slug}.md");
let backlog_dir = root.join(".story_kit").join("work").join("1_backlog");
let backlog_dir = root.join(".storkit").join("work").join("1_backlog");
fs::create_dir_all(&backlog_dir)
.map_err(|e| format!("Failed to create backlog directory: {e}"))?;
@@ -227,7 +227,7 @@ fn extract_bug_name(path: &Path) -> Option<String> {
///
/// Returns a sorted list of `(bug_id, name)` pairs.
pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
let backlog_dir = root.join(".story_kit").join("work").join("1_backlog");
let backlog_dir = root.join(".storkit").join("work").join("1_backlog");
if !backlog_dir.exists() {
return Ok(Vec::new());
}
@@ -277,7 +277,7 @@ fn is_refactor_item(stem: &str) -> bool {
///
/// Returns a sorted list of `(refactor_id, name)` pairs.
pub fn list_refactor_files(root: &Path) -> Result<Vec<(String, String)>, String> {
let backlog_dir = root.join(".story_kit").join("work").join("1_backlog");
let backlog_dir = root.join(".storkit").join("work").join("1_backlog");
if !backlog_dir.exists() {
return Ok(Vec::new());
}
@@ -357,7 +357,7 @@ mod tests {
#[test]
fn next_item_number_increments_from_existing_bugs() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join("1_bug_crash.md"), "").unwrap();
fs::write(backlog.join("3_bug_another.md"), "").unwrap();
@@ -367,8 +367,8 @@ mod tests {
#[test]
fn next_item_number_scans_archived_too() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let archived = tmp.path().join(".story_kit/work/5_done");
let backlog = tmp.path().join(".storkit/work/1_backlog");
let archived = tmp.path().join(".storkit/work/5_done");
fs::create_dir_all(&backlog).unwrap();
fs::create_dir_all(&archived).unwrap();
fs::write(archived.join("5_bug_old.md"), "").unwrap();
@@ -385,8 +385,8 @@ mod tests {
#[test]
fn list_bug_files_excludes_archive_subdir() {
let tmp = tempfile::tempdir().unwrap();
let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
let archived_dir = tmp.path().join(".story_kit/work/5_done");
let backlog_dir = tmp.path().join(".storkit/work/1_backlog");
let archived_dir = tmp.path().join(".storkit/work/5_done");
fs::create_dir_all(&backlog_dir).unwrap();
fs::create_dir_all(&archived_dir).unwrap();
fs::write(backlog_dir.join("1_bug_open.md"), "# Bug 1: Open Bug\n").unwrap();
@@ -401,7 +401,7 @@ mod tests {
#[test]
fn list_bug_files_sorted_by_id() {
let tmp = tempfile::tempdir().unwrap();
let backlog_dir = tmp.path().join(".story_kit/work/1_backlog");
let backlog_dir = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog_dir).unwrap();
fs::write(backlog_dir.join("3_bug_third.md"), "# Bug 3: Third\n").unwrap();
fs::write(backlog_dir.join("1_bug_first.md"), "# Bug 1: First\n").unwrap();
@@ -443,7 +443,7 @@ mod tests {
let filepath = tmp
.path()
.join(".story_kit/work/1_backlog/1_bug_login_crash.md");
.join(".storkit/work/1_backlog/1_bug_login_crash.md");
assert!(filepath.exists());
let contents = fs::read_to_string(&filepath).unwrap();
assert!(
@@ -487,7 +487,7 @@ mod tests {
)
.unwrap();
let filepath = tmp.path().join(".story_kit/work/1_backlog/1_bug_some_bug.md");
let filepath = tmp.path().join(".storkit/work/1_backlog/1_bug_some_bug.md");
let contents = fs::read_to_string(&filepath).unwrap();
assert!(
contents.starts_with("---\nname: \"Some Bug\"\n---"),
@@ -509,7 +509,7 @@ mod tests {
let filepath = tmp
.path()
.join(".story_kit/work/1_backlog/1_spike_filesystem_watcher_architecture.md");
.join(".storkit/work/1_backlog/1_spike_filesystem_watcher_architecture.md");
assert!(filepath.exists());
let contents = fs::read_to_string(&filepath).unwrap();
assert!(
@@ -533,7 +533,7 @@ mod tests {
create_spike_file(tmp.path(), "FS Watcher Spike", Some(description)).unwrap();
let filepath =
tmp.path().join(".story_kit/work/1_backlog/1_spike_fs_watcher_spike.md");
tmp.path().join(".storkit/work/1_backlog/1_spike_fs_watcher_spike.md");
let contents = fs::read_to_string(&filepath).unwrap();
assert!(contents.contains(description));
}
@@ -543,7 +543,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
create_spike_file(tmp.path(), "My Spike", None).unwrap();
let filepath = tmp.path().join(".story_kit/work/1_backlog/1_spike_my_spike.md");
let filepath = tmp.path().join(".storkit/work/1_backlog/1_spike_my_spike.md");
let contents = fs::read_to_string(&filepath).unwrap();
// Should have placeholder TBD in Question section
assert!(contents.contains("## Question\n\n- TBD\n"));
@@ -564,7 +564,7 @@ mod tests {
let result = create_spike_file(tmp.path(), name, None);
assert!(result.is_ok(), "create_spike_file failed: {result:?}");
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
let spike_id = result.unwrap();
let filename = format!("{spike_id}.md");
let contents = fs::read_to_string(backlog.join(&filename)).unwrap();
@@ -576,7 +576,7 @@ mod tests {
#[test]
fn create_spike_file_increments_from_existing_items() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join("5_story_existing.md"), "").unwrap();

View File

@@ -121,7 +121,7 @@ fn load_stage_items(
agent_map: &HashMap<String, AgentAssignment>,
) -> Result<Vec<UpcomingStory>, String> {
let root = ctx.state.get_project_root()?;
let dir = root.join(".story_kit").join("work").join(stage_dir);
let dir = root.join(".storkit").join("work").join(stage_dir);
if !dir.exists() {
return Ok(Vec::new());
@@ -166,8 +166,8 @@ pub fn validate_story_dirs(
// Directories to validate: work/2_current/ + work/1_backlog/
let dirs_to_validate: Vec<PathBuf> = vec![
root.join(".story_kit").join("work").join("2_current"),
root.join(".story_kit").join("work").join("1_backlog"),
root.join(".storkit").join("work").join("2_current"),
root.join(".storkit").join("work").join("1_backlog"),
];
for dir in &dirs_to_validate {
@@ -230,7 +230,7 @@ pub fn validate_story_dirs(
/// Searches in priority order: 2_current, 1_backlog, 3_qa, 4_merge, 5_done, 6_archived.
pub(super) fn find_story_file(project_root: &Path, story_id: &str) -> Result<PathBuf, String> {
let filename = format!("{story_id}.md");
let sk = project_root.join(".story_kit").join("work");
let sk = project_root.join(".storkit").join("work");
for stage in &["2_current", "1_backlog", "3_qa", "4_merge", "5_done", "6_archived"] {
let path = sk.join(stage).join(&filename);
if path.exists() {
@@ -370,7 +370,7 @@ pub(super) fn slugify_name(name: &str) -> String {
/// Scan all `work/` subdirectories for the highest item number across all types (stories, bugs, spikes).
pub(super) fn next_item_number(root: &std::path::Path) -> Result<u32, String> {
let work_base = root.join(".story_kit").join("work");
let work_base = root.join(".storkit").join("work");
let mut max_num: u32 = 0;
for subdir in &["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
@@ -413,7 +413,7 @@ mod tests {
("4_merge", "40_story_merge"),
("5_done", "50_story_done"),
] {
let dir = root.join(".story_kit").join("work").join(stage);
let dir = root.join(".storkit").join("work").join(stage);
fs::create_dir_all(&dir).unwrap();
fs::write(
dir.join(format!("{id}.md")),
@@ -445,7 +445,7 @@ mod tests {
fn load_upcoming_returns_empty_when_no_dir() {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
// No .story_kit directory at all
// No .storkit directory at all
let ctx = crate::http::context::AppContext::new_test(root);
let result = load_upcoming_stories(&ctx).unwrap();
assert!(result.is_empty());
@@ -456,7 +456,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(
current.join("10_story_test.md"),
@@ -482,7 +482,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(
current.join("11_story_done.md"),
@@ -507,7 +507,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let root = tmp.path().to_path_buf();
let current = root.join(".story_kit/work/2_current");
let current = root.join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(
current.join("12_story_pending.md"),
@@ -529,7 +529,7 @@ mod tests {
#[test]
fn load_upcoming_parses_metadata() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(
backlog.join("31_story_view_upcoming.md"),
@@ -554,7 +554,7 @@ mod tests {
#[test]
fn load_upcoming_skips_non_md_files() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join(".gitkeep"), "").unwrap();
fs::write(
@@ -572,8 +572,8 @@ mod tests {
#[test]
fn validate_story_dirs_valid_files() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let current = tmp.path().join(".storkit/work/2_current");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&backlog).unwrap();
fs::write(
@@ -596,7 +596,7 @@ mod tests {
#[test]
fn validate_story_dirs_missing_front_matter() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("28_story_todos.md"), "# No front matter\n").unwrap();
@@ -609,7 +609,7 @@ mod tests {
#[test]
fn validate_story_dirs_missing_required_fields() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("28_story_todos.md"), "---\n---\n# Story\n").unwrap();
@@ -667,7 +667,7 @@ mod tests {
#[test]
fn next_item_number_empty_dirs() {
let tmp = tempfile::tempdir().unwrap();
let base = tmp.path().join(".story_kit/work/1_backlog");
let base = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&base).unwrap();
assert_eq!(next_item_number(tmp.path()).unwrap(), 1);
}
@@ -675,9 +675,9 @@ mod tests {
#[test]
fn next_item_number_scans_all_dirs() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let current = tmp.path().join(".story_kit/work/2_current");
let archived = tmp.path().join(".story_kit/work/5_done");
let backlog = tmp.path().join(".storkit/work/1_backlog");
let current = tmp.path().join(".storkit/work/2_current");
let archived = tmp.path().join(".storkit/work/5_done");
fs::create_dir_all(&backlog).unwrap();
fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&archived).unwrap();
@@ -690,7 +690,7 @@ mod tests {
#[test]
fn next_item_number_no_work_dirs() {
let tmp = tempfile::tempdir().unwrap();
// No .story_kit at all
// No .storkit at all
assert_eq!(next_item_number(tmp.path()).unwrap(), 1);
}
@@ -699,8 +699,8 @@ mod tests {
#[test]
fn find_story_file_searches_current_then_backlog() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let current = tmp.path().join(".storkit/work/2_current");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&current).unwrap();
fs::create_dir_all(&backlog).unwrap();

View File

@@ -24,7 +24,7 @@ pub fn create_story_file(
}
let filename = format!("{story_number}_story_{slug}.md");
let backlog_dir = root.join(".story_kit").join("work").join("1_backlog");
let backlog_dir = root.join(".storkit").join("work").join("1_backlog");
fs::create_dir_all(&backlog_dir)
.map_err(|e| format!("Failed to create backlog directory: {e}"))?;
@@ -269,7 +269,7 @@ mod tests {
#[test]
fn create_story_writes_correct_content() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
fs::write(backlog.join("36_story_existing.md"), "").unwrap();
@@ -312,7 +312,7 @@ mod tests {
let result = create_story_file(tmp.path(), name, None, None, false);
assert!(result.is_ok(), "create_story_file failed: {result:?}");
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
let story_id = result.unwrap();
let filename = format!("{story_id}.md");
let contents = fs::read_to_string(backlog.join(&filename)).unwrap();
@@ -324,7 +324,7 @@ mod tests {
#[test]
fn create_story_rejects_duplicate() {
let tmp = tempfile::tempdir().unwrap();
let backlog = tmp.path().join(".story_kit/work/1_backlog");
let backlog = tmp.path().join(".storkit/work/1_backlog");
fs::create_dir_all(&backlog).unwrap();
let filepath = backlog.join("1_story_my_feature.md");
@@ -340,7 +340,7 @@ mod tests {
fn check_criterion_marks_first_unchecked() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo(tmp.path());
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("1_test.md");
fs::write(&filepath, story_with_criteria(3)).unwrap();
@@ -367,7 +367,7 @@ mod tests {
fn check_criterion_marks_second_unchecked() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo(tmp.path());
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("2_test.md");
fs::write(&filepath, story_with_criteria(3)).unwrap();
@@ -394,7 +394,7 @@ mod tests {
fn check_criterion_out_of_range_returns_error() {
let tmp = tempfile::tempdir().unwrap();
setup_git_repo(tmp.path());
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("3_test.md");
fs::write(&filepath, story_with_criteria(2)).unwrap();
@@ -428,7 +428,7 @@ mod tests {
#[test]
fn add_criterion_appends_after_last_criterion() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("10_test.md");
fs::write(&filepath, story_with_ac_section(&["First", "Second"])).unwrap();
@@ -448,7 +448,7 @@ mod tests {
#[test]
fn add_criterion_to_empty_section() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("11_test.md");
let content = "---\nname: Test\n---\n\n## Acceptance Criteria\n\n## Out of Scope\n\n- N/A\n";
@@ -463,7 +463,7 @@ mod tests {
#[test]
fn add_criterion_missing_section_returns_error() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("12_test.md");
fs::write(&filepath, "---\nname: Test\n---\n\nNo AC section here.\n").unwrap();
@@ -478,7 +478,7 @@ mod tests {
#[test]
fn update_story_replaces_user_story_section() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("20_test.md");
let content = "---\nname: T\n---\n\n## User Story\n\nOld text\n\n## Acceptance Criteria\n\n- [ ] AC\n";
@@ -495,7 +495,7 @@ mod tests {
#[test]
fn update_story_replaces_description_section() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("21_test.md");
let content = "---\nname: T\n---\n\n## Description\n\nOld description\n\n## Acceptance Criteria\n\n- [ ] AC\n";
@@ -511,7 +511,7 @@ mod tests {
#[test]
fn update_story_no_args_returns_error() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("22_test.md"), "---\nname: T\n---\n").unwrap();
@@ -523,7 +523,7 @@ mod tests {
#[test]
fn update_story_missing_section_returns_error() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(
current.join("23_test.md"),
@@ -539,7 +539,7 @@ mod tests {
#[test]
fn update_story_sets_agent_front_matter_field() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("24_test.md");
fs::write(&filepath, "---\nname: T\n---\n\n## User Story\n\nSome story\n").unwrap();
@@ -556,7 +556,7 @@ mod tests {
#[test]
fn update_story_sets_arbitrary_front_matter_fields() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let filepath = current.join("25_test.md");
fs::write(&filepath, "---\nname: T\n---\n\n## User Story\n\nSome story\n").unwrap();
@@ -575,7 +575,7 @@ mod tests {
#[test]
fn update_story_front_matter_only_no_section_required() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
// File without a User Story section — front matter update should succeed
let filepath = current.join("26_test.md");

View File

@@ -144,7 +144,7 @@ mod tests {
#[test]
fn write_and_read_test_results_roundtrip() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("1_story_test.md"), "---\nname: Test\n---\n# Story\n").unwrap();
@@ -163,7 +163,7 @@ mod tests {
#[test]
fn write_test_results_creates_readable_section() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let story_path = current.join("2_story_check.md");
fs::write(&story_path, "---\nname: Check\n---\n# Story\n\n## Acceptance Criteria\n\n- [ ] AC1\n").unwrap();
@@ -184,7 +184,7 @@ mod tests {
#[test]
fn write_test_results_overwrites_existing_section() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
let story_path = current.join("3_story_overwrite.md");
fs::write(
@@ -206,7 +206,7 @@ mod tests {
#[test]
fn read_test_results_returns_none_when_no_section() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("4_story_empty.md"), "---\nname: Empty\n---\n# Story\n").unwrap();
@@ -224,7 +224,7 @@ mod tests {
#[test]
fn write_test_results_finds_story_in_any_stage() {
let tmp = tempfile::tempdir().unwrap();
let qa_dir = tmp.path().join(".story_kit/work/3_qa");
let qa_dir = tmp.path().join(".storkit/work/3_qa");
fs::create_dir_all(&qa_dir).unwrap();
fs::write(qa_dir.join("5_story_qa.md"), "---\nname: QA Story\n---\n# Story\n").unwrap();
@@ -241,7 +241,7 @@ mod tests {
#[test]
fn write_coverage_baseline_to_story_file_updates_front_matter() {
let tmp = tempfile::tempdir().unwrap();
let current = tmp.path().join(".story_kit/work/2_current");
let current = tmp.path().join(".storkit/work/2_current");
fs::create_dir_all(&current).unwrap();
fs::write(current.join("6_story_cov.md"), "---\nname: Cov Story\n---\n# Story\n").unwrap();

View File

@@ -53,8 +53,8 @@ enum WsRequest {
/// - `token` streams partial model output.
/// - `update` pushes the updated message history.
/// - `error` reports a request or processing failure.
/// - `work_item_changed` notifies that a `.story_kit/work/` file changed.
/// - `agent_config_changed` notifies that `.story_kit/project.toml` was modified.
/// - `work_item_changed` notifies that a `.storkit/work/` file changed.
/// - `agent_config_changed` notifies that `.storkit/project.toml` was modified.
enum WsResponse {
Token {
content: String,
@@ -86,7 +86,7 @@ enum WsResponse {
merge: Vec<crate::http::workflow::UpcomingStory>,
done: Vec<crate::http::workflow::UpcomingStory>,
},
/// `.story_kit/project.toml` was modified; the frontend should re-fetch the
/// `.storkit/project.toml` was modified; the frontend should re-fetch the
/// agent roster. Does NOT trigger a pipeline state refresh.
AgentConfigChanged,
/// An agent's state changed (started, stopped, completed, etc.).
@@ -1111,7 +1111,7 @@ mod tests {
// Create minimal pipeline dirs so load_pipeline_state succeeds.
for stage in &["1_backlog", "2_current", "3_qa", "4_merge"] {
std::fs::create_dir_all(root.join(".story_kit").join("work").join(stage)).unwrap();
std::fs::create_dir_all(root.join(".storkit").join("work").join(stage)).unwrap();
}
let ctx = Arc::new(AppContext::new_test(root));

View File

@@ -9,7 +9,7 @@ const KEY_LAST_PROJECT: &str = "last_project_path";
const KEY_SELECTED_MODEL: &str = "selected_model";
const KEY_KNOWN_PROJECTS: &str = "known_projects";
const STORY_KIT_README: &str = include_str!("../../../.story_kit/README.md");
const STORY_KIT_README: &str = include_str!("../../../.storkit/README.md");
const STORY_KIT_CONTEXT: &str = "<!-- story-kit:scaffold-template -->\n\
# Project Context\n\
@@ -57,7 +57,7 @@ The permission system validates the entire command string, and chained commands
won't match allow rules like `Bash(git *)`. Use separate Bash calls instead — \
parallel calls work fine.\n\
\n\
Read .story_kit/README.md to see our dev process.\n";
Read .storkit/README.md to see our dev process.\n";
const STORY_KIT_CLAUDE_SETTINGS: &str = r#"{
"permissions": {
@@ -110,7 +110,7 @@ role = "Full-stack engineer. Implements features across all components."
model = "sonnet"
max_turns = 50
max_budget_usd = 5.00
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Follow the workflow through implementation and verification. The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop.\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits."
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .storkit/README.md to understand the dev process. Follow the workflow through implementation and verification. The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop.\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits."
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Commit all your work before finishing. Do not accept stories, move them to archived, or merge to master."
[[agent]]
@@ -238,11 +238,11 @@ pub fn resolve_cli_path(cwd: &Path, path_arg: &str) -> PathBuf {
}
/// Walk from `start` up through parent directories, returning the first
/// directory that contains a `.story_kit/` subdirectory, or `None`.
/// directory that contains a `.storkit/` subdirectory, or `None`.
pub fn find_story_kit_root(start: &Path) -> Option<PathBuf> {
let mut current = start.to_path_buf();
loop {
if current.join(".story_kit").is_dir() {
if current.join(".storkit").is_dir() {
return Some(current);
}
if !current.pop() {
@@ -316,12 +316,12 @@ fn write_script_if_missing(path: &Path, content: &str) -> Result<(), String> {
Ok(())
}
/// Write (or idempotently update) `.story_kit/.gitignore` with Story Kit-specific
/// ignore patterns for files that live inside the `.story_kit/` directory.
/// Patterns are relative to `.story_kit/` as git resolves `.gitignore` files
/// Write (or idempotently update) `.storkit/.gitignore` with Story Kit-specific
/// ignore patterns for files that live inside the `.storkit/` directory.
/// Patterns are relative to `.storkit/` as git resolves `.gitignore` files
/// relative to the directory that contains them.
fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
// Entries that belong inside .story_kit/.gitignore (relative to .story_kit/).
// Entries that belong inside .storkit/.gitignore (relative to .storkit/).
let entries = [
"bot.toml",
"matrix_store/",
@@ -331,10 +331,10 @@ fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
"coverage/",
];
let gitignore_path = root.join(".story_kit").join(".gitignore");
let gitignore_path = root.join(".storkit").join(".gitignore");
let existing = if gitignore_path.exists() {
fs::read_to_string(&gitignore_path)
.map_err(|e| format!("Failed to read .story_kit/.gitignore: {}", e))?
.map_err(|e| format!("Failed to read .storkit/.gitignore: {}", e))?
} else {
String::new()
};
@@ -359,17 +359,17 @@ fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
}
fs::write(&gitignore_path, new_content)
.map_err(|e| format!("Failed to write .story_kit/.gitignore: {}", e))?;
.map_err(|e| format!("Failed to write .storkit/.gitignore: {}", e))?;
Ok(())
}
/// Append root-level Story Kit entries to the project `.gitignore`.
/// Only `store.json` and `.story_kit_port` remain here because they live at
/// Only `store.json` and `.storkit_port` remain here because they live at
/// the project root and git does not support `../` patterns in `.gitignore`
/// files, so they cannot be expressed in `.story_kit/.gitignore`.
/// files, so they cannot be expressed in `.storkit/.gitignore`.
fn append_root_gitignore_entries(root: &Path) -> Result<(), String> {
let entries = [".story_kit_port", "store.json"];
let entries = [".storkit_port", "store.json"];
let gitignore_path = root.join(".gitignore");
let existing = if gitignore_path.exists() {
@@ -405,7 +405,7 @@ fn append_root_gitignore_entries(root: &Path) -> Result<(), String> {
}
fn scaffold_story_kit(root: &Path) -> Result<(), String> {
let story_kit_root = root.join(".story_kit");
let story_kit_root = root.join(".storkit");
let specs_root = story_kit_root.join("specs");
let tech_root = specs_root.join("tech");
let functional_root = specs_root.join("functional");
@@ -464,7 +464,7 @@ fn scaffold_story_kit(root: &Path) -> Result<(), String> {
}
let add_output = std::process::Command::new("git")
.args(["add", ".story_kit", "script", ".gitignore", "CLAUDE.md", ".claude"])
.args(["add", ".storkit", "script", ".gitignore", "CLAUDE.md", ".claude"])
.current_dir(root)
.output()
.map_err(|e| format!("Failed to run git add: {}", e))?;
@@ -505,7 +505,7 @@ async fn ensure_project_root_with_story_kit(path: PathBuf) -> Result<(), String>
fs::create_dir_all(&path)
.map_err(|e| format!("Failed to create project directory: {}", e))?;
}
if !path.join(".story_kit").is_dir() {
if !path.join(".storkit").is_dir() {
scaffold_story_kit(&path)?;
}
Ok(())
@@ -1032,7 +1032,7 @@ mod tests {
#[test]
fn find_story_kit_root_returns_cwd_when_story_kit_in_cwd() {
let tmp = tempfile::tempdir().unwrap();
std::fs::create_dir_all(tmp.path().join(".story_kit")).unwrap();
std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
let result = find_story_kit_root(tmp.path());
assert_eq!(result, Some(tmp.path().to_path_buf()));
@@ -1041,7 +1041,7 @@ mod tests {
#[test]
fn find_story_kit_root_returns_parent_when_story_kit_in_parent() {
let tmp = tempfile::tempdir().unwrap();
std::fs::create_dir_all(tmp.path().join(".story_kit")).unwrap();
std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
let child = tmp.path().join("subdir").join("nested");
std::fs::create_dir_all(&child).unwrap();
@@ -1060,9 +1060,9 @@ mod tests {
#[test]
fn find_story_kit_root_prefers_nearest_ancestor() {
let tmp = tempfile::tempdir().unwrap();
std::fs::create_dir_all(tmp.path().join(".story_kit")).unwrap();
std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
let child = tmp.path().join("inner");
std::fs::create_dir_all(child.join(".story_kit")).unwrap();
std::fs::create_dir_all(child.join(".storkit")).unwrap();
let result = find_story_kit_root(&child);
assert_eq!(result, Some(child));
@@ -1075,12 +1075,12 @@ mod tests {
let dir = tempdir().unwrap();
scaffold_story_kit(dir.path()).unwrap();
assert!(dir.path().join(".story_kit/README.md").exists());
assert!(dir.path().join(".story_kit/project.toml").exists());
assert!(dir.path().join(".story_kit/specs/00_CONTEXT.md").exists());
assert!(dir.path().join(".story_kit/specs/tech/STACK.md").exists());
assert!(dir.path().join(".storkit/README.md").exists());
assert!(dir.path().join(".storkit/project.toml").exists());
assert!(dir.path().join(".storkit/specs/00_CONTEXT.md").exists());
assert!(dir.path().join(".storkit/specs/tech/STACK.md").exists());
// Old stories/ dirs should NOT be created
assert!(!dir.path().join(".story_kit/stories").exists());
assert!(!dir.path().join(".storkit/stories").exists());
assert!(dir.path().join("script/test").exists());
}
@@ -1091,7 +1091,7 @@ mod tests {
let stages = ["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"];
for stage in &stages {
let path = dir.path().join(".story_kit/work").join(stage);
let path = dir.path().join(".storkit/work").join(stage);
assert!(path.is_dir(), "work/{} should be a directory", stage);
assert!(
path.join(".gitkeep").exists(),
@@ -1107,7 +1107,7 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let content =
fs::read_to_string(dir.path().join(".story_kit/project.toml")).unwrap();
fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
assert!(content.contains("[[agent]]"));
assert!(content.contains("stage = \"coder\""));
assert!(content.contains("stage = \"qa\""));
@@ -1121,7 +1121,7 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let content =
fs::read_to_string(dir.path().join(".story_kit/specs/00_CONTEXT.md")).unwrap();
fs::read_to_string(dir.path().join(".storkit/specs/00_CONTEXT.md")).unwrap();
assert!(content.contains("<!-- story-kit:scaffold-template -->"));
assert!(content.contains("## High-Level Goal"));
assert!(content.contains("## Core Features"));
@@ -1138,7 +1138,7 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let content =
fs::read_to_string(dir.path().join(".story_kit/specs/tech/STACK.md")).unwrap();
fs::read_to_string(dir.path().join(".storkit/specs/tech/STACK.md")).unwrap();
assert!(content.contains("<!-- story-kit:scaffold-template -->"));
assert!(content.contains("## Core Stack"));
assert!(content.contains("## Coding Standards"));
@@ -1169,7 +1169,7 @@ mod tests {
#[test]
fn scaffold_story_kit_does_not_overwrite_existing() {
let dir = tempdir().unwrap();
let readme = dir.path().join(".story_kit/README.md");
let readme = dir.path().join(".storkit/README.md");
fs::create_dir_all(readme.parent().unwrap()).unwrap();
fs::write(&readme, "custom content").unwrap();
@@ -1184,24 +1184,24 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let readme_content =
fs::read_to_string(dir.path().join(".story_kit/README.md")).unwrap();
fs::read_to_string(dir.path().join(".storkit/README.md")).unwrap();
let toml_content =
fs::read_to_string(dir.path().join(".story_kit/project.toml")).unwrap();
fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
// Run again — must not change content or add duplicate .gitignore entries
scaffold_story_kit(dir.path()).unwrap();
assert_eq!(
fs::read_to_string(dir.path().join(".story_kit/README.md")).unwrap(),
fs::read_to_string(dir.path().join(".storkit/README.md")).unwrap(),
readme_content
);
assert_eq!(
fs::read_to_string(dir.path().join(".story_kit/project.toml")).unwrap(),
fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap(),
toml_content
);
let story_kit_gitignore =
fs::read_to_string(dir.path().join(".story_kit/.gitignore")).unwrap();
fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
let count = story_kit_gitignore
.lines()
.filter(|l| l.trim() == "worktrees/")
@@ -1209,7 +1209,7 @@ mod tests {
assert_eq!(
count,
1,
".story_kit/.gitignore should not have duplicate entries"
".storkit/.gitignore should not have duplicate entries"
);
}
@@ -1260,32 +1260,32 @@ mod tests {
let dir = tempdir().unwrap();
scaffold_story_kit(dir.path()).unwrap();
// .story_kit/.gitignore must contain relative patterns for files under .story_kit/
// .storkit/.gitignore must contain relative patterns for files under .storkit/
let sk_content =
fs::read_to_string(dir.path().join(".story_kit/.gitignore")).unwrap();
fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
assert!(sk_content.contains("worktrees/"));
assert!(sk_content.contains("merge_workspace/"));
assert!(sk_content.contains("coverage/"));
// Must NOT contain absolute .story_kit/ prefixed paths
assert!(!sk_content.contains(".story_kit/"));
// Must NOT contain absolute .storkit/ prefixed paths
assert!(!sk_content.contains(".storkit/"));
// Root .gitignore must contain root-level story-kit entries
let root_content = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
assert!(root_content.contains(".story_kit_port"));
assert!(root_content.contains(".storkit_port"));
assert!(root_content.contains("store.json"));
// Root .gitignore must NOT contain .story_kit/ sub-directory patterns
assert!(!root_content.contains(".story_kit/worktrees/"));
assert!(!root_content.contains(".story_kit/merge_workspace/"));
assert!(!root_content.contains(".story_kit/coverage/"));
// Root .gitignore must NOT contain .storkit/ sub-directory patterns
assert!(!root_content.contains(".storkit/worktrees/"));
assert!(!root_content.contains(".storkit/merge_workspace/"));
assert!(!root_content.contains(".storkit/coverage/"));
}
#[test]
fn scaffold_story_kit_gitignore_does_not_duplicate_existing_entries() {
let dir = tempdir().unwrap();
// Pre-create .story_kit dir and .gitignore with some entries already present
fs::create_dir_all(dir.path().join(".story_kit")).unwrap();
// Pre-create .storkit dir and .gitignore with some entries already present
fs::create_dir_all(dir.path().join(".storkit")).unwrap();
fs::write(
dir.path().join(".story_kit/.gitignore"),
dir.path().join(".storkit/.gitignore"),
"worktrees/\ncoverage/\n",
)
.unwrap();
@@ -1293,7 +1293,7 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let content =
fs::read_to_string(dir.path().join(".story_kit/.gitignore")).unwrap();
fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
let worktrees_count = content
.lines()
.filter(|l| l.trim() == "worktrees/")
@@ -1324,8 +1324,8 @@ mod tests {
"CLAUDE.md should contain the scaffold sentinel"
);
assert!(
content.contains("Read .story_kit/README.md"),
"CLAUDE.md should include directive to read .story_kit/README.md"
content.contains("Read .storkit/README.md"),
"CLAUDE.md should include directive to read .storkit/README.md"
);
assert!(
content.contains("Never chain shell commands"),
@@ -1366,15 +1366,15 @@ mod tests {
.await
.unwrap();
// .story_kit/ should have been created automatically
assert!(project_dir.join(".story_kit").is_dir());
// .storkit/ should have been created automatically
assert!(project_dir.join(".storkit").is_dir());
}
#[tokio::test]
async fn open_project_does_not_overwrite_existing_story_kit() {
let dir = tempdir().unwrap();
let project_dir = dir.path().join("myproject");
let sk_dir = project_dir.join(".story_kit");
let sk_dir = project_dir.join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
let readme = sk_dir.join("README.md");
fs::write(&readme, "custom content").unwrap();
@@ -1389,7 +1389,7 @@ mod tests {
.await
.unwrap();
// Existing .story_kit/ content should not be overwritten
// Existing .storkit/ content should not be overwritten
assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
}
@@ -1570,7 +1570,7 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let content =
fs::read_to_string(dir.path().join(".story_kit/project.toml")).unwrap();
fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
assert!(
content.contains("[[component]]"),
"project.toml should contain a component entry"
@@ -1591,7 +1591,7 @@ mod tests {
scaffold_story_kit(dir.path()).unwrap();
let content =
fs::read_to_string(dir.path().join(".story_kit/project.toml")).unwrap();
fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
assert!(
content.contains("[[component]]"),
"project.toml should always have at least one component"
@@ -1606,7 +1606,7 @@ mod tests {
#[test]
fn scaffold_does_not_overwrite_existing_project_toml_with_components() {
let dir = tempdir().unwrap();
let sk_dir = dir.path().join(".story_kit");
let sk_dir = dir.path().join(".storkit");
fs::create_dir_all(&sk_dir).unwrap();
let existing = "[[component]]\nname = \"custom\"\npath = \".\"\nsetup = [\"make build\"]\n";
fs::write(sk_dir.join("project.toml"), existing).unwrap();

View File

@@ -18,7 +18,7 @@ pub struct OnboardingStatus {
pub needs_stack: bool,
/// True when `script/test` still contains the scaffold placeholder.
pub needs_test_script: bool,
/// True when `.story_kit/project.toml` is missing or has no
/// True when `.storkit/project.toml` is missing or has no
/// `[[component]]` entries.
pub needs_project_toml: bool,
}
@@ -33,7 +33,7 @@ impl OnboardingStatus {
/// Inspect the project at `project_root` and determine which onboarding
/// steps are still required.
pub fn check_onboarding_status(project_root: &Path) -> OnboardingStatus {
let story_kit = project_root.join(".story_kit");
let story_kit = project_root.join(".storkit");
OnboardingStatus {
needs_context: is_template_or_missing(
@@ -79,7 +79,7 @@ mod tests {
fn setup_project(dir: &TempDir) -> std::path::PathBuf {
let root = dir.path().to_path_buf();
let sk = root.join(".story_kit");
let sk = root.join(".storkit");
fs::create_dir_all(sk.join("specs").join("tech")).unwrap();
fs::create_dir_all(root.join("script")).unwrap();
root
@@ -106,12 +106,12 @@ mod tests {
// Write content that includes the scaffold sentinel
fs::write(
root.join(".story_kit/specs/00_CONTEXT.md"),
root.join(".storkit/specs/00_CONTEXT.md"),
"<!-- story-kit:scaffold-template -->\n# Project Context\nPlaceholder...",
)
.unwrap();
fs::write(
root.join(".story_kit/specs/tech/STACK.md"),
root.join(".storkit/specs/tech/STACK.md"),
"<!-- story-kit:scaffold-template -->\n# Tech Stack\nPlaceholder...",
)
.unwrap();
@@ -130,12 +130,12 @@ mod tests {
// Real project content that happens to mention "Agentic AI Code Assistant"
// but does NOT contain the scaffold sentinel — should NOT trigger onboarding.
fs::write(
root.join(".story_kit/specs/00_CONTEXT.md"),
root.join(".storkit/specs/00_CONTEXT.md"),
"# Project Context\nTo build a standalone Agentic AI Code Assistant application.",
)
.unwrap();
fs::write(
root.join(".story_kit/specs/tech/STACK.md"),
root.join(".storkit/specs/tech/STACK.md"),
"# Tech Stack\nThis is an Agentic Code Assistant binary.",
)
.unwrap();
@@ -152,12 +152,12 @@ mod tests {
let root = setup_project(&dir);
fs::write(
root.join(".story_kit/specs/00_CONTEXT.md"),
root.join(".storkit/specs/00_CONTEXT.md"),
"# My Project\n\nThis is an e-commerce platform for selling widgets.",
)
.unwrap();
fs::write(
root.join(".story_kit/specs/tech/STACK.md"),
root.join(".storkit/specs/tech/STACK.md"),
"# Tech Stack\n\n## Backend: Python + FastAPI\n## Frontend: React + TypeScript",
)
.unwrap();
@@ -173,8 +173,8 @@ mod tests {
let dir = TempDir::new().unwrap();
let root = setup_project(&dir);
fs::write(root.join(".story_kit/specs/00_CONTEXT.md"), " \n").unwrap();
fs::write(root.join(".story_kit/specs/tech/STACK.md"), "").unwrap();
fs::write(root.join(".storkit/specs/00_CONTEXT.md"), " \n").unwrap();
fs::write(root.join(".storkit/specs/tech/STACK.md"), "").unwrap();
let status = check_onboarding_status(&root);
assert!(status.needs_context);
@@ -230,7 +230,7 @@ mod tests {
let root = setup_project(&dir);
fs::write(
root.join(".story_kit/project.toml"),
root.join(".storkit/project.toml"),
"# empty config\n",
)
.unwrap();
@@ -245,7 +245,7 @@ mod tests {
let root = setup_project(&dir);
fs::write(
root.join(".story_kit/project.toml"),
root.join(".storkit/project.toml"),
"[[component]]\nname = \"app\"\npath = \".\"\nsetup = [\"cargo check\"]\n",
)
.unwrap();
@@ -263,12 +263,12 @@ mod tests {
// Write real content for the required onboarding files
fs::write(
root.join(".story_kit/specs/00_CONTEXT.md"),
root.join(".storkit/specs/00_CONTEXT.md"),
"# My Project\n\nReal project context.",
)
.unwrap();
fs::write(
root.join(".story_kit/specs/tech/STACK.md"),
root.join(".storkit/specs/tech/STACK.md"),
"# My Stack\n\nReal stack content.",
)
.unwrap();
@@ -300,13 +300,13 @@ mod tests {
// Context still has sentinel
fs::write(
root.join(".story_kit/specs/00_CONTEXT.md"),
root.join(".storkit/specs/00_CONTEXT.md"),
"<!-- story-kit:scaffold-template -->\n# Project Context\nPlaceholder...",
)
.unwrap();
// Stack is customised (no sentinel)
fs::write(
root.join(".story_kit/specs/tech/STACK.md"),
root.join(".storkit/specs/tech/STACK.md"),
"# My Stack\nRuby on Rails + PostgreSQL",
)
.unwrap();

View File

@@ -1,10 +1,10 @@
//! Filesystem watcher for `.story_kit/work/` and `.story_kit/project.toml`.
//! Filesystem watcher for `.storkit/work/` and `.storkit/project.toml`.
//!
//! Watches the work pipeline directories for file changes, infers the lifecycle
//! stage from the target directory name, auto-commits with a deterministic message,
//! and broadcasts a [`WatcherEvent`] to all connected WebSocket clients.
//!
//! Also watches `.story_kit/project.toml` for modifications and broadcasts
//! Also watches `.storkit/project.toml` for modifications and broadcasts
//! [`WatcherEvent::ConfigChanged`] so the frontend can reload the agent roster
//! without a server restart.
//!
@@ -45,7 +45,7 @@ pub enum WatcherEvent {
/// The deterministic git commit message used (or that would have been used).
commit_msg: String,
},
/// `.story_kit/project.toml` was modified at the project root (not inside a worktree).
/// `.storkit/project.toml` was modified at the project root (not inside a worktree).
ConfigChanged,
/// An agent's state changed (started, stopped, completed, etc.).
/// Triggers a pipeline state refresh so the frontend can update agent
@@ -61,8 +61,8 @@ pub enum WatcherEvent {
},
}
/// Return `true` if `path` is the root-level `.story_kit/project.toml`, i.e.
/// `{git_root}/.story_kit/project.toml`.
/// Return `true` if `path` is the root-level `.storkit/project.toml`, i.e.
/// `{git_root}/.storkit/project.toml`.
///
/// Returns `false` for paths inside worktree directories (paths containing
/// a `worktrees` component).
@@ -71,7 +71,7 @@ pub fn is_config_file(path: &Path, git_root: &Path) -> bool {
if path.components().any(|c| c.as_os_str() == "worktrees") {
return false;
}
let expected = git_root.join(".story_kit").join("project.toml");
let expected = git_root.join(".storkit").join("project.toml");
path == expected
}
@@ -92,7 +92,7 @@ fn stage_metadata(stage: &str, item_id: &str) -> Option<(&'static str, String)>
/// Return the pipeline stage name for a path if it is a `.md` file living
/// directly inside one of the known work subdirectories, otherwise `None`.
///
/// Explicitly returns `None` for any path under `.story_kit/worktrees/` so
/// Explicitly returns `None` for any path under `.storkit/worktrees/` so
/// that code changes made by agents in their isolated worktrees are never
/// auto-committed to master by the watcher.
fn stage_for_path(path: &Path) -> Option<String> {
@@ -117,11 +117,11 @@ fn stage_for_path(path: &Path) -> Option<String> {
/// Stage all changes in the work directory and commit with the given message.
///
/// Uses `git add -A .story_kit/work/` to catch both additions and deletions in
/// Uses `git add -A .storkit/work/` to catch both additions and deletions in
/// a single commit. Returns `Ok(true)` if a commit was made, `Ok(false)` if
/// there was nothing to commit, and `Err` for unexpected failures.
fn git_add_work_and_commit(git_root: &Path, message: &str) -> Result<bool, String> {
let work_rel = PathBuf::from(".story_kit").join("work");
let work_rel = PathBuf::from(".storkit").join("work");
let add_out = std::process::Command::new("git")
.args(["add", "-A"])
@@ -170,7 +170,7 @@ fn should_commit_stage(stage: &str) -> bool {
///
/// Only files that still exist on disk are used to derive the commit message
/// (they represent the destination of a move or a new file). Deletions are
/// captured by `git add -A .story_kit/work/` automatically.
/// captured by `git add -A .storkit/work/` automatically.
///
/// Only terminal stages (`1_backlog` and `6_archived`) trigger git commits.
/// All stages broadcast a [`WatcherEvent`] so the frontend stays in sync.
@@ -338,9 +338,9 @@ fn sweep_done_to_archived(work_dir: &Path, git_root: &Path, done_retention: Dura
/// Start the filesystem watcher on a dedicated OS thread.
///
/// `work_dir` — absolute path to `.story_kit/work/` (watched recursively).
/// `work_dir` — absolute path to `.storkit/work/` (watched recursively).
/// `git_root` — project root (passed to `git` commands as cwd, and used to
/// derive the config file path `.story_kit/project.toml`).
/// derive the config file path `.storkit/project.toml`).
/// `event_tx` — broadcast sender; each connected WebSocket client holds a receiver.
/// `watcher_config` — initial sweep configuration loaded from `project.toml`.
pub fn start_watcher(
@@ -367,8 +367,8 @@ pub fn start_watcher(
return;
}
// Also watch .story_kit/project.toml for hot-reload of agent config.
let config_file = git_root.join(".story_kit").join("project.toml");
// Also watch .storkit/project.toml for hot-reload of agent config.
let config_file = git_root.join(".storkit").join("project.toml");
if config_file.exists()
&& let Err(e) = watcher.watch(&config_file, RecursiveMode::NonRecursive)
{
@@ -521,9 +521,9 @@ mod tests {
.expect("git initial commit");
}
/// Create the `.story_kit/work/{stage}/` dir tree inside `root`.
/// Create the `.storkit/work/{stage}/` dir tree inside `root`.
fn make_stage_dir(root: &std::path::Path, stage: &str) -> PathBuf {
let dir = root.join(".story_kit").join("work").join(stage);
let dir = root.join(".storkit").join("work").join(stage);
fs::create_dir_all(&dir).expect("create stage dir");
dir
}
@@ -702,7 +702,7 @@ mod tests {
make_stage_dir(tmp.path(), "2_current");
let deleted_path = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("work")
.join("2_current")
.join("42_story_foo.md");
@@ -731,7 +731,7 @@ mod tests {
let tmp = TempDir::new().unwrap();
init_git_repo(tmp.path());
// File sits in an unrecognised directory.
let unknown_dir = tmp.path().join(".story_kit").join("work").join("9_unknown");
let unknown_dir = tmp.path().join(".storkit").join("work").join("9_unknown");
fs::create_dir_all(&unknown_dir).unwrap();
let path = unknown_dir.join("42_story_foo.md");
fs::write(&path, "---\nname: test\n---\n").unwrap();
@@ -889,7 +889,7 @@ mod tests {
#[test]
fn stage_for_path_recognises_pipeline_dirs() {
let base = PathBuf::from("/proj/.story_kit/work");
let base = PathBuf::from("/proj/.storkit/work");
assert_eq!(
stage_for_path(&base.join("2_current/42_story_foo.md")),
Some("2_current".to_string())
@@ -911,7 +911,7 @@ mod tests {
#[test]
fn stage_for_path_ignores_worktree_paths() {
let worktrees = PathBuf::from("/proj/.story_kit/worktrees");
let worktrees = PathBuf::from("/proj/.storkit/worktrees");
// Code changes inside a worktree must be ignored.
assert_eq!(
@@ -922,14 +922,14 @@ mod tests {
// Even if a worktree happens to contain a path component that looks
// like a pipeline stage, it must still be ignored.
assert_eq!(
stage_for_path(&worktrees.join("42_story_foo/.story_kit/work/2_current/42_story_foo.md")),
stage_for_path(&worktrees.join("42_story_foo/.storkit/work/2_current/42_story_foo.md")),
None,
);
// A path that only contains the word "worktrees" as part of a longer
// segment (not an exact component) must NOT be filtered out.
assert_eq!(
stage_for_path(&PathBuf::from("/proj/.story_kit/work/2_current/not_worktrees_story.md")),
stage_for_path(&PathBuf::from("/proj/.storkit/work/2_current/not_worktrees_story.md")),
Some("2_current".to_string()),
);
}
@@ -968,7 +968,7 @@ mod tests {
#[test]
fn is_config_file_identifies_root_project_toml() {
let git_root = PathBuf::from("/proj");
let config = git_root.join(".story_kit").join("project.toml");
let config = git_root.join(".storkit").join("project.toml");
assert!(is_config_file(&config, &git_root));
}
@@ -977,7 +977,7 @@ mod tests {
let git_root = PathBuf::from("/proj");
// project.toml inside a worktree must NOT be treated as the root config.
let worktree_config = PathBuf::from(
"/proj/.story_kit/worktrees/42_story_foo/.story_kit/project.toml",
"/proj/.storkit/worktrees/42_story_foo/.storkit/project.toml",
);
assert!(!is_config_file(&worktree_config, &git_root));
}
@@ -987,11 +987,11 @@ mod tests {
let git_root = PathBuf::from("/proj");
// Random files must not match.
assert!(!is_config_file(
&PathBuf::from("/proj/.story_kit/work/2_current/42_story_foo.md"),
&PathBuf::from("/proj/.storkit/work/2_current/42_story_foo.md"),
&git_root
));
assert!(!is_config_file(
&PathBuf::from("/proj/.story_kit/README.md"),
&PathBuf::from("/proj/.storkit/README.md"),
&git_root
));
}
@@ -999,7 +999,7 @@ mod tests {
#[test]
fn is_config_file_rejects_wrong_root() {
let git_root = PathBuf::from("/proj");
let other_root_config = PathBuf::from("/other/.story_kit/project.toml");
let other_root_config = PathBuf::from("/other/.storkit/project.toml");
assert!(!is_config_file(&other_root_config, &git_root));
}
@@ -1008,7 +1008,7 @@ mod tests {
#[test]
fn sweep_moves_old_items_to_archived() {
let tmp = TempDir::new().unwrap();
let work_dir = tmp.path().join(".story_kit").join("work");
let work_dir = tmp.path().join(".storkit").join("work");
let done_dir = work_dir.join("5_done");
let archived_dir = work_dir.join("6_archived");
fs::create_dir_all(&done_dir).unwrap();
@@ -1036,7 +1036,7 @@ mod tests {
#[test]
fn sweep_keeps_recent_items_in_done() {
let tmp = TempDir::new().unwrap();
let work_dir = tmp.path().join(".story_kit").join("work");
let work_dir = tmp.path().join(".storkit").join("work");
let done_dir = work_dir.join("5_done");
fs::create_dir_all(&done_dir).unwrap();
@@ -1053,7 +1053,7 @@ mod tests {
#[test]
fn sweep_respects_custom_retention() {
let tmp = TempDir::new().unwrap();
let work_dir = tmp.path().join(".story_kit").join("work");
let work_dir = tmp.path().join(".storkit").join("work");
let done_dir = work_dir.join("5_done");
let archived_dir = work_dir.join("6_archived");
fs::create_dir_all(&done_dir).unwrap();
@@ -1083,7 +1083,7 @@ mod tests {
#[test]
fn sweep_custom_retention_keeps_younger_items() {
let tmp = TempDir::new().unwrap();
let work_dir = tmp.path().join(".story_kit").join("work");
let work_dir = tmp.path().join(".storkit").join("work");
let done_dir = work_dir.join("5_done");
fs::create_dir_all(&done_dir).unwrap();
@@ -1128,7 +1128,7 @@ mod tests {
let git_root = tmp.path().to_path_buf();
init_git_repo(&git_root);
let work_dir = git_root.join(".story_kit").join("work");
let work_dir = git_root.join(".storkit").join("work");
let done_dir = work_dir.join("5_done");
fs::create_dir_all(&done_dir).unwrap();
@@ -1169,7 +1169,7 @@ mod tests {
let git_root = tmp.path().to_path_buf();
init_git_repo(&git_root);
let work_dir = git_root.join(".story_kit").join("work");
let work_dir = git_root.join(".storkit").join("work");
let archived_dir = work_dir.join("6_archived");
fs::create_dir_all(&archived_dir).unwrap();
@@ -1204,7 +1204,7 @@ mod tests {
let git_root = tmp.path().to_path_buf();
init_git_repo(&git_root);
let work_dir = git_root.join(".story_kit").join("work");
let work_dir = git_root.join(".storkit").join("work");
let done_dir = work_dir.join("5_done");
fs::create_dir_all(&done_dir).unwrap();

View File

@@ -18,7 +18,7 @@ You have the following tools available:
YOUR WORKFLOW:
When the user requests a feature or change:
1. **Understand:** Read `.story_kit/README.md` if you haven't already to understand the development process
1. **Understand:** Read `.storkit/README.md` if you haven't already to understand the development process
2. **Explore:** Use `read_file` and `list_directory` to understand the current codebase structure
3. **Implement:** Use `write_file` to create or modify files directly
4. **Verify:** Use `exec_shell` to run tests, linters, or build commands to verify your changes work
@@ -100,7 +100,7 @@ Ask the user:
- Who are the target users?
- What are the core features or goals?
Then use `write_file` to write `.story_kit/specs/00_CONTEXT.md` with:
Then use `write_file` to write `.storkit/specs/00_CONTEXT.md` with:
- **High-Level Goal** — a clear, concise summary of what the project does
- **Core Features** — 3-5 bullet points
- **Domain Definition** — key terms and roles
@@ -114,7 +114,7 @@ Ask the user:
- What test runner(s)? (e.g. cargo test, pytest, jest, pnpm test)
- What linter(s)? (e.g. clippy, eslint, biome, ruff)
Then use `write_file` to write `.story_kit/specs/tech/STACK.md` with:
Then use `write_file` to write `.storkit/specs/tech/STACK.md` with:
- **Overview** of the architecture
- **Core Stack** — languages, frameworks, build tools
- **Coding Standards** — formatting, linting, quality gates
@@ -131,7 +131,7 @@ Based on the tech stack answers, use `write_file` to write `script/test` — a b
The script must start with `#!/usr/bin/env bash` and `set -euo pipefail`.
## Step 4: Project Configuration
The scaffold has written `.story_kit/project.toml` with example `[[component]]` sections. You must replace these examples with real definitions that match the project's actual tech stack.
The scaffold has written `.storkit/project.toml` with example `[[component]]` sections. You must replace these examples with real definitions that match the project's actual tech stack.
First, inspect the project structure to identify the tech stack:
- Use `list_directory(".")` to see top-level files and directories
@@ -139,9 +139,9 @@ First, inspect the project structure to identify the tech stack:
- Check subdirectories like `frontend/`, `backend/`, `app/`, `web/` for nested stacks
- If you find a `package.json`, check whether `pnpm-lock.yaml`, `yarn.lock`, or `package-lock.json` exists to determine the package manager
Then use `read_file(".story_kit/project.toml")` to see the current content, keeping the `[[agent]]` sections intact.
Then use `read_file(".storkit/project.toml")` to see the current content, keeping the `[[agent]]` sections intact.
Finally, use `write_file` to rewrite `.story_kit/project.toml` with real `[[component]]` entries. Each component needs:
Finally, use `write_file` to rewrite `.storkit/project.toml` with real `[[component]]` entries. Each component needs:
- `name` — component identifier (e.g. "backend", "frontend", "app")
- `path` — relative path from project root (use "." for root, "frontend" for a frontend subdirectory)
- `setup` — list of setup commands that install dependencies and verify the build (e.g. ["pnpm install"], ["cargo check"])

View File

@@ -55,7 +55,7 @@ async fn main() -> Result<(), std::io::Error> {
if let Some(explicit_root) = explicit_path {
// An explicit path was given on the command line.
// Open it directly — scaffold .story_kit/ if it is missing — and
// Open it directly — scaffold .storkit/ if it is missing — and
// exit with a clear error message if the path is invalid.
match io::fs::open_project(
explicit_root.to_string_lossy().to_string(),
@@ -76,7 +76,7 @@ async fn main() -> Result<(), std::io::Error> {
}
}
} else {
// No path argument — auto-detect a .story_kit/ project in cwd or
// No path argument — auto-detect a .storkit/ project in cwd or
// parent directories (preserves existing behaviour).
if let Some(project_root) = find_story_kit_root(&cwd) {
io::fs::open_project(
@@ -94,16 +94,16 @@ async fn main() -> Result<(), std::io::Error> {
config::ProjectConfig::load(&project_root)
.unwrap_or_else(|e| panic!("Invalid project.toml: {e}"));
} else {
// No .story_kit/ found — fall back to cwd so existing behaviour is preserved.
// No .storkit/ found — fall back to cwd so existing behaviour is preserved.
// TRACE:MERGE-DEBUG — remove once root cause is found
slog!("[MERGE-DEBUG] main: no .story_kit/ found, falling back to cwd {:?}", cwd);
slog!("[MERGE-DEBUG] main: no .storkit/ found, falling back to cwd {:?}", cwd);
*app_state.project_root.lock().unwrap() = Some(cwd.clone());
}
}
// Enable persistent server log file now that the project root is known.
if let Some(ref root) = *app_state.project_root.lock().unwrap() {
let log_dir = root.join(".story_kit").join("logs");
let log_dir = root.join(".storkit").join("logs");
let _ = std::fs::create_dir_all(&log_dir);
log_buffer::global().set_log_file(log_dir.join("server.log"));
}
@@ -120,7 +120,7 @@ async fn main() -> Result<(), std::io::Error> {
let watchdog_root: Option<PathBuf> = app_state.project_root.lock().unwrap().clone();
AgentPool::spawn_watchdog(Arc::clone(&agents), watchdog_root);
if let Some(ref root) = *app_state.project_root.lock().unwrap() {
let work_dir = root.join(".story_kit").join("work");
let work_dir = root.join(".storkit").join("work");
if work_dir.is_dir() {
let watcher_config = config::ProjectConfig::load(root)
.map(|c| c.watcher)
@@ -264,7 +264,7 @@ async fn main() -> Result<(), std::io::Error> {
// Optional Matrix bot: connect to the homeserver and start listening for
// messages if `.story_kit/bot.toml` is present and enabled.
// messages if `.storkit/bot.toml` is present and enabled.
if let Some(ref root) = startup_root {
matrix::spawn_bot(root, watcher_tx_for_bot, perm_rx_for_bot, Arc::clone(&startup_agents));
}
@@ -319,7 +319,7 @@ mod tests {
#[should_panic(expected = "Invalid project.toml: Duplicate agent name")]
fn panics_on_duplicate_agent_names() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(
sk.join("project.toml"),

View File

@@ -84,7 +84,7 @@ struct PersistedHistory {
}
/// Path to the persisted conversation history file relative to project root.
const HISTORY_FILE: &str = ".story_kit/matrix_history.json";
const HISTORY_FILE: &str = ".storkit/matrix_history.json";
/// Load conversation history from disk, returning an empty map on any error.
pub fn load_history(project_root: &std::path::Path) -> HashMap<OwnedRoomId, RoomConversation> {
@@ -214,7 +214,7 @@ pub async fn run_bot(
perm_rx: Arc<TokioMutex<mpsc::UnboundedReceiver<PermissionForward>>>,
agents: Arc<AgentPool>,
) -> Result<(), String> {
let store_path = project_root.join(".story_kit").join("matrix_store");
let store_path = project_root.join(".storkit").join("matrix_store");
let client = Client::builder()
.homeserver_url(&config.homeserver)
.sqlite_store(&store_path, None)
@@ -223,7 +223,7 @@ pub async fn run_bot(
.map_err(|e| format!("Failed to build Matrix client: {e}"))?;
// Persist device ID so E2EE crypto state survives restarts.
let device_id_path = project_root.join(".story_kit").join("matrix_device_id");
let device_id_path = project_root.join(".storkit").join("matrix_device_id");
let saved_device_id: Option<String> = std::fs::read_to_string(&device_id_path)
.ok()
.map(|s| s.trim().to_string())
@@ -1701,7 +1701,7 @@ mod tests {
#[test]
fn save_and_load_history_round_trip() {
let dir = tempfile::tempdir().unwrap();
let story_kit_dir = dir.path().join(".story_kit");
let story_kit_dir = dir.path().join(".storkit");
std::fs::create_dir_all(&story_kit_dir).unwrap();
let room_id: OwnedRoomId = "!persist:example.com".parse().unwrap();
@@ -1742,7 +1742,7 @@ mod tests {
#[test]
fn load_history_returns_empty_on_corrupt_file() {
let dir = tempfile::tempdir().unwrap();
let story_kit_dir = dir.path().join(".story_kit");
let story_kit_dir = dir.path().join(".storkit");
std::fs::create_dir_all(&story_kit_dir).unwrap();
std::fs::write(dir.path().join(HISTORY_FILE), "not valid json").unwrap();
let loaded = load_history(dir.path());

View File

@@ -62,7 +62,7 @@ pub(super) fn handle_move(ctx: &CommandContext) -> Option<String> {
'outer: for stage_dir in SEARCH_DIRS {
let dir = ctx
.project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage_dir);
if !dir.exists() {
@@ -143,7 +143,7 @@ mod tests {
}
fn write_story_file(root: &std::path::Path, stage: &str, filename: &str, content: &str) {
let dir = root.join(".story_kit/work").join(stage);
let dir = root.join(".storkit/work").join(stage);
std::fs::create_dir_all(&dir).unwrap();
std::fs::write(dir.join(filename), content).unwrap();
}
@@ -246,7 +246,7 @@ mod tests {
// Verify the file was actually moved.
let new_path = tmp
.path()
.join(".story_kit/work/2_current/42_story_some_feature.md");
.join(".storkit/work/2_current/42_story_some_feature.md");
assert!(new_path.exists(), "story file should be in 2_current/");
}

View File

@@ -108,7 +108,7 @@ fn find_story_name(root: &std::path::Path, num_str: &str) -> Option<String> {
"6_archived",
];
for stage in &stages {
let dir = root.join(".story_kit").join("work").join(stage);
let dir = root.join(".storkit").join("work").join(stage);
if !dir.exists() {
continue;
}

View File

@@ -34,7 +34,7 @@ pub(super) fn handle_show(ctx: &CommandContext) -> Option<String> {
for stage in &stages {
let dir = ctx
.project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage);
if !dir.exists() {
@@ -92,7 +92,7 @@ mod tests {
}
fn write_story_file(root: &std::path::Path, stage: &str, filename: &str, content: &str) {
let dir = root.join(".story_kit/work").join(stage);
let dir = root.join(".storkit/work").join(stage);
std::fs::create_dir_all(&dir).unwrap();
std::fs::write(dir.join(filename), content).unwrap();
}

View File

@@ -38,7 +38,7 @@ fn read_stage_items(
stage_dir: &str,
) -> Vec<(String, Option<String>)> {
let dir = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage_dir);
if !dir.exists() {
@@ -228,7 +228,7 @@ mod tests {
use tempfile::TempDir;
let tmp = TempDir::new().unwrap();
let stage_dir = tmp.path().join(".story_kit/work/2_current");
let stage_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&stage_dir).unwrap();
// Write a story file with a front-matter name
@@ -257,7 +257,7 @@ mod tests {
use tempfile::TempDir;
let tmp = TempDir::new().unwrap();
let stage_dir = tmp.path().join(".story_kit/work/2_current");
let stage_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&stage_dir).unwrap();
let story_path = stage_dir.join("293_story_register_all_bot_commands.md");
@@ -295,7 +295,7 @@ mod tests {
use tempfile::TempDir;
let tmp = TempDir::new().unwrap();
let stage_dir = tmp.path().join(".story_kit/work/2_current");
let stage_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&stage_dir).unwrap();
let story_path = stage_dir.join("293_story_register_all_bot_commands.md");
@@ -318,7 +318,7 @@ mod tests {
use tempfile::TempDir;
let tmp = TempDir::new().unwrap();
let stage_dir = tmp.path().join(".story_kit/work/2_current");
let stage_dir = tmp.path().join(".storkit/work/2_current");
std::fs::create_dir_all(&stage_dir).unwrap();
let story_path = stage_dir.join("293_story_register_all_bot_commands.md");

View File

@@ -9,7 +9,7 @@ fn default_permission_timeout_secs() -> u64 {
120
}
/// Configuration for the Matrix bot, read from `.story_kit/bot.toml`.
/// Configuration for the Matrix bot, read from `.storkit/bot.toml`.
#[derive(Deserialize, Clone, Debug)]
pub struct BotConfig {
/// Matrix homeserver URL, e.g. `https://matrix.example.com`
@@ -107,12 +107,12 @@ fn default_transport() -> String {
}
impl BotConfig {
/// Load bot configuration from `.story_kit/bot.toml`.
/// Load bot configuration from `.storkit/bot.toml`.
///
/// Returns `None` if the file does not exist, fails to parse, has
/// `enabled = false`, or specifies no room IDs.
pub fn load(project_root: &Path) -> Option<Self> {
let path = project_root.join(".story_kit").join("bot.toml");
let path = project_root.join(".storkit").join("bot.toml");
if !path.exists() {
return None;
}
@@ -201,7 +201,7 @@ impl BotConfig {
/// array, and writes the result back. Errors are logged but not propagated
/// so a persistence failure never interrupts the bot's message handling.
pub fn save_ambient_rooms(project_root: &Path, room_ids: &[String]) {
let path = project_root.join(".story_kit").join("bot.toml");
let path = project_root.join(".storkit").join("bot.toml");
let content = match std::fs::read_to_string(&path) {
Ok(c) => c,
Err(e) => {
@@ -250,7 +250,7 @@ mod tests {
#[test]
fn load_returns_none_when_disabled() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -270,7 +270,7 @@ enabled = false
#[test]
fn load_returns_config_when_enabled_with_room_ids() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -298,7 +298,7 @@ enabled = true
#[test]
fn load_merges_deprecated_room_id_into_room_ids() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
// Old-style single room_id key — should still work.
fs::write(
@@ -319,7 +319,7 @@ enabled = true
#[test]
fn load_returns_none_when_no_room_ids() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -338,7 +338,7 @@ enabled = true
#[test]
fn load_returns_none_when_toml_invalid() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(sk.join("bot.toml"), "not valid toml {{{").unwrap();
let result = BotConfig::load(tmp.path());
@@ -348,7 +348,7 @@ enabled = true
#[test]
fn load_respects_optional_model() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -369,7 +369,7 @@ model = "claude-sonnet-4-6"
#[test]
fn load_uses_default_history_size() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -389,7 +389,7 @@ enabled = true
#[test]
fn load_respects_custom_history_size() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -410,7 +410,7 @@ history_size = 50
#[test]
fn load_reads_display_name() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -431,7 +431,7 @@ display_name = "Timmy"
#[test]
fn load_display_name_defaults_to_none_when_absent() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -451,7 +451,7 @@ enabled = true
#[test]
fn load_uses_default_permission_timeout() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -471,7 +471,7 @@ enabled = true
#[test]
fn load_respects_custom_permission_timeout() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -495,7 +495,7 @@ permission_timeout_secs = 60
// must parse successfully — the field is simply ignored now that
// verification is always enforced unconditionally.
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -520,7 +520,7 @@ require_verified_devices = true
#[test]
fn load_reads_ambient_rooms() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -541,7 +541,7 @@ ambient_rooms = ["!abc:example.com"]
#[test]
fn load_ambient_rooms_defaults_to_empty_when_absent() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -561,7 +561,7 @@ enabled = true
#[test]
fn save_ambient_rooms_persists_to_bot_toml() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -583,7 +583,7 @@ enabled = true
#[test]
fn save_ambient_rooms_clears_when_empty() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -606,7 +606,7 @@ ambient_rooms = ["!abc:example.com"]
#[test]
fn load_transport_defaults_to_matrix() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -626,7 +626,7 @@ enabled = true
#[test]
fn load_transport_reads_custom_value() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -662,7 +662,7 @@ whatsapp_verify_token = "my-verify"
#[test]
fn load_whatsapp_returns_none_when_missing_phone_number_id() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -683,7 +683,7 @@ whatsapp_verify_token = "my-verify"
#[test]
fn load_whatsapp_returns_none_when_missing_access_token() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -704,7 +704,7 @@ whatsapp_verify_token = "my-verify"
#[test]
fn load_whatsapp_returns_none_when_missing_verify_token() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -727,7 +727,7 @@ whatsapp_access_token = "EAAtoken"
#[test]
fn load_slack_transport_reads_config() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -753,7 +753,7 @@ slack_channel_ids = ["C01ABCDEF"]
#[test]
fn load_slack_returns_none_when_missing_bot_token() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -774,7 +774,7 @@ slack_channel_ids = ["C01ABCDEF"]
#[test]
fn load_slack_returns_none_when_missing_signing_secret() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),
@@ -795,7 +795,7 @@ slack_channel_ids = ["C01ABCDEF"]
#[test]
fn load_slack_returns_none_when_missing_channel_ids() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
fs::create_dir_all(&sk).unwrap();
fs::write(
sk.join("bot.toml"),

View File

@@ -71,7 +71,7 @@ pub async fn handle_delete(
// Find the story file across all pipeline stages.
let mut found: Option<(std::path::PathBuf, &str, String)> = None; // (path, stage, story_id)
'outer: for stage in STAGES {
let dir = project_root.join(".story_kit").join("work").join(stage);
let dir = project_root.join(".storkit").join("work").join(stage);
if !dir.exists() {
continue;
}
@@ -152,7 +152,7 @@ pub async fn handle_delete(
// Commit the deletion to git.
let commit_msg = format!("story-kit: delete {story_id}");
let work_rel = std::path::PathBuf::from(".story_kit").join("work");
let work_rel = std::path::PathBuf::from(".storkit").join("work");
let _ = std::process::Command::new("git")
.args(["add", "-A"])
.arg(&work_rel)
@@ -296,7 +296,7 @@ mod tests {
let project_root = tmp.path();
// Create the pipeline directories.
for stage in &["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
std::fs::create_dir_all(project_root.join(".story_kit").join("work").join(stage))
std::fs::create_dir_all(project_root.join(".storkit").join("work").join(stage))
.unwrap();
}
let agents = std::sync::Arc::new(crate::agents::AgentPool::new_test(3000));
@@ -329,7 +329,7 @@ mod tests {
.output()
.unwrap();
let backlog_dir = project_root.join(".story_kit").join("work").join("1_backlog");
let backlog_dir = project_root.join(".storkit").join("work").join("1_backlog");
std::fs::create_dir_all(&backlog_dir).unwrap();
let story_path = backlog_dir.join("42_story_some_feature.md");
std::fs::write(

View File

@@ -1,6 +1,6 @@
//! Matrix bot integration for Story Kit.
//!
//! When a `.story_kit/bot.toml` file is present with `enabled = true`, the
//! When a `.storkit/bot.toml` file is present with `enabled = true`, the
//! server spawns a Matrix bot that:
//!
//! 1. Connects to the configured homeserver and joins the configured room.
@@ -37,7 +37,7 @@ use tokio::sync::{Mutex as TokioMutex, broadcast, mpsc};
/// Attempt to start the Matrix bot.
///
/// Reads the bot configuration from `.story_kit/bot.toml`. If the file is
/// Reads the bot configuration from `.storkit/bot.toml`. If the file is
/// absent or `enabled = false`, this function returns immediately without
/// spawning anything — the server continues normally.
///

View File

@@ -52,7 +52,7 @@ pub fn extract_story_number(item_id: &str) -> Option<&str> {
/// Returns `None` if the file doesn't exist or has no parseable name.
pub fn read_story_name(project_root: &Path, stage: &str, item_id: &str) -> Option<String> {
let path = project_root
.join(".story_kit")
.join(".storkit")
.join("work")
.join(stage)
.join(format!("{item_id}.md"));
@@ -243,7 +243,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let stage_dir = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("work")
.join("2_current");
std::fs::create_dir_all(&stage_dir).unwrap();
@@ -269,7 +269,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let stage_dir = tmp
.path()
.join(".story_kit")
.join(".storkit")
.join("work")
.join("2_current");
std::fs::create_dir_all(&stage_dir).unwrap();

View File

@@ -90,7 +90,7 @@ pub async fn handle_start(
// Find the story file across all pipeline stages.
let mut found: Option<(std::path::PathBuf, String)> = None; // (path, story_id)
'outer: for stage in STAGES {
let dir = project_root.join(".story_kit").join("work").join(stage);
let dir = project_root.join(".storkit").join("work").join(stage);
if !dir.exists() {
continue;
}
@@ -301,7 +301,7 @@ mod tests {
let tmp = tempfile::tempdir().unwrap();
let project_root = tmp.path();
for stage in &["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
std::fs::create_dir_all(project_root.join(".story_kit").join("work").join(stage))
std::fs::create_dir_all(project_root.join(".storkit").join("work").join(stage))
.unwrap();
}
let agents = std::sync::Arc::new(crate::agents::AgentPool::new_test(3000));

View File

@@ -402,7 +402,7 @@ struct PersistedSlackHistory {
}
/// Path to the persisted Slack conversation history file.
const SLACK_HISTORY_FILE: &str = ".story_kit/slack_history.json";
const SLACK_HISTORY_FILE: &str = ".storkit/slack_history.json";
/// Load Slack conversation history from disk.
pub fn load_slack_history(project_root: &std::path::Path) -> HashMap<String, RoomConversation> {
@@ -1244,7 +1244,7 @@ mod tests {
#[test]
fn save_and_load_slack_history_round_trips() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
let mut history = HashMap::new();
@@ -1288,7 +1288,7 @@ mod tests {
#[test]
fn load_slack_history_returns_empty_on_invalid_json() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(sk.join("slack_history.json"), "not json {{{").unwrap();
let history = load_slack_history(tmp.path());

View File

@@ -554,7 +554,7 @@ struct PersistedWhatsAppHistory {
}
/// Path to the persisted WhatsApp conversation history file.
const WHATSAPP_HISTORY_FILE: &str = ".story_kit/whatsapp_history.json";
const WHATSAPP_HISTORY_FILE: &str = ".storkit/whatsapp_history.json";
/// Load WhatsApp conversation history from disk.
pub fn load_whatsapp_history(
@@ -1322,7 +1322,7 @@ mod tests {
#[test]
fn save_and_load_whatsapp_history_round_trips() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
let mut history = HashMap::new();
@@ -1366,7 +1366,7 @@ mod tests {
#[test]
fn load_whatsapp_history_returns_empty_on_invalid_json() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
std::fs::write(sk.join("whatsapp_history.json"), "not json {{{").unwrap();
let history = load_whatsapp_history(tmp.path());
@@ -1376,7 +1376,7 @@ mod tests {
#[test]
fn save_whatsapp_history_preserves_multiple_senders() {
let tmp = tempfile::tempdir().unwrap();
let sk = tmp.path().join(".story_kit");
let sk = tmp.path().join(".storkit");
std::fs::create_dir_all(&sk).unwrap();
let mut history = HashMap::new();

View File

@@ -25,10 +25,10 @@ pub struct WorktreeListEntry {
pub path: PathBuf,
}
/// Worktree path inside the project: `{project_root}/.story_kit/worktrees/{story_id}`.
/// Worktree path inside the project: `{project_root}/.storkit/worktrees/{story_id}`.
pub fn worktree_path(project_root: &Path, story_id: &str) -> PathBuf {
project_root
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join(story_id)
}
@@ -56,7 +56,7 @@ fn detect_base_branch(project_root: &Path) -> String {
/// Create a git worktree for the given story.
///
/// - Creates the worktree at `{project_root}/.story_kit/worktrees/{story_id}`
/// - Creates the worktree at `{project_root}/.storkit/worktrees/{story_id}`
/// on branch `feature/story-{story_id}`.
/// - Writes `.mcp.json` in the worktree pointing to the MCP server at `port`.
/// - Runs setup commands from the config for each component.
@@ -149,14 +149,14 @@ fn create_worktree_sync(
}
// Enable sparse checkout to exclude pipeline files from the worktree.
// This prevents .story_kit/work/ changes from ending up in feature branches,
// This prevents .storkit/work/ changes from ending up in feature branches,
// which cause rename/delete merge conflicts when merging back to master.
configure_sparse_checkout(wt_path)?;
Ok(())
}
/// Placeholder for worktree isolation of `.story_kit/work/`.
/// Placeholder for worktree isolation of `.storkit/work/`.
///
/// Previous approaches (sparse checkout, skip-worktree) all leaked state
/// from worktrees back to the main checkout's config/index. For now this
@@ -218,11 +218,11 @@ pub async fn remove_worktree_by_story_id(
remove_worktree(project_root, &info, config).await
}
/// List all worktrees under `{project_root}/.story_kit/worktrees/`.
/// List all worktrees under `{project_root}/.storkit/worktrees/`.
/// Find the worktree path for a given story ID, if it exists.
pub fn find_worktree_path(project_root: &Path, story_id: &str) -> Option<PathBuf> {
let wt_path = project_root
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join(story_id);
if wt_path.is_dir() {
@@ -233,7 +233,7 @@ pub fn find_worktree_path(project_root: &Path, story_id: &str) -> Option<PathBuf
}
pub fn list_worktrees(project_root: &Path) -> Result<Vec<WorktreeListEntry>, String> {
let worktrees_dir = project_root.join(".story_kit").join("worktrees");
let worktrees_dir = project_root.join(".storkit").join("worktrees");
if !worktrees_dir.exists() {
return Ok(Vec::new());
}
@@ -375,7 +375,7 @@ mod tests {
let path = worktree_path(project_root, "42_my_story");
assert_eq!(
path,
Path::new("/home/user/my-project/.story_kit/worktrees/42_my_story")
Path::new("/home/user/my-project/.storkit/worktrees/42_my_story")
);
}
@@ -389,7 +389,7 @@ mod tests {
#[test]
fn list_worktrees_returns_subdirs() {
let tmp = TempDir::new().unwrap();
let worktrees_dir = tmp.path().join(".story_kit").join("worktrees");
let worktrees_dir = tmp.path().join(".storkit").join("worktrees");
fs::create_dir_all(worktrees_dir.join("42_story_a")).unwrap();
fs::create_dir_all(worktrees_dir.join("43_story_b")).unwrap();
// A file (not dir) — should be ignored
@@ -438,8 +438,8 @@ mod tests {
fs::create_dir_all(&project_root).unwrap();
init_git_repo(&project_root);
// Create a tracked file under .story_kit/work/ on the initial branch
let work_dir = project_root.join(".story_kit").join("work");
// Create a tracked file under .storkit/work/ on the initial branch
let work_dir = project_root.join(".storkit").join("work");
fs::create_dir_all(&work_dir).unwrap();
fs::write(work_dir.join("test_story.md"), "# Test").unwrap();
Command::new("git")
@@ -457,14 +457,14 @@ mod tests {
let branch = "feature/test-sparse";
create_worktree_sync(&project_root, &wt_path, branch).unwrap();
// Worktree should have all files including .story_kit/work/
assert!(wt_path.join(".story_kit").join("work").exists());
// Worktree should have all files including .storkit/work/
assert!(wt_path.join(".storkit").join("work").exists());
assert!(wt_path.join(".git").exists());
// Main checkout must NOT be affected by worktree creation.
assert!(
work_dir.exists(),
".story_kit/work/ must still exist in the main checkout"
".storkit/work/ must still exist in the main checkout"
);
}
@@ -656,7 +656,7 @@ mod tests {
init_git_repo(&project_root);
let wt_path = project_root
.join(".story_kit")
.join(".storkit")
.join("worktrees")
.join("test_rm");
create_worktree_sync(&project_root, &wt_path, "feature/test-rm").unwrap();