Files
storkit/server/src/io/fs.rs
Dave 9581e5d51a rename .story_kit directory to .storkit and update all references
Renames the config directory and updates 514 references across 42 Rust
source files, plus CLAUDE.md, .gitignore, Makefile, script/release,
and .mcp.json files. All 1205 tests pass.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-20 11:34:53 +00:00

1687 lines
58 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
use crate::state::SessionState;
use crate::store::StoreOps;
use serde::Serialize;
use serde_json::json;
use std::fs;
use std::path::{Path, PathBuf};
// Persistent key-value store keys for session-level preferences.
const KEY_LAST_PROJECT: &str = "last_project_path";
const KEY_SELECTED_MODEL: &str = "selected_model";
const KEY_KNOWN_PROJECTS: &str = "known_projects";
/// Canonical dev-process README, embedded at compile time from this
/// repository's own `.storkit/` directory and copied into newly
/// scaffolded projects by `scaffold_story_kit`.
const STORY_KIT_README: &str = include_str!("../../../.storkit/README.md");
/// Blank `specs/00_CONTEXT.md` template for new projects. The HTML marker
/// comment on the first line lets tooling recognize unedited scaffold output.
const STORY_KIT_CONTEXT: &str = "<!-- story-kit:scaffold-template -->\n\
# Project Context\n\
\n\
## High-Level Goal\n\
\n\
TODO: Describe the high-level goal of this project.\n\
\n\
## Core Features\n\
\n\
TODO: List the core features of this project.\n\
\n\
## Domain Definition\n\
\n\
TODO: Define the key domain concepts and entities.\n\
\n\
## Glossary\n\
\n\
TODO: Define abbreviations and technical terms.\n";
/// Blank `specs/tech/STACK.md` template for new projects.
const STORY_KIT_STACK: &str = "<!-- story-kit:scaffold-template -->\n\
# Tech Stack & Constraints\n\
\n\
## Core Stack\n\
\n\
TODO: Describe the language, frameworks, and runtimes.\n\
\n\
## Coding Standards\n\
\n\
TODO: Describe code style, linting rules, and error handling conventions.\n\
\n\
## Quality Gates\n\
\n\
TODO: List the commands that must pass before merging (e.g., cargo test, npm run build).\n\
\n\
## Libraries\n\
\n\
TODO: List approved libraries and their purpose.\n";
/// Stub `script/test` runner written into new projects; made executable by
/// `write_script_if_missing`. Exits 0 until the user adds real commands.
const STORY_KIT_SCRIPT_TEST: &str = "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's test commands here.\n# Story Kit agents invoke this script as the canonical test runner.\n# Exit 0 on success, non-zero on failure.\necho \"No tests configured\"\n";
/// Default root `CLAUDE.md`: documents the no-command-chaining rule implied
/// by the `Bash(...)` allow rules in `.claude/settings.json`.
const STORY_KIT_CLAUDE_MD: &str = "<!-- story-kit:scaffold-template -->\n\
Never chain shell commands with `&&`, `||`, or `;` in a single Bash call. \
The permission system validates the entire command string, and chained commands \
won't match allow rules like `Bash(git *)`. Use separate Bash calls instead — \
parallel calls work fine.\n\
\n\
Read .storkit/README.md to see our dev process.\n";
/// Default `.claude/settings.json` payload: a permission allowlist so that
/// Claude Code (agents and the web UI chat) can run common build/test/git
/// commands without interactive permission prompts. Raw string — the JSON
/// below is written to disk verbatim.
const STORY_KIT_CLAUDE_SETTINGS: &str = r#"{
"permissions": {
"allow": [
"Bash(cargo build:*)",
"Bash(cargo check:*)",
"Bash(cargo clippy:*)",
"Bash(cargo test:*)",
"Bash(cargo run:*)",
"Bash(cargo nextest run:*)",
"Bash(git *)",
"Bash(ls *)",
"Bash(mkdir *)",
"Bash(mv *)",
"Bash(rm *)",
"Bash(touch *)",
"Bash(echo:*)",
"Bash(pwd *)",
"Bash(pnpm install:*)",
"Bash(pnpm run build:*)",
"Bash(pnpm run test:*)",
"Bash(pnpm test:*)",
"Bash(pnpm build:*)",
"Bash(npm run build:*)",
"Bash(npx tsc:*)",
"Bash(npx vitest:*)",
"Bash(npx @biomejs/biome check:*)",
"Bash(npx playwright test:*)",
"Bash(script/test:*)",
"Bash(./script/test:*)",
"Edit",
"Write",
"mcp__story-kit__*"
]
},
"enabledMcpjsonServers": [
"storkit"
]
}
"#;
/// Default `[[agent]]` definitions (coder, qa, mergemaster) appended after
/// the detected `[[component]]` entries when generating `project.toml`.
/// `{{story_id}}` placeholders in the prompts are template variables —
/// presumably substituted when an agent is launched; confirm against the
/// agent-spawning code. Raw string written to disk verbatim.
const DEFAULT_PROJECT_AGENTS_TOML: &str = r#"# Project-wide default QA mode: "server", "agent", or "human".
# Per-story `qa` front matter overrides this setting.
default_qa = "server"
[[agent]]
name = "coder-1"
stage = "coder"
role = "Full-stack engineer. Implements features across all components."
model = "sonnet"
max_turns = 50
max_budget_usd = 5.00
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .storkit/README.md to understand the dev process. Follow the workflow through implementation and verification. The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop.\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits."
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Commit all your work before finishing. Do not accept stories, move them to archived, or merge to master."
[[agent]]
name = "qa"
stage = "qa"
role = "Reviews coder work: runs quality gates, generates testing plans, and reports findings."
model = "sonnet"
max_turns = 40
max_budget_usd = 4.00
prompt = "You are the QA agent for story {{story_id}}. Review the coder's work and produce a structured QA report. Run quality gates (linting, tests), attempt a build, and generate a manual testing plan. Do NOT modify any code."
system_prompt = "You are a QA agent. Your job is read-only: review code quality, run tests, and produce a structured QA report. Do not modify code."
[[agent]]
name = "mergemaster"
stage = "mergemaster"
role = "Merges completed work into master, runs quality gates, and archives stories."
model = "sonnet"
max_turns = 30
max_budget_usd = 5.00
prompt = "You are the mergemaster agent for story {{story_id}}. Call merge_agent_work(story_id='{{story_id}}') to start the merge pipeline. Then poll get_merge_status(story_id='{{story_id}}') every 15 seconds until the status is 'completed' or 'failed'. Report the final result. If the merge fails, call report_merge_failure."
system_prompt = "You are the mergemaster agent. Call merge_agent_work to start the merge, then poll get_merge_status every 15 seconds until done. Never manually move story files. Call report_merge_failure when merges fail."
"#;
/// Detect the tech stack from the project root and return TOML `[[component]]` entries.
///
/// Inspects well-known marker files at the project root to identify which
/// tech stacks are present, then emits one `[[component]]` entry per detected
/// stack with sensible default `setup` commands. If no markers are found, two
/// commented example components are returned so that the scaffold is
/// immediately usable and the pipeline never breaks on an unknown stack.
pub fn detect_components_toml(root: &Path) -> String {
    let mut sections = Vec::new();
    if root.join("Cargo.toml").exists() {
        sections.push(
            "[[component]]\nname = \"server\"\npath = \".\"\nsetup = [\"cargo check\"]\n"
                .to_string(),
        );
    }
    if root.join("package.json").exists() {
        // Prefer pnpm when its lockfile is present; otherwise assume npm.
        let setup_cmd = if root.join("pnpm-lock.yaml").exists() {
            "pnpm install"
        } else {
            "npm install"
        };
        sections.push(format!(
            "[[component]]\nname = \"frontend\"\npath = \".\"\nsetup = [\"{setup_cmd}\"]\n"
        ));
    }
    if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
        // Fix: only reference requirements.txt when that file actually exists.
        // A pyproject-only project previously got a setup command that was
        // guaranteed to fail; fall back to an editable install driven by
        // pyproject metadata instead.
        let setup_cmd = if root.join("requirements.txt").exists() {
            "pip install -r requirements.txt"
        } else {
            "pip install -e ."
        };
        sections.push(format!(
            "[[component]]\nname = \"python\"\npath = \".\"\nsetup = [\"{setup_cmd}\"]\n"
        ));
    }
    if root.join("go.mod").exists() {
        sections.push(
            "[[component]]\nname = \"go\"\npath = \".\"\nsetup = [\"go build ./...\"]\n"
                .to_string(),
        );
    }
    if root.join("Gemfile").exists() {
        sections.push(
            "[[component]]\nname = \"ruby\"\npath = \".\"\nsetup = [\"bundle install\"]\n"
                .to_string(),
        );
    }
    if sections.is_empty() {
        // No tech stack markers detected — emit two example components so that
        // the scaffold is immediately usable and agents can see the expected
        // format. The ONBOARDING_PROMPT instructs the chat agent to inspect
        // the project and replace these placeholders with real definitions.
        sections.push(
            "# EXAMPLE: Replace with your actual backend component.\n\
             # Common patterns: \"cargo check\" (Rust), \"go build ./...\" (Go),\n\
             # \"python -m pytest\" (Python), \"mvn verify\" (Java)\n\
             [[component]]\n\
             name = \"backend\"\n\
             path = \".\"\n\
             setup = [\"cargo check\"]\n\
             teardown = []\n"
                .to_string(),
        );
        sections.push(
            "# EXAMPLE: Replace with your actual frontend component.\n\
             # Common patterns: \"pnpm install\" (pnpm), \"npm install\" (npm),\n\
             # \"yarn\" (Yarn), \"bun install\" (Bun)\n\
             [[component]]\n\
             name = \"frontend\"\n\
             path = \".\"\n\
             setup = [\"pnpm install\"]\n\
             teardown = []\n"
                .to_string(),
        );
    }
    sections.join("\n")
}
/// Generate a complete `project.toml` for a new project at `root`.
///
/// Detects the tech stack via [`detect_components_toml`] and prepends the
/// resulting `[[component]]` entries before the default `[[agent]]` sections.
fn generate_project_toml(root: &Path) -> String {
    let mut toml = detect_components_toml(root);
    toml.push('\n');
    toml.push_str(DEFAULT_PROJECT_AGENTS_TOML);
    toml
}
/// Resolve a path argument supplied on the CLI against the given working
/// directory. Relative paths (including `.`) are joined with `cwd` and
/// then canonicalized when possible. Absolute paths are returned
/// canonicalized when possible, unchanged otherwise.
pub fn resolve_cli_path(cwd: &Path, path_arg: &str) -> PathBuf {
    let candidate = Path::new(path_arg);
    let absolute = if candidate.is_absolute() {
        candidate.to_path_buf()
    } else {
        cwd.join(candidate)
    };
    // Canonicalization resolves `.`, `..`, and symlinks. When the target does
    // not exist yet, fall back to the joined path so callers can create it.
    fs::canonicalize(&absolute).unwrap_or(absolute)
}
/// Walk from `start` up through parent directories, returning the first
/// directory that contains a `.storkit/` subdirectory, or `None`.
pub fn find_story_kit_root(start: &Path) -> Option<PathBuf> {
    // `ancestors()` yields `start` itself first, then each parent in turn,
    // which matches the old check-then-pop loop exactly.
    start
        .ancestors()
        .find(|dir| dir.join(".storkit").is_dir())
        .map(Path::to_path_buf)
}
/// Resolve the current user's home directory as a lossy UTF-8 string.
/// Errors when resolution fails or when no home directory is configured.
pub fn get_home_directory() -> Result<String, String> {
    match homedir::my_home() {
        Ok(Some(home)) => Ok(home.to_string_lossy().into_owned()),
        Ok(None) => Err("Home directory not found".to_string()),
        Err(e) => Err(format!("Failed to resolve home directory: {e}")),
    }
}
/// Resolves a relative path against the active project root (pure function for testing).
/// Returns error if path attempts traversal via a `..` component.
fn resolve_path_impl(root: PathBuf, relative_path: &str) -> Result<PathBuf, String> {
    // Fix: reject only genuine parent-directory traversal (a `..` path
    // component). The previous substring check also rejected legitimate
    // file names that merely contain consecutive dots, e.g. "notes..md".
    let has_traversal = Path::new(relative_path)
        .components()
        .any(|component| matches!(component, std::path::Component::ParentDir));
    if has_traversal {
        return Err("Security Violation: Directory traversal ('..') is not allowed.".to_string());
    }
    Ok(root.join(relative_path))
}
/// Resolves a relative path against the active project root.
/// Returns error if no project is open or if path attempts traversal (..).
fn resolve_path(state: &SessionState, relative_path: &str) -> Result<PathBuf, String> {
    resolve_path_impl(state.get_project_root()?, relative_path)
}
/// Validate that a path exists and is a directory (pure function for testing)
async fn validate_project_path(path: PathBuf) -> Result<(), String> {
    // Filesystem probing runs on the blocking pool so the async executor is
    // never stalled by disk I/O.
    tokio::task::spawn_blocking(move || {
        if path.is_dir() {
            return Ok(());
        }
        if path.exists() {
            Err(format!("Path is not a directory: {}", path.display()))
        } else {
            Err(format!("Path does not exist: {}", path.display()))
        }
    })
    .await
    .map_err(|e| format!("Task failed: {}", e))?
}
/// Write `content` to `path` only when no file exists there yet, so user
/// edits survive re-scaffolding.
fn write_file_if_missing(path: &Path, content: &str) -> Result<(), String> {
    if !path.exists() {
        fs::write(path, content).map_err(|e| format!("Failed to write file: {}", e))?;
    }
    Ok(())
}
/// Write `content` to `path` if missing, then ensure the file is executable.
/// The chmod runs even when the file already existed, so a previously
/// non-executable script is repaired. No-op permission-wise on non-unix.
fn write_script_if_missing(path: &Path, content: &str) -> Result<(), String> {
    write_file_if_missing(path, content)?;
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let metadata = fs::metadata(path)
            .map_err(|e| format!("Failed to read permissions for {}: {}", path.display(), e))?;
        let mut permissions = metadata.permissions();
        permissions.set_mode(0o755);
        fs::set_permissions(path, permissions)
            .map_err(|e| format!("Failed to set permissions on {}: {}", path.display(), e))?;
    }
    Ok(())
}
/// Write (or idempotently update) `.storkit/.gitignore` with Story Kit-specific
/// ignore patterns for files that live inside the `.storkit/` directory.
/// Patterns are relative to `.storkit/` as git resolves `.gitignore` files
/// relative to the directory that contains them.
fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
    // Entries that belong inside .storkit/.gitignore (relative to .storkit/).
    let entries = [
        "bot.toml",
        "matrix_store/",
        "matrix_device_id",
        "worktrees/",
        "merge_workspace/",
        "coverage/",
    ];
    let gitignore_path = root.join(".storkit").join(".gitignore");
    let existing = if gitignore_path.exists() {
        fs::read_to_string(&gitignore_path)
            .map_err(|e| format!("Failed to read .storkit/.gitignore: {}", e))?
    } else {
        String::new()
    };
    // Lines already present (trimmed); appending is skipped for these so the
    // function stays idempotent across repeated scaffolds.
    let present: Vec<&str> = existing.lines().map(str::trim).collect();
    let to_add: Vec<&str> = entries
        .iter()
        .copied()
        .filter(|entry| !present.contains(entry))
        .collect();
    if to_add.is_empty() {
        return Ok(());
    }
    let mut updated = existing;
    if !updated.is_empty() && !updated.ends_with('\n') {
        updated.push('\n');
    }
    for entry in to_add {
        updated.push_str(entry);
        updated.push('\n');
    }
    fs::write(&gitignore_path, updated)
        .map_err(|e| format!("Failed to write .storkit/.gitignore: {}", e))?;
    Ok(())
}
/// Append root-level Story Kit entries to the project `.gitignore`.
/// Only `store.json` and `.storkit_port` remain here because they live at
/// the project root and git does not support `../` patterns in `.gitignore`
/// files, so they cannot be expressed in `.storkit/.gitignore`.
fn append_root_gitignore_entries(root: &Path) -> Result<(), String> {
    let entries = [".storkit_port", "store.json"];
    let gitignore_path = root.join(".gitignore");
    let existing = if gitignore_path.exists() {
        fs::read_to_string(&gitignore_path)
            .map_err(|e| format!("Failed to read .gitignore: {}", e))?
    } else {
        String::new()
    };
    // An entry counts as present when any trimmed line matches it exactly.
    let is_absent = |candidate: &str| !existing.lines().any(|line| line.trim() == candidate);
    let additions: Vec<&str> = entries.into_iter().filter(|e| is_absent(e)).collect();
    if additions.is_empty() {
        return Ok(());
    }
    let mut updated = existing;
    if !updated.is_empty() && !updated.ends_with('\n') {
        updated.push('\n');
    }
    for addition in additions {
        updated.push_str(addition);
        updated.push('\n');
    }
    fs::write(&gitignore_path, updated)
        .map_err(|e| format!("Failed to write .gitignore: {}", e))?;
    Ok(())
}
/// Create the complete Story Kit scaffold under `root`.
///
/// Lays down the `.storkit/` tree (work pipeline stages, specs, README,
/// `project.toml`, `.gitignore`), an executable `script/test` stub, a root
/// `CLAUDE.md`, `.claude/settings.json`, and root `.gitignore` entries.
/// When `root` is not already a git repository, also runs `git init` and
/// commits the scaffold. All file writes go through `write_file_if_missing`
/// and so never clobber user edits on re-runs.
fn scaffold_story_kit(root: &Path) -> Result<(), String> {
    let story_kit_root = root.join(".storkit");
    let specs_root = story_kit_root.join("specs");
    let tech_root = specs_root.join("tech");
    let functional_root = specs_root.join("functional");
    let script_root = root.join("script");
    // Create the work/ pipeline directories, each with a .gitkeep so empty dirs survive git clone
    let work_stages = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    for stage in &work_stages {
        let dir = story_kit_root.join("work").join(stage);
        fs::create_dir_all(&dir)
            .map_err(|e| format!("Failed to create work/{}: {}", stage, e))?;
        write_file_if_missing(&dir.join(".gitkeep"), "")?;
    }
    fs::create_dir_all(&tech_root).map_err(|e| format!("Failed to create specs/tech: {}", e))?;
    fs::create_dir_all(&functional_root)
        .map_err(|e| format!("Failed to create specs/functional: {}", e))?;
    fs::create_dir_all(&script_root)
        .map_err(|e| format!("Failed to create script/ directory: {}", e))?;
    write_file_if_missing(&story_kit_root.join("README.md"), STORY_KIT_README)?;
    // project.toml = detected [[component]] entries + default [[agent]] sections.
    let project_toml_content = generate_project_toml(root);
    write_file_if_missing(&story_kit_root.join("project.toml"), &project_toml_content)?;
    write_file_if_missing(&specs_root.join("00_CONTEXT.md"), STORY_KIT_CONTEXT)?;
    write_file_if_missing(&tech_root.join("STACK.md"), STORY_KIT_STACK)?;
    // script/test must be executable — write_script_if_missing also chmods it.
    write_script_if_missing(&script_root.join("test"), STORY_KIT_SCRIPT_TEST)?;
    write_file_if_missing(&root.join("CLAUDE.md"), STORY_KIT_CLAUDE_MD)?;
    // Create .claude/settings.json with sensible permission defaults so that
    // Claude Code (both agents and web UI chat) can operate without constant
    // permission prompts.
    let claude_dir = root.join(".claude");
    fs::create_dir_all(&claude_dir)
        .map_err(|e| format!("Failed to create .claude/ directory: {}", e))?;
    write_file_if_missing(&claude_dir.join("settings.json"), STORY_KIT_CLAUDE_SETTINGS)?;
    write_story_kit_gitignore(root)?;
    append_root_gitignore_entries(root)?;
    // Run `git init` if the directory is not already a git repo, then make an initial commit
    if !root.join(".git").exists() {
        let init_status = std::process::Command::new("git")
            .args(["init"])
            .current_dir(root)
            .status()
            .map_err(|e| format!("Failed to run git init: {}", e))?;
        if !init_status.success() {
            return Err("git init failed".to_string());
        }
        let add_output = std::process::Command::new("git")
            .args(["add", ".storkit", "script", ".gitignore", "CLAUDE.md", ".claude"])
            .current_dir(root)
            .output()
            .map_err(|e| format!("Failed to run git add: {}", e))?;
        if !add_output.status.success() {
            return Err(format!(
                "git add failed: {}",
                String::from_utf8_lossy(&add_output.stderr)
            ));
        }
        // Commit with an explicit identity so this works even when the host
        // has no global git user configured.
        let commit_output = std::process::Command::new("git")
            .args([
                "-c",
                "user.email=story-kit@localhost",
                "-c",
                "user.name=Story Kit",
                "commit",
                "-m",
                "Initial Story Kit scaffold",
            ])
            .current_dir(root)
            .output()
            .map_err(|e| format!("Failed to run git commit: {}", e))?;
        if !commit_output.status.success() {
            return Err(format!(
                "git commit failed: {}",
                String::from_utf8_lossy(&commit_output.stderr)
            ));
        }
    }
    Ok(())
}
/// Make sure `path` exists as a directory and contains a `.storkit/` tree,
/// scaffolding one when absent. Runs on the blocking pool.
async fn ensure_project_root_with_story_kit(path: PathBuf) -> Result<(), String> {
    tokio::task::spawn_blocking(move || {
        if !path.exists() {
            fs::create_dir_all(&path)
                .map_err(|e| format!("Failed to create project directory: {}", e))?;
        }
        if path.join(".storkit").is_dir() {
            Ok(())
        } else {
            scaffold_story_kit(&path)
        }
    })
    .await
    .map_err(|e| format!("Task failed: {}", e))?
}
/// Open (and scaffold, if needed) the project at `path`: publish it as the
/// active project root, persist it as the last-opened project, and move it
/// to the front of the known-projects MRU list. Returns the opened path.
pub async fn open_project(
    path: String,
    state: &SessionState,
    store: &dyn StoreOps,
) -> Result<String, String> {
    let project_path = PathBuf::from(&path);
    ensure_project_root_with_story_kit(project_path.clone()).await?;
    validate_project_path(project_path.clone()).await?;
    {
        // TRACE:MERGE-DEBUG — remove once root cause is found
        crate::slog!("[MERGE-DEBUG] open_project: setting project_root to {:?}", project_path);
        let mut root_guard = state.project_root.lock().map_err(|e| e.to_string())?;
        *root_guard = Some(project_path);
    }
    store.set(KEY_LAST_PROJECT, json!(path));
    // Move this project to the front of the MRU list, dropping any duplicate.
    let mut known_projects = get_known_projects(store)?;
    known_projects.retain(|known| known != &path);
    known_projects.insert(0, path.clone());
    store.set(KEY_KNOWN_PROJECTS, json!(known_projects));
    store.save()?;
    Ok(path)
}
/// Close the active project: clear the in-memory root and drop the persisted
/// last-project entry so the next launch starts without one.
pub fn close_project(state: &SessionState, store: &dyn StoreOps) -> Result<(), String> {
    {
        // TRACE:MERGE-DEBUG — remove once root cause is found
        crate::slog!("[MERGE-DEBUG] close_project: setting project_root to None");
        let mut root_guard = state.project_root.lock().map_err(|e| e.to_string())?;
        root_guard.take();
    }
    store.delete(KEY_LAST_PROJECT);
    store.save()?;
    Ok(())
}
/// Return the active project path, if any.
///
/// Prefers the in-memory session root; when that is unset, falls back to the
/// persisted last-project path and — if it still points at an existing
/// directory — re-activates it in session state before returning it.
pub fn get_current_project(
    state: &SessionState,
    store: &dyn StoreOps,
) -> Result<Option<String>, String> {
    {
        let guard = state.project_root.lock().map_err(|e| e.to_string())?;
        if let Some(active) = guard.as_ref() {
            return Ok(Some(active.to_string_lossy().to_string()));
        }
    }
    let stored = match store
        .get(KEY_LAST_PROJECT)
        .as_ref()
        .and_then(|val| val.as_str())
        .map(str::to_string)
    {
        Some(s) => s,
        None => return Ok(None),
    };
    let candidate = PathBuf::from(&stored);
    if candidate.exists() && candidate.is_dir() {
        // TRACE:MERGE-DEBUG — remove once root cause is found
        crate::slog!(
            "[MERGE-DEBUG] get_current_project: project_root was None, \
            restoring from store to {:?}",
            candidate
        );
        let mut guard = state.project_root.lock().map_err(|e| e.to_string())?;
        *guard = Some(candidate);
        return Ok(Some(stored));
    }
    Ok(None)
}
/// Load the persisted known-projects list, silently skipping any entry that
/// is not a JSON string. Missing or malformed data yields an empty list.
pub fn get_known_projects(store: &dyn StoreOps) -> Result<Vec<String>, String> {
    let stored = store
        .get(KEY_KNOWN_PROJECTS)
        .and_then(|val| val.as_array().cloned())
        .unwrap_or_default();
    let projects = stored
        .iter()
        .filter_map(|val| val.as_str().map(String::from))
        .collect();
    Ok(projects)
}
/// Remove `path` from the known-projects list. Persists only when something
/// was actually removed; unknown paths are a silent no-op.
pub fn forget_known_project(path: String, store: &dyn StoreOps) -> Result<(), String> {
    let mut known = get_known_projects(store)?;
    let before = known.len();
    known.retain(|candidate| candidate != &path);
    if known.len() != before {
        store.set(KEY_KNOWN_PROJECTS, json!(known));
        store.save()?;
    }
    Ok(())
}
/// Read the persisted model preference, or `None` when unset.
pub fn get_model_preference(store: &dyn StoreOps) -> Result<Option<String>, String> {
    Ok(store
        .get(KEY_SELECTED_MODEL)
        .as_ref()
        .and_then(|val| val.as_str())
        .map(str::to_string))
}
/// Persist `model` as the selected model preference.
pub fn set_model_preference(model: String, store: &dyn StoreOps) -> Result<(), String> {
    store.set(KEY_SELECTED_MODEL, json!(model));
    store.save()?;
    Ok(())
}
/// Read `full_path` to a `String` on the blocking pool.
async fn read_file_impl(full_path: PathBuf) -> Result<String, String> {
    let read_result = tokio::task::spawn_blocking(move || {
        fs::read_to_string(&full_path).map_err(|e| format!("Failed to read file: {}", e))
    })
    .await;
    read_result.map_err(|e| format!("Task failed: {}", e))?
}
/// Read a project-relative file; rejects traversal and requires an open project.
pub async fn read_file(path: String, state: &SessionState) -> Result<String, String> {
    let resolved = resolve_path(state, &path)?;
    read_file_impl(resolved).await
}
/// Write `content` to `full_path` on the blocking pool, creating any missing
/// parent directories first.
async fn write_file_impl(full_path: PathBuf, content: String) -> Result<(), String> {
    tokio::task::spawn_blocking(move || {
        if let Some(parent_dir) = full_path.parent() {
            fs::create_dir_all(parent_dir)
                .map_err(|e| format!("Failed to create directories: {}", e))?;
        }
        fs::write(&full_path, content).map_err(|e| format!("Failed to write file: {}", e))
    })
    .await
    .map_err(|e| format!("Task failed: {}", e))?
}
pub async fn write_file(path: String, content: String, state: &SessionState) -> Result<(), String> {
let root = state.get_project_root()?;
let full_path = resolve_path_impl(root, &path)?;
write_file_impl(full_path, content).await
}
/// A single entry in a directory listing returned by the list-directory APIs.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct FileEntry {
    // File or directory name without any path components.
    pub name: String,
    // "dir" for directories, "file" for everything else (see list_directory_impl).
    pub kind: String,
}
/// List the immediate children of `full_path` on the blocking pool.
/// Directories sort before files; each group is ordered by name.
async fn list_directory_impl(full_path: PathBuf) -> Result<Vec<FileEntry>, String> {
    tokio::task::spawn_blocking(move || {
        let read_dir =
            fs::read_dir(&full_path).map_err(|e| format!("Failed to read dir: {}", e))?;
        let mut listing = Vec::new();
        for dir_entry in read_dir {
            let dir_entry = dir_entry.map_err(|e| e.to_string())?;
            let file_type = dir_entry.file_type().map_err(|e| e.to_string())?;
            let kind = if file_type.is_dir() { "dir" } else { "file" };
            listing.push(FileEntry {
                name: dir_entry.file_name().to_string_lossy().to_string(),
                kind: kind.to_string(),
            });
        }
        // Sort key: files after dirs (false < true), then name within a group.
        listing.sort_by(|a, b| {
            (a.kind == "file", a.name.as_str()).cmp(&(b.kind == "file", b.name.as_str()))
        });
        Ok(listing)
    })
    .await
    .map_err(|e| format!("Task failed: {}", e))?
}
/// List a project-relative directory; rejects traversal and requires an open project.
pub async fn list_directory(path: String, state: &SessionState) -> Result<Vec<FileEntry>, String> {
    let resolved = resolve_path(state, &path)?;
    list_directory_impl(resolved).await
}
/// List an absolute directory path without any project-root resolution.
pub async fn list_directory_absolute(path: String) -> Result<Vec<FileEntry>, String> {
    list_directory_impl(PathBuf::from(path)).await
}
/// Create a directory (and all missing parents) at an absolute path.
/// Returns `true` on success.
pub async fn create_directory_absolute(path: String) -> Result<bool, String> {
    let target = PathBuf::from(path);
    tokio::task::spawn_blocking(move || {
        fs::create_dir_all(&target)
            .map_err(|e| format!("Failed to create directory: {}", e))
            .map(|_| true)
    })
    .await
    .map_err(|e| format!("Task failed: {}", e))?
}
/// List all files in the project recursively, respecting .gitignore.
/// Returns relative paths from the project root (files only, not directories).
pub async fn list_project_files(state: &SessionState) -> Result<Vec<String>, String> {
    list_project_files_impl(state.get_project_root()?).await
}
/// Recursively collect files under `root`, honoring `.gitignore`, and return
/// their root-relative paths sorted lexicographically. Walks on the blocking
/// pool since the `ignore` walker does synchronous I/O.
pub async fn list_project_files_impl(root: PathBuf) -> Result<Vec<String>, String> {
    use ignore::WalkBuilder;
    let scan_root = root.clone();
    let files = tokio::task::spawn_blocking(move || {
        let mut relative_paths: Vec<String> = WalkBuilder::new(&scan_root)
            .git_ignore(true)
            .build()
            .flatten()
            .filter(|entry| matches!(entry.file_type(), Some(ft) if ft.is_file()))
            .map(|entry| {
                entry
                    .path()
                    .strip_prefix(&scan_root)
                    .unwrap_or(entry.path())
                    .to_string_lossy()
                    .to_string()
            })
            .collect();
        relative_paths.sort();
        relative_paths
    })
    .await
    .map_err(|e| format!("Task failed: {e}"))?;
    Ok(files)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::store::JsonFileStore;
use tempfile::tempdir;
// Creates a JsonFileStore backed by a scratch file inside the temp dir.
fn make_store(dir: &tempfile::TempDir) -> JsonFileStore {
    JsonFileStore::new(dir.path().join("test_store.json")).unwrap()
}
// Builds a SessionState whose project_root is pre-set to `path`.
fn make_state_with_root(path: PathBuf) -> SessionState {
    let state = SessionState::default();
    {
        let mut root = state.project_root.lock().unwrap();
        *root = Some(path);
    }
    state
}
// --- resolve_path_impl ---
#[test]
fn resolve_path_joins_relative_to_root() {
    let root = PathBuf::from("/projects/myapp");
    let result = resolve_path_impl(root, "src/main.rs").unwrap();
    assert_eq!(result, PathBuf::from("/projects/myapp/src/main.rs"));
}
#[test]
fn resolve_path_rejects_traversal() {
    // Parent-directory traversal must fail with a "traversal" error message.
    let root = PathBuf::from("/projects/myapp");
    let result = resolve_path_impl(root, "../etc/passwd");
    assert!(result.is_err());
    assert!(result.unwrap_err().contains("traversal"));
}
// --- open/close/get project ---
#[tokio::test]
async fn open_project_sets_root_and_persists() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().join("myproject");
    fs::create_dir_all(&project_dir).unwrap();
    let store = make_store(&dir);
    let state = SessionState::default();
    let result = open_project(
        project_dir.to_string_lossy().to_string(),
        &state,
        &store,
    )
    .await;
    assert!(result.is_ok());
    // Opening must publish the path into session state as the active root.
    let root = state.get_project_root().unwrap();
    assert_eq!(root, project_dir);
}
#[tokio::test]
async fn open_project_does_not_write_mcp_json() {
    // open_project must NOT overwrite .mcp.json — test servers started by QA
    // agents share the real project root, so writing here would clobber the
    // root .mcp.json with the wrong port. .mcp.json is written once during
    // worktree creation (worktree.rs) and should not be touched again.
    let dir = tempdir().unwrap();
    let project_dir = dir.path().join("myproject");
    fs::create_dir_all(&project_dir).unwrap();
    let store = make_store(&dir);
    let state = SessionState::default();
    open_project(
        project_dir.to_string_lossy().to_string(),
        &state,
        &store,
    )
    .await
    .unwrap();
    let mcp_path = project_dir.join(".mcp.json");
    assert!(
        !mcp_path.exists(),
        "open_project must not write .mcp.json — that would overwrite the root with the wrong port"
    );
}
#[tokio::test]
async fn close_project_clears_root() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().join("myproject");
    fs::create_dir_all(&project_dir).unwrap();
    let store = make_store(&dir);
    let state = make_state_with_root(project_dir);
    close_project(&state, &store).unwrap();
    // Closing must reset the in-memory root to None.
    let root = state.project_root.lock().unwrap();
    assert!(root.is_none());
}
#[tokio::test]
async fn get_current_project_returns_none_when_no_project() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    let state = SessionState::default();
    let result = get_current_project(&state, &store).unwrap();
    assert!(result.is_none());
}
#[tokio::test]
async fn get_current_project_returns_active_root() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    let state = make_state_with_root(dir.path().to_path_buf());
    let result = get_current_project(&state, &store).unwrap();
    assert!(result.is_some());
}
// --- known projects ---
#[test]
fn known_projects_empty_by_default() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    let projects = get_known_projects(&store).unwrap();
    assert!(projects.is_empty());
}
#[tokio::test]
async fn open_project_adds_to_known_projects() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().join("proj1");
    fs::create_dir_all(&project_dir).unwrap();
    let store = make_store(&dir);
    let state = SessionState::default();
    open_project(
        project_dir.to_string_lossy().to_string(),
        &state,
        &store,
    )
    .await
    .unwrap();
    // The opened project must be recorded in the known-projects list.
    let projects = get_known_projects(&store).unwrap();
    assert_eq!(projects.len(), 1);
}
#[test]
fn forget_known_project_removes_it() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    store.set(KEY_KNOWN_PROJECTS, json!(["/a", "/b", "/c"]));
    forget_known_project("/b".to_string(), &store).unwrap();
    let projects = get_known_projects(&store).unwrap();
    assert_eq!(projects, vec!["/a", "/c"]);
}
#[test]
fn forget_unknown_project_is_noop() {
    // Forgetting a path that was never recorded must leave the list intact.
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    store.set(KEY_KNOWN_PROJECTS, json!(["/a"]));
    forget_known_project("/nonexistent".to_string(), &store).unwrap();
    let projects = get_known_projects(&store).unwrap();
    assert_eq!(projects, vec!["/a"]);
}
// --- model preference ---
#[test]
fn model_preference_none_by_default() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    assert!(get_model_preference(&store).unwrap().is_none());
}
#[test]
fn set_and_get_model_preference() {
    // Round-trip: the value written by set must be returned by get.
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    set_model_preference("claude-3-sonnet".to_string(), &store).unwrap();
    assert_eq!(
        get_model_preference(&store).unwrap(),
        Some("claude-3-sonnet".to_string())
    );
}
// --- file operations ---
#[tokio::test]
async fn read_file_impl_reads_content() {
    let dir = tempdir().unwrap();
    let file = dir.path().join("test.txt");
    fs::write(&file, "hello world").unwrap();
    let content = read_file_impl(file).await.unwrap();
    assert_eq!(content, "hello world");
}
#[tokio::test]
async fn read_file_impl_errors_on_missing() {
    let dir = tempdir().unwrap();
    let result = read_file_impl(dir.path().join("missing.txt")).await;
    assert!(result.is_err());
}
#[tokio::test]
async fn write_file_impl_creates_and_writes() {
    // Parent directories that don't exist yet must be created by the write.
    let dir = tempdir().unwrap();
    let file = dir.path().join("sub").join("output.txt");
    write_file_impl(file.clone(), "content".to_string()).await.unwrap();
    assert_eq!(fs::read_to_string(&file).unwrap(), "content");
}
// --- list directory ---
#[tokio::test]
async fn list_directory_impl_returns_sorted_entries() {
    // Directories come first (alphabetical), then files.
    let dir = tempdir().unwrap();
    fs::create_dir(dir.path().join("zdir")).unwrap();
    fs::create_dir(dir.path().join("adir")).unwrap();
    fs::write(dir.path().join("file.txt"), "").unwrap();
    let entries = list_directory_impl(dir.path().to_path_buf()).await.unwrap();
    assert_eq!(entries[0].name, "adir");
    assert_eq!(entries[0].kind, "dir");
    assert_eq!(entries[1].name, "zdir");
    assert_eq!(entries[1].kind, "dir");
    assert_eq!(entries[2].name, "file.txt");
    assert_eq!(entries[2].kind, "file");
}
// --- validate_project_path ---
#[tokio::test]
async fn validate_project_path_rejects_missing() {
    let result = validate_project_path(PathBuf::from("/nonexistent/path")).await;
    assert!(result.is_err());
}
#[tokio::test]
async fn validate_project_path_rejects_file() {
    // A regular file is not an acceptable project root.
    let dir = tempdir().unwrap();
    let file = dir.path().join("not_a_dir.txt");
    fs::write(&file, "").unwrap();
    let result = validate_project_path(file).await;
    assert!(result.is_err());
}
#[tokio::test]
async fn validate_project_path_accepts_directory() {
    let dir = tempdir().unwrap();
    let result = validate_project_path(dir.path().to_path_buf()).await;
    assert!(result.is_ok());
}
// --- find_story_kit_root ---
#[test]
fn find_story_kit_root_returns_cwd_when_story_kit_in_cwd() {
    let tmp = tempfile::tempdir().unwrap();
    std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
    let result = find_story_kit_root(tmp.path());
    assert_eq!(result, Some(tmp.path().to_path_buf()));
}
#[test]
fn find_story_kit_root_returns_parent_when_story_kit_in_parent() {
    // The search must walk upward through nested subdirectories.
    let tmp = tempfile::tempdir().unwrap();
    std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
    let child = tmp.path().join("subdir").join("nested");
    std::fs::create_dir_all(&child).unwrap();
    let result = find_story_kit_root(&child);
    assert_eq!(result, Some(tmp.path().to_path_buf()));
}
#[test]
fn find_story_kit_root_returns_none_when_no_story_kit() {
    let tmp = tempfile::tempdir().unwrap();
    let result = find_story_kit_root(tmp.path());
    assert_eq!(result, None);
}
#[test]
fn find_story_kit_root_prefers_nearest_ancestor() {
    // When both a directory and its ancestor contain .storkit, the closer one wins.
    let tmp = tempfile::tempdir().unwrap();
    std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
    let child = tmp.path().join("inner");
    std::fs::create_dir_all(child.join(".storkit")).unwrap();
    let result = find_story_kit_root(&child);
    assert_eq!(result, Some(child));
}
// --- scaffold ---
#[test]
fn scaffold_story_kit_creates_structure() {
    // A fresh scaffold lays down the core .storkit files plus script/test.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    for expected in [
        ".storkit/README.md",
        ".storkit/project.toml",
        ".storkit/specs/00_CONTEXT.md",
        ".storkit/specs/tech/STACK.md",
        "script/test",
    ] {
        assert!(tmp.path().join(expected).exists());
    }
    // Old stories/ dirs should NOT be created
    assert!(!tmp.path().join(".storkit/stories").exists());
}
#[test]
fn scaffold_story_kit_creates_work_pipeline_dirs() {
    // Every pipeline stage directory must exist and hold a .gitkeep so git tracks it.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    for stage in ["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
        let stage_dir = tmp.path().join(".storkit/work").join(stage);
        assert!(stage_dir.is_dir(), "work/{} should be a directory", stage);
        assert!(
            stage_dir.join(".gitkeep").exists(),
            "work/{} should have a .gitkeep file",
            stage
        );
    }
}
#[test]
fn scaffold_story_kit_project_toml_has_coder_qa_mergemaster() {
    // The generated project.toml declares all three pipeline agents with a model.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let manifest = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    for needle in [
        "[[agent]]",
        "stage = \"coder\"",
        "stage = \"qa\"",
        "stage = \"mergemaster\"",
        "model = \"sonnet\"",
    ] {
        assert!(manifest.contains(needle));
    }
}
#[test]
fn scaffold_context_is_blank_template_not_story_kit_content() {
    // 00_CONTEXT.md must be the generic scaffold template, not Story Kit's own context.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let text = fs::read_to_string(tmp.path().join(".storkit/specs/00_CONTEXT.md")).unwrap();
    for needle in [
        "<!-- story-kit:scaffold-template -->",
        "## High-Level Goal",
        "## Core Features",
        "## Domain Definition",
        "## Glossary",
    ] {
        assert!(text.contains(needle));
    }
    // Must NOT contain Story Kit-specific content
    assert!(!text.contains("Agentic AI Code Assistant"));
    assert!(!text.contains("Poem HTTP server"));
}
#[test]
fn scaffold_stack_is_blank_template_not_story_kit_content() {
    // STACK.md must be the generic scaffold template, not Story Kit's own stack notes.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let text = fs::read_to_string(tmp.path().join(".storkit/specs/tech/STACK.md")).unwrap();
    for needle in [
        "<!-- story-kit:scaffold-template -->",
        "## Core Stack",
        "## Coding Standards",
        "## Quality Gates",
        "## Libraries",
    ] {
        assert!(text.contains(needle));
    }
    // Must NOT contain Story Kit-specific content
    assert!(!text.contains("Poem HTTP server"));
    assert!(!text.contains("TypeScript + React"));
}
#[cfg(unix)]
#[test]
fn scaffold_story_kit_creates_executable_script_test() {
    use std::os::unix::fs::PermissionsExt;
    // On unix the scaffolded test runner must carry at least one executable bit.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let runner = tmp.path().join("script/test");
    assert!(runner.exists(), "script/test should be created");
    let mode = fs::metadata(&runner).unwrap().permissions().mode();
    assert!(mode & 0o111 != 0, "script/test should be executable");
}
#[test]
fn scaffold_story_kit_does_not_overwrite_existing() {
    // Pre-existing files under .storkit survive a later scaffold untouched.
    let tmp = tempdir().unwrap();
    let readme = tmp.path().join(".storkit/README.md");
    fs::create_dir_all(readme.parent().unwrap()).unwrap();
    fs::write(&readme, "custom content").unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
}
#[test]
fn scaffold_story_kit_is_idempotent() {
    // Scaffolding twice must leave file contents unchanged and must not
    // duplicate entries in the generated .gitignore.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let readme_path = tmp.path().join(".storkit/README.md");
    let toml_path = tmp.path().join(".storkit/project.toml");
    let readme_before = fs::read_to_string(&readme_path).unwrap();
    let toml_before = fs::read_to_string(&toml_path).unwrap();
    // Run again — must not change content or add duplicate .gitignore entries
    scaffold_story_kit(tmp.path()).unwrap();
    assert_eq!(fs::read_to_string(&readme_path).unwrap(), readme_before);
    assert_eq!(fs::read_to_string(&toml_path).unwrap(), toml_before);
    let gitignore = fs::read_to_string(tmp.path().join(".storkit/.gitignore")).unwrap();
    let worktrees_entries = gitignore
        .lines()
        .filter(|l| l.trim() == "worktrees/")
        .count();
    assert_eq!(
        worktrees_entries,
        1,
        ".storkit/.gitignore should not have duplicate entries"
    );
}
#[test]
fn scaffold_story_kit_existing_git_repo_no_commit() {
    // Scaffolding into an already-initialized git repository must not add commits.
    let tmp = tempdir().unwrap();
    let run_git = |args: &[&str]| {
        std::process::Command::new("git")
            .args(args)
            .current_dir(tmp.path())
            .output()
            .unwrap()
    };
    // Initialize a git repo before scaffold
    run_git(&["init"]);
    run_git(&[
        "-c",
        "user.email=test@test.com",
        "-c",
        "user.name=Test",
        "commit",
        "--allow-empty",
        "-m",
        "pre-scaffold",
    ]);
    scaffold_story_kit(tmp.path()).unwrap();
    // Only 1 commit should exist — scaffold must not commit into an existing repo
    let log = run_git(&["log", "--oneline"]);
    assert_eq!(
        String::from_utf8_lossy(&log.stdout).lines().count(),
        1,
        "scaffold should not create a commit in an existing git repo"
    );
}
#[test]
fn scaffold_creates_story_kit_gitignore_with_relative_entries() {
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    // .storkit/.gitignore must contain relative patterns for files under .storkit/
    let sk = fs::read_to_string(tmp.path().join(".storkit/.gitignore")).unwrap();
    for needle in ["worktrees/", "merge_workspace/", "coverage/"] {
        assert!(sk.contains(needle));
    }
    // Must NOT contain absolute .storkit/ prefixed paths
    assert!(!sk.contains(".storkit/"));
    // Root .gitignore must contain root-level story-kit entries
    let root = fs::read_to_string(tmp.path().join(".gitignore")).unwrap();
    assert!(root.contains(".storkit_port"));
    assert!(root.contains("store.json"));
    // Root .gitignore must NOT contain .storkit/ sub-directory patterns
    for needle in [
        ".storkit/worktrees/",
        ".storkit/merge_workspace/",
        ".storkit/coverage/",
    ] {
        assert!(!root.contains(needle));
    }
}
#[test]
fn scaffold_story_kit_gitignore_does_not_duplicate_existing_entries() {
    let tmp = tempdir().unwrap();
    // Pre-create .storkit dir and .gitignore with some entries already present
    fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
    fs::write(
        tmp.path().join(".storkit/.gitignore"),
        "worktrees/\ncoverage/\n",
    )
    .unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let text = fs::read_to_string(tmp.path().join(".storkit/.gitignore")).unwrap();
    // Count exact (trimmed) line matches for a given ignore entry.
    let occurrences = |entry: &str| text.lines().filter(|l| l.trim() == entry).count();
    assert_eq!(occurrences("worktrees/"), 1, "worktrees/ should not be duplicated");
    assert_eq!(occurrences("coverage/"), 1, "coverage/ should not be duplicated");
    // The missing entry must have been added
    assert!(text.contains("merge_workspace/"));
}
// --- CLAUDE.md scaffold ---
#[test]
fn scaffold_creates_claude_md_at_project_root() {
    // Scaffold emits a CLAUDE.md with the sentinel and the agent ground rules.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let claude_path = tmp.path().join("CLAUDE.md");
    assert!(claude_path.exists(), "CLAUDE.md should be created at project root");
    let text = fs::read_to_string(&claude_path).unwrap();
    assert!(
        text.contains("<!-- story-kit:scaffold-template -->"),
        "CLAUDE.md should contain the scaffold sentinel"
    );
    assert!(
        text.contains("Read .storkit/README.md"),
        "CLAUDE.md should include directive to read .storkit/README.md"
    );
    assert!(
        text.contains("Never chain shell commands"),
        "CLAUDE.md should include command chaining rule"
    );
}
#[test]
fn scaffold_does_not_overwrite_existing_claude_md() {
    // A user-authored CLAUDE.md survives scaffolding untouched.
    let tmp = tempdir().unwrap();
    let claude_path = tmp.path().join("CLAUDE.md");
    fs::write(&claude_path, "custom CLAUDE.md content").unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    assert_eq!(
        fs::read_to_string(&claude_path).unwrap(),
        "custom CLAUDE.md content",
        "scaffold should not overwrite an existing CLAUDE.md"
    );
}
// --- open_project scaffolding ---
#[tokio::test]
async fn open_project_scaffolds_when_story_kit_missing() {
    // Opening a bare directory auto-scaffolds .storkit/.
    let tmp = tempdir().unwrap();
    let project = tmp.path().join("myproject");
    fs::create_dir_all(&project).unwrap();
    let store = make_store(&tmp);
    let state = SessionState::default();
    open_project(project.to_string_lossy().to_string(), &state, &store)
        .await
        .unwrap();
    // .storkit/ should have been created automatically
    assert!(project.join(".storkit").is_dir());
}
#[tokio::test]
async fn open_project_does_not_overwrite_existing_story_kit() {
    // Opening a project that already has .storkit/ must leave its files alone.
    let tmp = tempdir().unwrap();
    let project = tmp.path().join("myproject");
    let sk_dir = project.join(".storkit");
    fs::create_dir_all(&sk_dir).unwrap();
    let readme = sk_dir.join("README.md");
    fs::write(&readme, "custom content").unwrap();
    let store = make_store(&tmp);
    let state = SessionState::default();
    open_project(project.to_string_lossy().to_string(), &state, &store)
        .await
        .unwrap();
    // Existing .storkit/ content should not be overwritten
    assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
}
// --- resolve_cli_path ---
#[test]
fn resolve_cli_path_absolute_returned_unchanged_when_nonexistent() {
    // An absolute path that does not exist is passed through verbatim.
    let cwd = PathBuf::from("/some/cwd");
    assert_eq!(
        resolve_cli_path(&cwd, "/nonexistent/absolute/path"),
        PathBuf::from("/nonexistent/absolute/path")
    );
}
#[test]
fn resolve_cli_path_dot_resolves_to_cwd() {
    let tmp = tempdir().unwrap();
    let cwd = tmp.path().to_path_buf();
    let resolved = resolve_cli_path(&cwd, ".");
    // Canonicalize should resolve "." in an existing dir to the canonical cwd
    assert_eq!(resolved, cwd.canonicalize().unwrap_or(cwd));
}
#[test]
fn resolve_cli_path_relative_resolves_against_cwd() {
    // A relative component resolves against cwd and gets canonicalized.
    let tmp = tempdir().unwrap();
    let cwd = tmp.path().to_path_buf();
    let expected = cwd.join("sub");
    fs::create_dir_all(&expected).unwrap();
    let resolved = resolve_cli_path(&cwd, "sub");
    assert_eq!(resolved, expected.canonicalize().unwrap_or(expected));
}
#[test]
fn resolve_cli_path_nonexistent_relative_falls_back_to_joined() {
    let tmp = tempdir().unwrap();
    let cwd = tmp.path().to_path_buf();
    // Path doesn't exist yet — canonicalize fails, fallback is cwd/newproject
    assert_eq!(resolve_cli_path(&cwd, "newproject"), cwd.join("newproject"));
}
// --- detect_components_toml ---
#[test]
fn detect_no_markers_returns_fallback_components() {
    // An empty directory still yields usable example components.
    let tmp = tempdir().unwrap();
    let manifest = detect_components_toml(tmp.path());
    // At least one [[component]] entry should always be present
    assert!(
        manifest.contains("[[component]]"),
        "should always emit at least one component"
    );
    // The fallback should include example backend and frontend entries
    assert!(
        manifest.contains("name = \"backend\"") || manifest.contains("name = \"frontend\""),
        "fallback should include example component entries"
    );
}
#[test]
fn detect_cargo_toml_generates_rust_component() {
    // A Cargo.toml marker yields a "server" component set up with cargo check.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"test\"\n").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"server\""));
    assert!(manifest.contains("setup = [\"cargo check\"]"));
}
#[test]
fn detect_package_json_with_pnpm_lock_generates_pnpm_component() {
    // package.json plus pnpm-lock.yaml selects pnpm as the installer.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    fs::write(tmp.path().join("pnpm-lock.yaml"), "").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"frontend\""));
    assert!(manifest.contains("setup = [\"pnpm install\"]"));
}
#[test]
fn detect_package_json_without_pnpm_lock_generates_npm_component() {
    // package.json alone defaults to npm as the installer.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"frontend\""));
    assert!(manifest.contains("setup = [\"npm install\"]"));
}
#[test]
fn detect_pyproject_toml_generates_python_component() {
    // pyproject.toml marks a Python component with a pip-based setup.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("pyproject.toml"), "[project]\nname = \"test\"\n").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"python\""));
    assert!(manifest.contains("pip install"));
}
#[test]
fn detect_requirements_txt_generates_python_component() {
    // requirements.txt also marks a Python component with a pip-based setup.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("requirements.txt"), "flask\n").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"python\""));
    assert!(manifest.contains("pip install"));
}
#[test]
fn detect_go_mod_generates_go_component() {
    // go.mod marks a Go component built via go build.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"go\""));
    assert!(manifest.contains("setup = [\"go build ./...\"]"));
}
#[test]
fn detect_gemfile_generates_ruby_component() {
    // A Gemfile marks a Ruby component set up with bundle install.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Gemfile"), "source \"https://rubygems.org\"\n").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"ruby\""));
    assert!(manifest.contains("setup = [\"bundle install\"]"));
}
#[test]
fn detect_multiple_markers_generates_multiple_components() {
    // Rust and Node markers in the same root produce one component entry each.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"server\"\n").unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"server\""));
    assert!(manifest.contains("name = \"frontend\""));
    // Both component entries should be present
    assert_eq!(manifest.matches("[[component]]").count(), 2);
}
#[test]
fn detect_no_fallback_when_markers_found() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();
    // The fallback "app" component should NOT appear when a real stack is detected
    assert!(!detect_components_toml(tmp.path()).contains("name = \"app\""));
}
// --- generate_project_toml ---
#[test]
fn generate_project_toml_includes_both_components_and_agents() {
    // generate_project_toml must merge detected components with the agent roster.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();
    let manifest = generate_project_toml(tmp.path());
    // Component section
    assert!(manifest.contains("[[component]]"));
    assert!(manifest.contains("name = \"server\""));
    // Agent sections
    for needle in [
        "[[agent]]",
        "stage = \"coder\"",
        "stage = \"qa\"",
        "stage = \"mergemaster\"",
    ] {
        assert!(manifest.contains(needle));
    }
}
#[test]
fn scaffold_project_toml_contains_detected_components() {
    let tmp = tempdir().unwrap();
    // Place a Cargo.toml in the project root before scaffolding
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"myapp\"\n").unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let manifest = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    assert!(
        manifest.contains("[[component]]"),
        "project.toml should contain a component entry"
    );
    assert!(
        manifest.contains("name = \"server\""),
        "Rust project should have a 'server' component"
    );
    assert!(
        manifest.contains("cargo check"),
        "Rust component should have cargo check setup"
    );
}
#[test]
fn scaffold_project_toml_fallback_when_no_stack_detected() {
    // With no stack markers, the scaffolded manifest still ships example components.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    let manifest = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    assert!(
        manifest.contains("[[component]]"),
        "project.toml should always have at least one component"
    );
    // Fallback emits example components so the scaffold is immediately usable
    assert!(
        manifest.contains("name = \"backend\"") || manifest.contains("name = \"frontend\""),
        "fallback should include example component entries"
    );
}
#[test]
fn scaffold_does_not_overwrite_existing_project_toml_with_components() {
    // A hand-written project.toml must survive scaffolding byte-for-byte.
    let tmp = tempdir().unwrap();
    let sk_dir = tmp.path().join(".storkit");
    fs::create_dir_all(&sk_dir).unwrap();
    let existing = "[[component]]\nname = \"custom\"\npath = \".\"\nsetup = [\"make build\"]\n";
    fs::write(sk_dir.join("project.toml"), existing).unwrap();
    scaffold_story_kit(tmp.path()).unwrap();
    assert_eq!(
        fs::read_to_string(sk_dir.join("project.toml")).unwrap(),
        existing,
        "scaffold should not overwrite existing project.toml"
    );
}
// --- list_project_files_impl ---
#[tokio::test]
async fn list_project_files_returns_all_files() {
    // Files at the root and in subdirectories are both listed (relative paths).
    let tmp = tempdir().unwrap();
    fs::create_dir(tmp.path().join("src")).unwrap();
    fs::write(tmp.path().join("src/main.rs"), "fn main() {}").unwrap();
    fs::write(tmp.path().join("README.md"), "# readme").unwrap();
    let files = list_project_files_impl(tmp.path().to_path_buf())
        .await
        .unwrap();
    assert!(files.contains(&"README.md".to_string()));
    assert!(files.contains(&"src/main.rs".to_string()));
}
#[tokio::test]
async fn list_project_files_excludes_dirs_from_output() {
    // Directory entries themselves never appear in the listing, only files.
    let tmp = tempdir().unwrap();
    fs::create_dir(tmp.path().join("subdir")).unwrap();
    fs::write(tmp.path().join("file.txt"), "").unwrap();
    let files = list_project_files_impl(tmp.path().to_path_buf())
        .await
        .unwrap();
    assert!(files.contains(&"file.txt".to_string()));
    assert!(!files.iter().any(|f| f == "subdir"));
}
#[tokio::test]
async fn list_project_files_returns_sorted() {
    // Listing order is sorted regardless of file creation order.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("z.txt"), "").unwrap();
    fs::write(tmp.path().join("a.txt"), "").unwrap();
    let files = list_project_files_impl(tmp.path().to_path_buf())
        .await
        .unwrap();
    let pos = |name: &str| files.iter().position(|f| f == name).unwrap();
    assert!(pos("a.txt") < pos("z.txt"));
}
#[tokio::test]
async fn list_project_files_with_state() {
    // The state-based wrapper lists files from the session's project root.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("hello.rs"), "").unwrap();
    let state = make_state_with_root(tmp.path().to_path_buf());
    let files = list_project_files(&state).await.unwrap();
    assert!(files.contains(&"hello.rs".to_string()));
}
#[tokio::test]
async fn list_project_files_errors_without_project() {
    // With no project open, listing files is an error rather than an empty list.
    let state = SessionState::default();
    assert!(list_project_files(&state).await.is_err());
}
}