438be196c9
New projects now get bot.toml.matrix.example, bot.toml.whatsapp-meta.example, bot.toml.whatsapp-twilio.example, and bot.toml.slack.example in .storkit/ during scaffolding. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2008 lines
70 KiB
Rust
2008 lines
70 KiB
Rust
use crate::state::SessionState;
|
||
use crate::store::StoreOps;
|
||
use serde::Serialize;
|
||
use serde_json::json;
|
||
use std::fs;
|
||
use std::path::{Path, PathBuf};
|
||
|
||
const KEY_LAST_PROJECT: &str = "last_project_path";
|
||
const KEY_SELECTED_MODEL: &str = "selected_model";
|
||
const KEY_KNOWN_PROJECTS: &str = "known_projects";
|
||
|
||
const STORY_KIT_README: &str = include_str!("../../../.storkit/README.md");
|
||
|
||
const BOT_TOML_MATRIX_EXAMPLE: &str = include_str!("../../../.storkit/bot.toml.matrix.example");
|
||
const BOT_TOML_WHATSAPP_META_EXAMPLE: &str =
|
||
include_str!("../../../.storkit/bot.toml.whatsapp-meta.example");
|
||
const BOT_TOML_WHATSAPP_TWILIO_EXAMPLE: &str =
|
||
include_str!("../../../.storkit/bot.toml.whatsapp-twilio.example");
|
||
const BOT_TOML_SLACK_EXAMPLE: &str = include_str!("../../../.storkit/bot.toml.slack.example");
|
||
|
||
const STORY_KIT_CONTEXT: &str = "<!-- storkit:scaffold-template -->\n\
|
||
# Project Context\n\
|
||
\n\
|
||
## High-Level Goal\n\
|
||
\n\
|
||
TODO: Describe the high-level goal of this project.\n\
|
||
\n\
|
||
## Core Features\n\
|
||
\n\
|
||
TODO: List the core features of this project.\n\
|
||
\n\
|
||
## Domain Definition\n\
|
||
\n\
|
||
TODO: Define the key domain concepts and entities.\n\
|
||
\n\
|
||
## Glossary\n\
|
||
\n\
|
||
TODO: Define abbreviations and technical terms.\n";
|
||
|
||
const STORY_KIT_STACK: &str = "<!-- storkit:scaffold-template -->\n\
|
||
# Tech Stack & Constraints\n\
|
||
\n\
|
||
## Core Stack\n\
|
||
\n\
|
||
TODO: Describe the language, frameworks, and runtimes.\n\
|
||
\n\
|
||
## Coding Standards\n\
|
||
\n\
|
||
TODO: Describe code style, linting rules, and error handling conventions.\n\
|
||
\n\
|
||
## Quality Gates\n\
|
||
\n\
|
||
TODO: List the commands that must pass before merging (e.g., cargo test, npm run build).\n\
|
||
\n\
|
||
## Libraries\n\
|
||
\n\
|
||
TODO: List approved libraries and their purpose.\n";
|
||
|
||
const STORY_KIT_SCRIPT_TEST: &str = "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's test commands here.\n# Story Kit agents invoke this script as the canonical test runner.\n# Exit 0 on success, non-zero on failure.\necho \"No tests configured\"\n";
|
||
|
||
const STORY_KIT_CLAUDE_MD: &str = "<!-- storkit:scaffold-template -->\n\
|
||
Never chain shell commands with `&&`, `||`, or `;` in a single Bash call. \
|
||
The permission system validates the entire command string, and chained commands \
|
||
won't match allow rules like `Bash(git *)`. Use separate Bash calls instead — \
|
||
parallel calls work fine.\n\
|
||
\n\
|
||
Read .storkit/README.md to see our dev process.\n";
|
||
|
||
const STORY_KIT_CLAUDE_SETTINGS: &str = r#"{
|
||
"permissions": {
|
||
"allow": [
|
||
"Bash(cargo build:*)",
|
||
"Bash(cargo check:*)",
|
||
"Bash(cargo clippy:*)",
|
||
"Bash(cargo test:*)",
|
||
"Bash(cargo run:*)",
|
||
"Bash(cargo nextest run:*)",
|
||
"Bash(git *)",
|
||
"Bash(ls *)",
|
||
"Bash(mkdir *)",
|
||
"Bash(mv *)",
|
||
"Bash(rm *)",
|
||
"Bash(touch *)",
|
||
"Bash(echo:*)",
|
||
"Bash(pwd *)",
|
||
"Bash(pnpm install:*)",
|
||
"Bash(pnpm run build:*)",
|
||
"Bash(pnpm run test:*)",
|
||
"Bash(pnpm test:*)",
|
||
"Bash(pnpm build:*)",
|
||
"Bash(npm run build:*)",
|
||
"Bash(npx tsc:*)",
|
||
"Bash(npx vitest:*)",
|
||
"Bash(npx @biomejs/biome check:*)",
|
||
"Bash(npx playwright test:*)",
|
||
"Bash(script/test:*)",
|
||
"Bash(./script/test:*)",
|
||
"Edit",
|
||
"Write",
|
||
"mcp__storkit__*"
|
||
]
|
||
},
|
||
"enabledMcpjsonServers": [
|
||
"storkit"
|
||
]
|
||
}
|
||
"#;
|
||
|
||
const DEFAULT_PROJECT_AGENTS_TOML: &str = r#"# Project-wide default QA mode: "server", "agent", or "human".
|
||
# Per-story `qa` front matter overrides this setting.
|
||
default_qa = "server"
|
||
|
||
[[agent]]
|
||
name = "coder-1"
|
||
stage = "coder"
|
||
role = "Full-stack engineer. Implements features across all components."
|
||
model = "sonnet"
|
||
max_turns = 50
|
||
max_budget_usd = 5.00
|
||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .storkit/README.md to understand the dev process. Follow the workflow through implementation and verification. The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop.\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits.\n\nIf `script/test` still contains the generic 'No tests configured' stub, update it to run the project's actual test suite before starting implementation."
|
||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Commit all your work before finishing. Do not accept stories, move them to archived, or merge to master."
|
||
|
||
[[agent]]
|
||
name = "qa"
|
||
stage = "qa"
|
||
role = "Reviews coder work: runs quality gates, generates testing plans, and reports findings."
|
||
model = "sonnet"
|
||
max_turns = 40
|
||
max_budget_usd = 4.00
|
||
prompt = "You are the QA agent for story {{story_id}}. Review the coder's work and produce a structured QA report. Run quality gates (linting, tests), attempt a build, and generate a manual testing plan. Do NOT modify any code."
|
||
system_prompt = "You are a QA agent. Your job is read-only: review code quality, run tests, and produce a structured QA report. Do not modify code."
|
||
|
||
[[agent]]
|
||
name = "mergemaster"
|
||
stage = "mergemaster"
|
||
role = "Merges completed work into master, runs quality gates, and archives stories."
|
||
model = "sonnet"
|
||
max_turns = 30
|
||
max_budget_usd = 5.00
|
||
prompt = "You are the mergemaster agent for story {{story_id}}. Call merge_agent_work(story_id='{{story_id}}') to start the merge pipeline. Then poll get_merge_status(story_id='{{story_id}}') every 15 seconds until the status is 'completed' or 'failed'. Report the final result. If the merge fails, call report_merge_failure."
|
||
system_prompt = "You are the mergemaster agent. Call merge_agent_work to start the merge, then poll get_merge_status every 15 seconds until done. Never manually move story files. Call report_merge_failure when merges fail."
|
||
"#;
|
||
|
||
/// Detect the tech stack from the project root and return TOML `[[component]]` entries.
///
/// Inspects well-known marker files at the project root to identify which
/// tech stacks are present, then emits one `[[component]]` entry per detected
/// stack with sensible default `setup` commands. If no markers are found, a
/// single fallback `app` component with an empty `setup` list is returned so
/// that the pipeline never breaks on an unknown stack.
pub fn detect_components_toml(root: &Path) -> String {
    // Render one `[[component]]` section. An empty `setup_cmd` produces an
    // empty setup list.
    fn component(name: &str, setup_cmd: &str) -> String {
        let setup = if setup_cmd.is_empty() {
            "[]".to_string()
        } else {
            format!("[\"{setup_cmd}\"]")
        };
        format!("[[component]]\nname = \"{name}\"\npath = \".\"\nsetup = {setup}\n")
    }

    let mut sections = Vec::new();

    if root.join("Cargo.toml").exists() {
        sections.push(component("server", "cargo check"));
    }

    if root.join("package.json").exists() {
        // Prefer pnpm when a pnpm lockfile is present.
        let setup_cmd = if root.join("pnpm-lock.yaml").exists() {
            "pnpm install"
        } else {
            "npm install"
        };
        sections.push(component("frontend", setup_cmd));
    }

    let has_requirements = root.join("requirements.txt").exists();
    if root.join("pyproject.toml").exists() || has_requirements {
        // Only reference requirements.txt when it actually exists. A
        // pyproject-only project previously got a setup command that always
        // fails; fall back to an editable install from pyproject instead.
        let setup_cmd = if has_requirements {
            "pip install -r requirements.txt"
        } else {
            "pip install -e ."
        };
        sections.push(component("python", setup_cmd));
    }

    if root.join("go.mod").exists() {
        sections.push(component("go", "go build ./..."));
    }

    if root.join("Gemfile").exists() {
        sections.push(component("ruby", "bundle install"));
    }

    if sections.is_empty() {
        // No tech stack markers detected — emit a single generic component
        // with an empty setup list. The ONBOARDING_PROMPT instructs the chat
        // agent to inspect the project and replace this with real definitions.
        sections.push(component("app", ""));
    }

    sections.join("\n")
}
|
||
|
||
/// Generate `script/test` content for a new project at `root`.
|
||
///
|
||
/// Inspects well-known marker files to identify which tech stacks are present
|
||
/// and emits the appropriate test commands. Multi-stack projects get combined
|
||
/// commands run sequentially. Falls back to the generic stub when no markers
|
||
/// are found so the scaffold is always valid.
|
||
pub fn detect_script_test(root: &Path) -> String {
|
||
let mut commands: Vec<&str> = Vec::new();
|
||
|
||
if root.join("Cargo.toml").exists() {
|
||
commands.push("cargo test");
|
||
}
|
||
|
||
if root.join("package.json").exists() {
|
||
if root.join("pnpm-lock.yaml").exists() {
|
||
commands.push("pnpm test");
|
||
} else {
|
||
commands.push("npm test");
|
||
}
|
||
}
|
||
|
||
if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
|
||
commands.push("pytest");
|
||
}
|
||
|
||
if root.join("go.mod").exists() {
|
||
commands.push("go test ./...");
|
||
}
|
||
|
||
if commands.is_empty() {
|
||
return STORY_KIT_SCRIPT_TEST.to_string();
|
||
}
|
||
|
||
let mut script = "#!/usr/bin/env bash\nset -euo pipefail\n\n".to_string();
|
||
for cmd in commands {
|
||
script.push_str(cmd);
|
||
script.push('\n');
|
||
}
|
||
script
|
||
}
|
||
|
||
/// Generate a complete `project.toml` for a new project at `root`.
|
||
///
|
||
/// Detects the tech stack via [`detect_components_toml`] and prepends the
|
||
/// resulting `[[component]]` entries before the default `[[agent]]` sections.
|
||
fn generate_project_toml(root: &Path) -> String {
|
||
let components = detect_components_toml(root);
|
||
format!("{components}\n{DEFAULT_PROJECT_AGENTS_TOML}")
|
||
}
|
||
|
||
/// Resolve a path argument supplied on the CLI against the given working
/// directory. Relative paths (including `.`) are joined with `cwd` and
/// then canonicalized when possible. Absolute paths are returned
/// canonicalized when possible, unchanged otherwise.
pub fn resolve_cli_path(cwd: &Path, path_arg: &str) -> PathBuf {
    let candidate = PathBuf::from(path_arg);
    let joined = if candidate.is_absolute() {
        candidate
    } else {
        cwd.join(candidate)
    };
    // `canonicalize` resolves `.`, `..` and symlinks, but fails for targets
    // that do not exist yet; fall back to the joined (non-canonical) path in
    // that case so callers can still create the target later.
    fs::canonicalize(&joined).unwrap_or(joined)
}
|
||
|
||
/// Walk from `start` up through parent directories, returning the first
/// directory that contains a `.storkit/` subdirectory, or `None`.
pub fn find_story_kit_root(start: &Path) -> Option<PathBuf> {
    // `ancestors()` yields `start` itself first, then each parent up to the
    // filesystem root — the same sequence the manual pop() loop produced.
    start
        .ancestors()
        .find(|dir| dir.join(".storkit").is_dir())
        .map(Path::to_path_buf)
}
|
||
|
||
pub fn get_home_directory() -> Result<String, String> {
|
||
let home = homedir::my_home()
|
||
.map_err(|e| format!("Failed to resolve home directory: {e}"))?
|
||
.ok_or_else(|| "Home directory not found".to_string())?;
|
||
Ok(home.to_string_lossy().to_string())
|
||
}
|
||
|
||
/// Resolves a relative path against the active project root (pure function for testing).
///
/// Rejects absolute inputs and any `..` path component:
/// - `Path::join` replaces the base entirely when given an absolute path, so
///   an absolute `relative_path` would silently escape the project root.
/// - A `..` component could climb out of the root.
///
/// Unlike a plain substring check, component-based matching still allows
/// legitimate file names that merely contain two dots (e.g. `notes..md`).
fn resolve_path_impl(root: PathBuf, relative_path: &str) -> Result<PathBuf, String> {
    let rel = Path::new(relative_path);
    if rel.is_absolute() {
        return Err("Security Violation: Absolute paths are not allowed.".to_string());
    }
    if rel
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err("Security Violation: Directory traversal ('..') is not allowed.".to_string());
    }

    Ok(root.join(rel))
}
|
||
|
||
/// Resolves a relative path against the active project root.
|
||
/// Returns error if no project is open or if path attempts traversal (..).
|
||
fn resolve_path(state: &SessionState, relative_path: &str) -> Result<PathBuf, String> {
|
||
let root = state.get_project_root()?;
|
||
resolve_path_impl(root, relative_path)
|
||
}
|
||
|
||
/// Validate that a path exists and is a directory (pure function for testing)
|
||
async fn validate_project_path(path: PathBuf) -> Result<(), String> {
|
||
tokio::task::spawn_blocking(move || {
|
||
if !path.exists() {
|
||
return Err(format!("Path does not exist: {}", path.display()));
|
||
}
|
||
if !path.is_dir() {
|
||
return Err(format!("Path is not a directory: {}", path.display()));
|
||
}
|
||
Ok(())
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {}", e))?
|
||
}
|
||
|
||
/// Write `content` to `path` only when no file exists there yet.
///
/// Existing files are left untouched so scaffolding stays idempotent.
fn write_file_if_missing(path: &Path, content: &str) -> Result<(), String> {
    if !path.exists() {
        fs::write(path, content).map_err(|e| format!("Failed to write file: {}", e))?;
    }
    Ok(())
}
|
||
|
||
/// Write `content` to `path` if missing, then ensure the file is executable.
|
||
fn write_script_if_missing(path: &Path, content: &str) -> Result<(), String> {
|
||
write_file_if_missing(path, content)?;
|
||
|
||
#[cfg(unix)]
|
||
{
|
||
use std::os::unix::fs::PermissionsExt;
|
||
let mut perms = fs::metadata(path)
|
||
.map_err(|e| format!("Failed to read permissions for {}: {}", path.display(), e))?
|
||
.permissions();
|
||
perms.set_mode(0o755);
|
||
fs::set_permissions(path, perms)
|
||
.map_err(|e| format!("Failed to set permissions on {}: {}", path.display(), e))?;
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Write (or idempotently update) `.storkit/.gitignore` with Story Kit–specific
/// ignore patterns for files that live inside the `.storkit/` directory.
/// Patterns are relative to `.storkit/` as git resolves `.gitignore` files
/// relative to the directory that contains them.
fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
    // Entries that belong inside .storkit/.gitignore (relative to .storkit/).
    const ENTRIES: [&str; 11] = [
        "bot.toml",
        "matrix_store/",
        "matrix_device_id",
        "worktrees/",
        "merge_workspace/",
        "coverage/",
        "work/2_current/",
        "work/3_qa/",
        "work/4_merge/",
        "logs/",
        "token_usage.jsonl",
    ];

    let gitignore_path = root.join(".storkit").join(".gitignore");
    let existing = if gitignore_path.exists() {
        fs::read_to_string(&gitignore_path)
            .map_err(|e| format!("Failed to read .storkit/.gitignore: {}", e))?
    } else {
        String::new()
    };

    // Keep only the patterns not already present (compared line-by-line,
    // ignoring surrounding whitespace) so repeated calls never duplicate.
    let missing: Vec<&str> = ENTRIES
        .iter()
        .copied()
        .filter(|entry| !existing.lines().any(|line| line.trim() == *entry))
        .collect();
    if missing.is_empty() {
        return Ok(());
    }

    let mut updated = existing;
    // Guarantee the appended entries start on their own line.
    if !updated.is_empty() && !updated.ends_with('\n') {
        updated.push('\n');
    }
    for entry in missing {
        updated.push_str(entry);
        updated.push('\n');
    }

    fs::write(&gitignore_path, updated)
        .map_err(|e| format!("Failed to write .storkit/.gitignore: {}", e))
}
|
||
|
||
/// Append root-level Story Kit entries to the project `.gitignore`.
/// Only `store.json` and `.storkit_port` remain here because they live at
/// the project root and git does not support `../` patterns in `.gitignore`
/// files, so they cannot be expressed in `.storkit/.gitignore`.
fn append_root_gitignore_entries(root: &Path) -> Result<(), String> {
    const ENTRIES: [&str; 3] = [".storkit_port", "store.json", ".mcp.json"];

    let gitignore_path = root.join(".gitignore");
    let existing = if gitignore_path.exists() {
        fs::read_to_string(&gitignore_path)
            .map_err(|e| format!("Failed to read .gitignore: {}", e))?
    } else {
        String::new()
    };

    // Skip entries already present (whitespace-insensitive) so the function
    // stays idempotent across repeated scaffolding runs.
    let missing: Vec<&str> = ENTRIES
        .iter()
        .copied()
        .filter(|entry| !existing.lines().any(|line| line.trim() == *entry))
        .collect();
    if missing.is_empty() {
        return Ok(());
    }

    let mut updated = existing;
    // Guarantee the appended entries start on their own line.
    if !updated.is_empty() && !updated.ends_with('\n') {
        updated.push('\n');
    }
    for entry in missing {
        updated.push_str(entry);
        updated.push('\n');
    }

    fs::write(&gitignore_path, updated)
        .map_err(|e| format!("Failed to write .gitignore: {}", e))
}
|
||
|
||
/// Create the full Story Kit scaffold inside `root`.
///
/// Idempotent: every file is written through `write_file_if_missing`, so
/// existing user files are never overwritten. `port` is baked into the
/// generated `.mcp.json` so agents can reach the local MCP server. When the
/// directory is not yet a git repository, a repo is initialized and the
/// scaffold is committed with a fixed Story Kit identity.
fn scaffold_story_kit(root: &Path, port: u16) -> Result<(), String> {
    let story_kit_root = root.join(".storkit");
    let specs_root = story_kit_root.join("specs");
    let tech_root = specs_root.join("tech");
    let functional_root = specs_root.join("functional");
    let script_root = root.join("script");

    // Create the work/ pipeline directories, each with a .gitkeep so empty dirs survive git clone
    let work_stages = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    for stage in &work_stages {
        let dir = story_kit_root.join("work").join(stage);
        fs::create_dir_all(&dir).map_err(|e| format!("Failed to create work/{}: {}", stage, e))?;
        write_file_if_missing(&dir.join(".gitkeep"), "")?;
    }

    fs::create_dir_all(&tech_root).map_err(|e| format!("Failed to create specs/tech: {}", e))?;
    fs::create_dir_all(&functional_root)
        .map_err(|e| format!("Failed to create specs/functional: {}", e))?;
    fs::create_dir_all(&script_root)
        .map_err(|e| format!("Failed to create script/ directory: {}", e))?;

    // Template files: README, project.toml (components detected from the
    // tech stack), spec stubs, the canonical test script, and CLAUDE.md.
    write_file_if_missing(&story_kit_root.join("README.md"), STORY_KIT_README)?;
    let project_toml_content = generate_project_toml(root);
    write_file_if_missing(&story_kit_root.join("project.toml"), &project_toml_content)?;
    write_file_if_missing(&specs_root.join("00_CONTEXT.md"), STORY_KIT_CONTEXT)?;
    write_file_if_missing(&tech_root.join("STACK.md"), STORY_KIT_STACK)?;
    let script_test_content = detect_script_test(root);
    write_script_if_missing(&script_root.join("test"), &script_test_content)?;
    write_file_if_missing(&root.join("CLAUDE.md"), STORY_KIT_CLAUDE_MD)?;

    // Write per-transport bot.toml example files so users can see all options.
    write_file_if_missing(
        &story_kit_root.join("bot.toml.matrix.example"),
        BOT_TOML_MATRIX_EXAMPLE,
    )?;
    write_file_if_missing(
        &story_kit_root.join("bot.toml.whatsapp-meta.example"),
        BOT_TOML_WHATSAPP_META_EXAMPLE,
    )?;
    write_file_if_missing(
        &story_kit_root.join("bot.toml.whatsapp-twilio.example"),
        BOT_TOML_WHATSAPP_TWILIO_EXAMPLE,
    )?;
    write_file_if_missing(
        &story_kit_root.join("bot.toml.slack.example"),
        BOT_TOML_SLACK_EXAMPLE,
    )?;

    // Write .mcp.json at the project root so agents can find the MCP server.
    // Only written when missing — never overwrites an existing file, because
    // the port is environment-specific and must not clobber a running instance.
    let mcp_content = format!(
        "{{\n \"mcpServers\": {{\n \"storkit\": {{\n \"type\": \"http\",\n \"url\": \"http://localhost:{port}/mcp\"\n }}\n }}\n}}\n"
    );
    write_file_if_missing(&root.join(".mcp.json"), &mcp_content)?;

    // Create .claude/settings.json with sensible permission defaults so that
    // Claude Code (both agents and web UI chat) can operate without constant
    // permission prompts.
    let claude_dir = root.join(".claude");
    fs::create_dir_all(&claude_dir)
        .map_err(|e| format!("Failed to create .claude/ directory: {}", e))?;
    write_file_if_missing(&claude_dir.join("settings.json"), STORY_KIT_CLAUDE_SETTINGS)?;

    write_story_kit_gitignore(root)?;
    append_root_gitignore_entries(root)?;

    // Run `git init` if the directory is not already a git repo, then make an initial commit
    if !root.join(".git").exists() {
        let init_status = std::process::Command::new("git")
            .args(["init"])
            .current_dir(root)
            .status()
            .map_err(|e| format!("Failed to run git init: {}", e))?;
        if !init_status.success() {
            return Err("git init failed".to_string());
        }

        // Stage only the scaffold output — never pre-existing user files.
        let add_output = std::process::Command::new("git")
            .args([
                "add",
                ".storkit",
                "script",
                ".gitignore",
                "CLAUDE.md",
                ".claude",
            ])
            .current_dir(root)
            .output()
            .map_err(|e| format!("Failed to run git add: {}", e))?;
        if !add_output.status.success() {
            return Err(format!(
                "git add failed: {}",
                String::from_utf8_lossy(&add_output.stderr)
            ));
        }

        // Commit with an inline identity (`-c`) so this works even when the
        // user has no global git user configured.
        let commit_output = std::process::Command::new("git")
            .args([
                "-c",
                "user.email=storkit@localhost",
                "-c",
                "user.name=Story Kit",
                "commit",
                "-m",
                "Initial Story Kit scaffold",
            ])
            .current_dir(root)
            .output()
            .map_err(|e| format!("Failed to run git commit: {}", e))?;
        if !commit_output.status.success() {
            return Err(format!(
                "git commit failed: {}",
                String::from_utf8_lossy(&commit_output.stderr)
            ));
        }
    }

    Ok(())
}
|
||
|
||
async fn ensure_project_root_with_story_kit(path: PathBuf, port: u16) -> Result<(), String> {
|
||
tokio::task::spawn_blocking(move || {
|
||
if !path.exists() {
|
||
fs::create_dir_all(&path)
|
||
.map_err(|e| format!("Failed to create project directory: {}", e))?;
|
||
}
|
||
if !path.join(".storkit").is_dir() {
|
||
scaffold_story_kit(&path, port)?;
|
||
}
|
||
Ok(())
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {}", e))?
|
||
}
|
||
|
||
pub async fn open_project(
|
||
path: String,
|
||
state: &SessionState,
|
||
store: &dyn StoreOps,
|
||
port: u16,
|
||
) -> Result<String, String> {
|
||
let p = PathBuf::from(&path);
|
||
|
||
ensure_project_root_with_story_kit(p.clone(), port).await?;
|
||
validate_project_path(p.clone()).await?;
|
||
|
||
{
|
||
// TRACE:MERGE-DEBUG — remove once root cause is found
|
||
crate::slog!(
|
||
"[MERGE-DEBUG] open_project: setting project_root to {:?}",
|
||
p
|
||
);
|
||
let mut root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||
*root = Some(p);
|
||
}
|
||
|
||
store.set(KEY_LAST_PROJECT, json!(path));
|
||
|
||
let mut known_projects = get_known_projects(store)?;
|
||
|
||
known_projects.retain(|p| p != &path);
|
||
known_projects.insert(0, path.clone());
|
||
store.set(KEY_KNOWN_PROJECTS, json!(known_projects));
|
||
|
||
store.save()?;
|
||
|
||
Ok(path)
|
||
}
|
||
|
||
pub fn close_project(state: &SessionState, store: &dyn StoreOps) -> Result<(), String> {
|
||
{
|
||
// TRACE:MERGE-DEBUG — remove once root cause is found
|
||
crate::slog!("[MERGE-DEBUG] close_project: setting project_root to None");
|
||
let mut root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||
*root = None;
|
||
}
|
||
|
||
store.delete(KEY_LAST_PROJECT);
|
||
store.save()?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
pub fn get_current_project(
|
||
state: &SessionState,
|
||
store: &dyn StoreOps,
|
||
) -> Result<Option<String>, String> {
|
||
{
|
||
let root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||
if let Some(path) = &*root {
|
||
return Ok(Some(path.to_string_lossy().to_string()));
|
||
}
|
||
}
|
||
|
||
if let Some(path_str) = store
|
||
.get(KEY_LAST_PROJECT)
|
||
.as_ref()
|
||
.and_then(|val| val.as_str())
|
||
{
|
||
let p = PathBuf::from(path_str);
|
||
if p.exists() && p.is_dir() {
|
||
// TRACE:MERGE-DEBUG — remove once root cause is found
|
||
crate::slog!(
|
||
"[MERGE-DEBUG] get_current_project: project_root was None, \
|
||
restoring from store to {:?}",
|
||
p
|
||
);
|
||
let mut root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||
*root = Some(p);
|
||
return Ok(Some(path_str.to_string()));
|
||
}
|
||
}
|
||
|
||
Ok(None)
|
||
}
|
||
|
||
pub fn get_known_projects(store: &dyn StoreOps) -> Result<Vec<String>, String> {
|
||
let projects = store
|
||
.get(KEY_KNOWN_PROJECTS)
|
||
.and_then(|val| val.as_array().cloned())
|
||
.unwrap_or_default()
|
||
.into_iter()
|
||
.filter_map(|val| val.as_str().map(|s| s.to_string()))
|
||
.collect();
|
||
|
||
Ok(projects)
|
||
}
|
||
|
||
pub fn forget_known_project(path: String, store: &dyn StoreOps) -> Result<(), String> {
|
||
let mut known_projects = get_known_projects(store)?;
|
||
let original_len = known_projects.len();
|
||
|
||
known_projects.retain(|p| p != &path);
|
||
|
||
if known_projects.len() == original_len {
|
||
return Ok(());
|
||
}
|
||
|
||
store.set(KEY_KNOWN_PROJECTS, json!(known_projects));
|
||
store.save()?;
|
||
Ok(())
|
||
}
|
||
|
||
pub fn get_model_preference(store: &dyn StoreOps) -> Result<Option<String>, String> {
|
||
if let Some(model) = store
|
||
.get(KEY_SELECTED_MODEL)
|
||
.as_ref()
|
||
.and_then(|val| val.as_str())
|
||
{
|
||
return Ok(Some(model.to_string()));
|
||
}
|
||
Ok(None)
|
||
}
|
||
|
||
pub fn set_model_preference(model: String, store: &dyn StoreOps) -> Result<(), String> {
|
||
store.set(KEY_SELECTED_MODEL, json!(model));
|
||
store.save()?;
|
||
Ok(())
|
||
}
|
||
|
||
async fn read_file_impl(full_path: PathBuf) -> Result<String, String> {
|
||
tokio::task::spawn_blocking(move || {
|
||
fs::read_to_string(&full_path).map_err(|e| format!("Failed to read file: {}", e))
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {}", e))?
|
||
}
|
||
|
||
pub async fn read_file(path: String, state: &SessionState) -> Result<String, String> {
|
||
let full_path = resolve_path(state, &path)?;
|
||
read_file_impl(full_path).await
|
||
}
|
||
|
||
async fn write_file_impl(full_path: PathBuf, content: String) -> Result<(), String> {
|
||
tokio::task::spawn_blocking(move || {
|
||
if let Some(parent) = full_path.parent() {
|
||
fs::create_dir_all(parent)
|
||
.map_err(|e| format!("Failed to create directories: {}", e))?;
|
||
}
|
||
|
||
fs::write(&full_path, content).map_err(|e| format!("Failed to write file: {}", e))
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {}", e))?
|
||
}
|
||
|
||
pub async fn write_file(path: String, content: String, state: &SessionState) -> Result<(), String> {
|
||
let root = state.get_project_root()?;
|
||
let full_path = resolve_path_impl(root, &path)?;
|
||
write_file_impl(full_path, content).await
|
||
}
|
||
|
||
// A single directory entry returned by the directory-listing functions.
// Plain `//` comments (not `///`) are used so the poem_openapi-generated
// schema descriptions are unchanged.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct FileEntry {
    // Entry name only (no path components).
    pub name: String,
    // "dir" for directories, "file" for everything else — see list_directory_impl.
    pub kind: String,
}
|
||
|
||
async fn list_directory_impl(full_path: PathBuf) -> Result<Vec<FileEntry>, String> {
|
||
tokio::task::spawn_blocking(move || {
|
||
let entries = fs::read_dir(&full_path).map_err(|e| format!("Failed to read dir: {}", e))?;
|
||
|
||
let mut result = Vec::new();
|
||
for entry in entries {
|
||
let entry = entry.map_err(|e| e.to_string())?;
|
||
let ft = entry.file_type().map_err(|e| e.to_string())?;
|
||
let name = entry.file_name().to_string_lossy().to_string();
|
||
|
||
result.push(FileEntry {
|
||
name,
|
||
kind: if ft.is_dir() {
|
||
"dir".to_string()
|
||
} else {
|
||
"file".to_string()
|
||
},
|
||
});
|
||
}
|
||
|
||
result.sort_by(|a, b| match (a.kind.as_str(), b.kind.as_str()) {
|
||
("dir", "file") => std::cmp::Ordering::Less,
|
||
("file", "dir") => std::cmp::Ordering::Greater,
|
||
_ => a.name.cmp(&b.name),
|
||
});
|
||
|
||
Ok(result)
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {}", e))?
|
||
}
|
||
|
||
pub async fn list_directory(path: String, state: &SessionState) -> Result<Vec<FileEntry>, String> {
|
||
let full_path = resolve_path(state, &path)?;
|
||
list_directory_impl(full_path).await
|
||
}
|
||
|
||
pub async fn list_directory_absolute(path: String) -> Result<Vec<FileEntry>, String> {
|
||
let full_path = PathBuf::from(path);
|
||
list_directory_impl(full_path).await
|
||
}
|
||
|
||
pub async fn create_directory_absolute(path: String) -> Result<bool, String> {
|
||
let full_path = PathBuf::from(path);
|
||
tokio::task::spawn_blocking(move || {
|
||
fs::create_dir_all(&full_path).map_err(|e| format!("Failed to create directory: {}", e))?;
|
||
Ok(true)
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {}", e))?
|
||
}
|
||
|
||
/// List all files in the project recursively, respecting .gitignore.
|
||
/// Returns relative paths from the project root (files only, not directories).
|
||
pub async fn list_project_files(state: &SessionState) -> Result<Vec<String>, String> {
|
||
let root = state.get_project_root()?;
|
||
list_project_files_impl(root).await
|
||
}
|
||
|
||
pub async fn list_project_files_impl(root: PathBuf) -> Result<Vec<String>, String> {
|
||
use ignore::WalkBuilder;
|
||
|
||
let root_clone = root.clone();
|
||
let files = tokio::task::spawn_blocking(move || {
|
||
let mut result = Vec::new();
|
||
let walker = WalkBuilder::new(&root_clone).git_ignore(true).build();
|
||
|
||
for entry in walker.flatten() {
|
||
if entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
|
||
let relative = entry
|
||
.path()
|
||
.strip_prefix(&root_clone)
|
||
.unwrap_or(entry.path())
|
||
.to_string_lossy()
|
||
.to_string();
|
||
result.push(relative);
|
||
}
|
||
}
|
||
|
||
result.sort();
|
||
result
|
||
})
|
||
.await
|
||
.map_err(|e| format!("Task failed: {e}"))?;
|
||
|
||
Ok(files)
|
||
}
|
||
|
||
#[cfg(test)]
|
||
mod tests {
|
||
use super::*;
|
||
use crate::store::JsonFileStore;
|
||
use tempfile::tempdir;
|
||
|
||
fn make_store(dir: &tempfile::TempDir) -> JsonFileStore {
|
||
JsonFileStore::new(dir.path().join("test_store.json")).unwrap()
|
||
}
|
||
|
||
fn make_state_with_root(path: PathBuf) -> SessionState {
|
||
let state = SessionState::default();
|
||
{
|
||
let mut root = state.project_root.lock().unwrap();
|
||
*root = Some(path);
|
||
}
|
||
state
|
||
}
|
||
|
||
// --- resolve_path_impl ---
|
||
|
||
#[test]
|
||
fn resolve_path_joins_relative_to_root() {
|
||
let root = PathBuf::from("/projects/myapp");
|
||
let result = resolve_path_impl(root, "src/main.rs").unwrap();
|
||
assert_eq!(result, PathBuf::from("/projects/myapp/src/main.rs"));
|
||
}
|
||
|
||
#[test]
|
||
fn resolve_path_rejects_traversal() {
|
||
let root = PathBuf::from("/projects/myapp");
|
||
let result = resolve_path_impl(root, "../etc/passwd");
|
||
assert!(result.is_err());
|
||
assert!(result.unwrap_err().contains("traversal"));
|
||
}
|
||
|
||
// --- open/close/get project ---
|
||
|
||
#[tokio::test]
|
||
async fn open_project_sets_root_and_persists() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
let result = open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001).await;
|
||
|
||
assert!(result.is_ok());
|
||
let root = state.get_project_root().unwrap();
|
||
assert_eq!(root, project_dir);
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn open_project_does_not_overwrite_existing_mcp_json() {
|
||
// scaffold must NOT overwrite .mcp.json when it already exists — QA
|
||
// test servers share the real project root, and re-writing would
|
||
// clobber the file with the wrong port.
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
// Pre-write .mcp.json with a different port to simulate an already-configured project.
|
||
let mcp_path = project_dir.join(".mcp.json");
|
||
fs::write(&mcp_path, "{\"existing\": true}").unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001)
|
||
.await
|
||
.unwrap();
|
||
|
||
assert_eq!(
|
||
fs::read_to_string(&mcp_path).unwrap(),
|
||
"{\"existing\": true}",
|
||
"open_project must not overwrite an existing .mcp.json"
|
||
);
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn open_project_writes_mcp_json_when_missing() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001)
|
||
.await
|
||
.unwrap();
|
||
|
||
let mcp_path = project_dir.join(".mcp.json");
|
||
assert!(mcp_path.exists(), "open_project should write .mcp.json for new projects");
|
||
let content = fs::read_to_string(&mcp_path).unwrap();
|
||
assert!(content.contains("3001"), "mcp.json should reference the server port");
|
||
assert!(content.contains("localhost"), "mcp.json should reference localhost");
|
||
}
|
||
|
||
/// Regression test for bug 371: no-arg `storkit` in empty directory skips scaffold.
|
||
/// `open_project` on a directory without `.storkit/` must create all required scaffold
|
||
/// files — the same files that `storkit .` produces.
|
||
#[tokio::test]
|
||
async fn open_project_on_empty_dir_creates_full_scaffold() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001)
|
||
.await
|
||
.unwrap();
|
||
|
||
assert!(
|
||
project_dir.join(".storkit/project.toml").exists(),
|
||
"open_project must create .storkit/project.toml"
|
||
);
|
||
assert!(
|
||
project_dir.join(".mcp.json").exists(),
|
||
"open_project must create .mcp.json"
|
||
);
|
||
assert!(
|
||
project_dir.join("CLAUDE.md").exists(),
|
||
"open_project must create CLAUDE.md"
|
||
);
|
||
assert!(
|
||
project_dir.join("script/test").exists(),
|
||
"open_project must create script/test"
|
||
);
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn close_project_clears_root() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
let store = make_store(&dir);
|
||
let state = make_state_with_root(project_dir);
|
||
|
||
close_project(&state, &store).unwrap();
|
||
|
||
let root = state.project_root.lock().unwrap();
|
||
assert!(root.is_none());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn get_current_project_returns_none_when_no_project() {
|
||
let dir = tempdir().unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
let result = get_current_project(&state, &store).unwrap();
|
||
assert!(result.is_none());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn get_current_project_returns_active_root() {
|
||
let dir = tempdir().unwrap();
|
||
let store = make_store(&dir);
|
||
let state = make_state_with_root(dir.path().to_path_buf());
|
||
|
||
let result = get_current_project(&state, &store).unwrap();
|
||
assert!(result.is_some());
|
||
}
|
||
|
||
// --- known projects ---
|
||
|
||
#[test]
|
||
fn known_projects_empty_by_default() {
|
||
let dir = tempdir().unwrap();
|
||
let store = make_store(&dir);
|
||
let projects = get_known_projects(&store).unwrap();
|
||
assert!(projects.is_empty());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn open_project_adds_to_known_projects() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("proj1");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001)
|
||
.await
|
||
.unwrap();
|
||
|
||
let projects = get_known_projects(&store).unwrap();
|
||
assert_eq!(projects.len(), 1);
|
||
}
|
||
|
||
#[test]
|
||
fn forget_known_project_removes_it() {
|
||
let dir = tempdir().unwrap();
|
||
let store = make_store(&dir);
|
||
|
||
store.set(KEY_KNOWN_PROJECTS, json!(["/a", "/b", "/c"]));
|
||
forget_known_project("/b".to_string(), &store).unwrap();
|
||
|
||
let projects = get_known_projects(&store).unwrap();
|
||
assert_eq!(projects, vec!["/a", "/c"]);
|
||
}
|
||
|
||
#[test]
|
||
fn forget_unknown_project_is_noop() {
|
||
let dir = tempdir().unwrap();
|
||
let store = make_store(&dir);
|
||
|
||
store.set(KEY_KNOWN_PROJECTS, json!(["/a"]));
|
||
forget_known_project("/nonexistent".to_string(), &store).unwrap();
|
||
|
||
let projects = get_known_projects(&store).unwrap();
|
||
assert_eq!(projects, vec!["/a"]);
|
||
}
|
||
|
||
// --- model preference ---

#[test]
fn model_preference_none_by_default() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    assert!(get_model_preference(&store).unwrap().is_none());
}

// Round-trip: a stored preference is read back verbatim.
#[test]
fn set_and_get_model_preference() {
    let dir = tempdir().unwrap();
    let store = make_store(&dir);
    set_model_preference("claude-3-sonnet".to_string(), &store).unwrap();
    assert_eq!(
        get_model_preference(&store).unwrap(),
        Some("claude-3-sonnet".to_string())
    );
}
|
||
|
||
// --- file operations ---
|
||
|
||
#[tokio::test]
|
||
async fn read_file_impl_reads_content() {
|
||
let dir = tempdir().unwrap();
|
||
let file = dir.path().join("test.txt");
|
||
fs::write(&file, "hello world").unwrap();
|
||
|
||
let content = read_file_impl(file).await.unwrap();
|
||
assert_eq!(content, "hello world");
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn read_file_impl_errors_on_missing() {
|
||
let dir = tempdir().unwrap();
|
||
let result = read_file_impl(dir.path().join("missing.txt")).await;
|
||
assert!(result.is_err());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn write_file_impl_creates_and_writes() {
|
||
let dir = tempdir().unwrap();
|
||
let file = dir.path().join("sub").join("output.txt");
|
||
|
||
write_file_impl(file.clone(), "content".to_string())
|
||
.await
|
||
.unwrap();
|
||
|
||
assert_eq!(fs::read_to_string(&file).unwrap(), "content");
|
||
}
|
||
|
||
// --- list directory ---
|
||
|
||
#[tokio::test]
|
||
async fn list_directory_impl_returns_sorted_entries() {
|
||
let dir = tempdir().unwrap();
|
||
fs::create_dir(dir.path().join("zdir")).unwrap();
|
||
fs::create_dir(dir.path().join("adir")).unwrap();
|
||
fs::write(dir.path().join("file.txt"), "").unwrap();
|
||
|
||
let entries = list_directory_impl(dir.path().to_path_buf()).await.unwrap();
|
||
|
||
assert_eq!(entries[0].name, "adir");
|
||
assert_eq!(entries[0].kind, "dir");
|
||
assert_eq!(entries[1].name, "zdir");
|
||
assert_eq!(entries[1].kind, "dir");
|
||
assert_eq!(entries[2].name, "file.txt");
|
||
assert_eq!(entries[2].kind, "file");
|
||
}
|
||
|
||
// --- validate_project_path ---
|
||
|
||
#[tokio::test]
|
||
async fn validate_project_path_rejects_missing() {
|
||
let result = validate_project_path(PathBuf::from("/nonexistent/path")).await;
|
||
assert!(result.is_err());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn validate_project_path_rejects_file() {
|
||
let dir = tempdir().unwrap();
|
||
let file = dir.path().join("not_a_dir.txt");
|
||
fs::write(&file, "").unwrap();
|
||
|
||
let result = validate_project_path(file).await;
|
||
assert!(result.is_err());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn validate_project_path_accepts_directory() {
|
||
let dir = tempdir().unwrap();
|
||
let result = validate_project_path(dir.path().to_path_buf()).await;
|
||
assert!(result.is_ok());
|
||
}
|
||
|
||
// --- find_story_kit_root ---

#[test]
fn find_story_kit_root_returns_cwd_when_story_kit_in_cwd() {
    let tmp = tempfile::tempdir().unwrap();
    std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();

    let result = find_story_kit_root(tmp.path());
    assert_eq!(result, Some(tmp.path().to_path_buf()));
}

// The search walks up through ancestors until a `.storkit/` dir is found.
#[test]
fn find_story_kit_root_returns_parent_when_story_kit_in_parent() {
    let tmp = tempfile::tempdir().unwrap();
    std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
    let child = tmp.path().join("subdir").join("nested");
    std::fs::create_dir_all(&child).unwrap();

    let result = find_story_kit_root(&child);
    assert_eq!(result, Some(tmp.path().to_path_buf()));
}

#[test]
fn find_story_kit_root_returns_none_when_no_story_kit() {
    let tmp = tempfile::tempdir().unwrap();

    let result = find_story_kit_root(tmp.path());
    assert_eq!(result, None);
}

// When both a directory and its ancestor contain `.storkit/`, the nearest wins.
#[test]
fn find_story_kit_root_prefers_nearest_ancestor() {
    let tmp = tempfile::tempdir().unwrap();
    std::fs::create_dir_all(tmp.path().join(".storkit")).unwrap();
    let child = tmp.path().join("inner");
    std::fs::create_dir_all(child.join(".storkit")).unwrap();

    let result = find_story_kit_root(&child);
    assert_eq!(result, Some(child));
}
|
||
|
||
// --- scaffold ---

#[test]
fn scaffold_story_kit_creates_structure() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    assert!(dir.path().join(".storkit/README.md").exists());
    assert!(dir.path().join(".storkit/project.toml").exists());
    assert!(dir.path().join(".storkit/specs/00_CONTEXT.md").exists());
    assert!(dir.path().join(".storkit/specs/tech/STACK.md").exists());
    // Old stories/ dirs should NOT be created
    assert!(!dir.path().join(".storkit/stories").exists());
    assert!(dir.path().join("script/test").exists());
}

// Every pipeline stage dir must exist and carry a .gitkeep so git tracks it.
#[test]
fn scaffold_story_kit_creates_work_pipeline_dirs() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let stages = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    for stage in &stages {
        let path = dir.path().join(".storkit/work").join(stage);
        assert!(path.is_dir(), "work/{} should be a directory", stage);
        assert!(
            path.join(".gitkeep").exists(),
            "work/{} should have a .gitkeep file",
            stage
        );
    }
}
|
||
|
||
// The generated project.toml must define all three pipeline agents.
#[test]
fn scaffold_story_kit_project_toml_has_coder_qa_mergemaster() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let content = fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
    assert!(content.contains("[[agent]]"));
    assert!(content.contains("stage = \"coder\""));
    assert!(content.contains("stage = \"qa\""));
    assert!(content.contains("stage = \"mergemaster\""));
    assert!(content.contains("model = \"sonnet\""));
}

#[test]
fn scaffold_context_is_blank_template_not_story_kit_content() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let content = fs::read_to_string(dir.path().join(".storkit/specs/00_CONTEXT.md")).unwrap();
    assert!(content.contains("<!-- storkit:scaffold-template -->"));
    assert!(content.contains("## High-Level Goal"));
    assert!(content.contains("## Core Features"));
    assert!(content.contains("## Domain Definition"));
    assert!(content.contains("## Glossary"));
    // Must NOT contain Story Kit-specific content
    assert!(!content.contains("Agentic AI Code Assistant"));
    assert!(!content.contains("Poem HTTP server"));
}

#[test]
fn scaffold_stack_is_blank_template_not_story_kit_content() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let content = fs::read_to_string(dir.path().join(".storkit/specs/tech/STACK.md")).unwrap();
    assert!(content.contains("<!-- storkit:scaffold-template -->"));
    assert!(content.contains("## Core Stack"));
    assert!(content.contains("## Coding Standards"));
    assert!(content.contains("## Quality Gates"));
    assert!(content.contains("## Libraries"));
    // Must NOT contain Story Kit-specific content
    assert!(!content.contains("Poem HTTP server"));
    assert!(!content.contains("TypeScript + React"));
}
|
||
|
||
// Unix-only: permission bits are meaningless on Windows.
#[cfg(unix)]
#[test]
fn scaffold_story_kit_creates_executable_script_test() {
    use std::os::unix::fs::PermissionsExt;

    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let script_test = dir.path().join("script/test");
    assert!(script_test.exists(), "script/test should be created");
    let perms = fs::metadata(&script_test).unwrap().permissions();
    // Any of the user/group/other execute bits is sufficient.
    assert!(
        perms.mode() & 0o111 != 0,
        "script/test should be executable"
    );
}

// Pre-existing user files must survive a scaffold run untouched.
#[test]
fn scaffold_story_kit_does_not_overwrite_existing() {
    let dir = tempdir().unwrap();
    let readme = dir.path().join(".storkit/README.md");
    fs::create_dir_all(readme.parent().unwrap()).unwrap();
    fs::write(&readme, "custom content").unwrap();

    scaffold_story_kit(dir.path(), 3001).unwrap();

    assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
}
|
||
|
||
// Running scaffold twice must be a no-op the second time: identical file
// contents and no duplicated .gitignore entries.
#[test]
fn scaffold_story_kit_is_idempotent() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let readme_content = fs::read_to_string(dir.path().join(".storkit/README.md")).unwrap();
    let toml_content = fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();

    // Run again — must not change content or add duplicate .gitignore entries
    scaffold_story_kit(dir.path(), 3001).unwrap();

    assert_eq!(
        fs::read_to_string(dir.path().join(".storkit/README.md")).unwrap(),
        readme_content
    );
    assert_eq!(
        fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap(),
        toml_content
    );

    let story_kit_gitignore =
        fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
    let count = story_kit_gitignore
        .lines()
        .filter(|l| l.trim() == "worktrees/")
        .count();
    assert_eq!(
        count, 1,
        ".storkit/.gitignore should not have duplicate entries"
    );
}
|
||
|
||
// Scaffolding inside an already-initialized git repo must not create commits;
// auto-commit is reserved for brand-new repos the scaffold itself creates.
#[test]
fn scaffold_story_kit_existing_git_repo_no_commit() {
    let dir = tempdir().unwrap();

    // Initialize a git repo before scaffold
    std::process::Command::new("git")
        .args(["init"])
        .current_dir(dir.path())
        .status()
        .unwrap();
    // Inline -c identity config keeps the test independent of the host's git setup.
    std::process::Command::new("git")
        .args([
            "-c",
            "user.email=test@test.com",
            "-c",
            "user.name=Test",
            "commit",
            "--allow-empty",
            "-m",
            "pre-scaffold",
        ])
        .current_dir(dir.path())
        .status()
        .unwrap();

    scaffold_story_kit(dir.path(), 3001).unwrap();

    // Only 1 commit should exist — scaffold must not commit into an existing repo
    let log_output = std::process::Command::new("git")
        .args(["log", "--oneline"])
        .current_dir(dir.path())
        .output()
        .unwrap();
    let log = String::from_utf8_lossy(&log_output.stdout);
    let commit_count = log.lines().count();
    assert_eq!(
        commit_count, 1,
        "scaffold should not create a commit in an existing git repo"
    );
}
|
||
|
||
// Gitignore entries must be split correctly between the root .gitignore
// (root-level artifacts) and .storkit/.gitignore (relative sub-dir patterns).
#[test]
fn scaffold_creates_story_kit_gitignore_with_relative_entries() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    // .storkit/.gitignore must contain relative patterns for files under .storkit/
    let sk_content = fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
    assert!(sk_content.contains("worktrees/"));
    assert!(sk_content.contains("merge_workspace/"));
    assert!(sk_content.contains("coverage/"));
    // Must NOT contain absolute .storkit/ prefixed paths
    assert!(!sk_content.contains(".storkit/"));

    // Root .gitignore must contain root-level storkit entries
    let root_content = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
    assert!(root_content.contains(".storkit_port"));
    assert!(root_content.contains("store.json"));
    // Root .gitignore must NOT contain .storkit/ sub-directory patterns
    assert!(!root_content.contains(".storkit/worktrees/"));
    assert!(!root_content.contains(".storkit/merge_workspace/"));
    assert!(!root_content.contains(".storkit/coverage/"));
}

// Merging into a pre-existing .gitignore adds missing entries without
// duplicating the ones already present.
#[test]
fn scaffold_story_kit_gitignore_does_not_duplicate_existing_entries() {
    let dir = tempdir().unwrap();
    // Pre-create .storkit dir and .gitignore with some entries already present
    fs::create_dir_all(dir.path().join(".storkit")).unwrap();
    fs::write(
        dir.path().join(".storkit/.gitignore"),
        "worktrees/\ncoverage/\n",
    )
    .unwrap();

    scaffold_story_kit(dir.path(), 3001).unwrap();

    let content = fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
    let worktrees_count = content.lines().filter(|l| l.trim() == "worktrees/").count();
    assert_eq!(worktrees_count, 1, "worktrees/ should not be duplicated");
    let coverage_count = content.lines().filter(|l| l.trim() == "coverage/").count();
    assert_eq!(coverage_count, 1, "coverage/ should not be duplicated");
    // The missing entry must have been added
    assert!(content.contains("merge_workspace/"));
}
|
||
|
||
// --- CLAUDE.md scaffold ---

#[test]
fn scaffold_creates_claude_md_at_project_root() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let claude_md = dir.path().join("CLAUDE.md");
    assert!(
        claude_md.exists(),
        "CLAUDE.md should be created at project root"
    );

    let content = fs::read_to_string(&claude_md).unwrap();
    assert!(
        content.contains("<!-- storkit:scaffold-template -->"),
        "CLAUDE.md should contain the scaffold sentinel"
    );
    assert!(
        content.contains("Read .storkit/README.md"),
        "CLAUDE.md should include directive to read .storkit/README.md"
    );
    assert!(
        content.contains("Never chain shell commands"),
        "CLAUDE.md should include command chaining rule"
    );
}

// A user-authored CLAUDE.md must never be replaced by the template.
#[test]
fn scaffold_does_not_overwrite_existing_claude_md() {
    let dir = tempdir().unwrap();
    let claude_md = dir.path().join("CLAUDE.md");
    fs::write(&claude_md, "custom CLAUDE.md content").unwrap();

    scaffold_story_kit(dir.path(), 3001).unwrap();

    assert_eq!(
        fs::read_to_string(&claude_md).unwrap(),
        "custom CLAUDE.md content",
        "scaffold should not overwrite an existing CLAUDE.md"
    );
}
|
||
|
||
// .mcp.json is generated from the port the server is actually listening on.
#[test]
fn scaffold_story_kit_writes_mcp_json_with_port() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 4242).unwrap();

    let mcp_path = dir.path().join(".mcp.json");
    assert!(mcp_path.exists(), ".mcp.json should be created by scaffold");
    let content = fs::read_to_string(&mcp_path).unwrap();
    assert!(content.contains("4242"), ".mcp.json should reference the given port");
    assert!(content.contains("localhost"), ".mcp.json should reference localhost");
    assert!(content.contains("storkit"), ".mcp.json should name the storkit server");
}

#[test]
fn scaffold_story_kit_does_not_overwrite_existing_mcp_json() {
    let dir = tempdir().unwrap();
    let mcp_path = dir.path().join(".mcp.json");
    fs::write(&mcp_path, "{\"custom\": true}").unwrap();

    scaffold_story_kit(dir.path(), 3001).unwrap();

    assert_eq!(
        fs::read_to_string(&mcp_path).unwrap(),
        "{\"custom\": true}",
        "scaffold should not overwrite an existing .mcp.json"
    );
}

#[test]
fn scaffold_gitignore_includes_mcp_json() {
    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    let root_gitignore = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
    assert!(
        root_gitignore.contains(".mcp.json"),
        "root .gitignore should include .mcp.json (port is environment-specific)"
    );
}
|
||
|
||
// --- open_project scaffolding ---
|
||
|
||
#[tokio::test]
|
||
async fn open_project_scaffolds_when_story_kit_missing() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
fs::create_dir_all(&project_dir).unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001)
|
||
.await
|
||
.unwrap();
|
||
|
||
// .storkit/ should have been created automatically
|
||
assert!(project_dir.join(".storkit").is_dir());
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn open_project_does_not_overwrite_existing_story_kit() {
|
||
let dir = tempdir().unwrap();
|
||
let project_dir = dir.path().join("myproject");
|
||
let sk_dir = project_dir.join(".storkit");
|
||
fs::create_dir_all(&sk_dir).unwrap();
|
||
let readme = sk_dir.join("README.md");
|
||
fs::write(&readme, "custom content").unwrap();
|
||
let store = make_store(&dir);
|
||
let state = SessionState::default();
|
||
|
||
open_project(project_dir.to_string_lossy().to_string(), &state, &store, 3001)
|
||
.await
|
||
.unwrap();
|
||
|
||
// Existing .storkit/ content should not be overwritten
|
||
assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
|
||
}
|
||
|
||
// --- resolve_cli_path ---

#[test]
fn resolve_cli_path_absolute_returned_unchanged_when_nonexistent() {
    let cwd = PathBuf::from("/some/cwd");
    let result = resolve_cli_path(&cwd, "/nonexistent/absolute/path");
    assert_eq!(result, PathBuf::from("/nonexistent/absolute/path"));
}

#[test]
fn resolve_cli_path_dot_resolves_to_cwd() {
    let tmp = tempdir().unwrap();
    let cwd = tmp.path().to_path_buf();
    let result = resolve_cli_path(&cwd, ".");
    // Canonicalize should resolve "." in an existing dir to the canonical cwd
    assert_eq!(result, cwd.canonicalize().unwrap_or(cwd));
}

#[test]
fn resolve_cli_path_relative_resolves_against_cwd() {
    let tmp = tempdir().unwrap();
    let cwd = tmp.path().to_path_buf();
    let subdir = cwd.join("sub");
    fs::create_dir_all(&subdir).unwrap();
    let result = resolve_cli_path(&cwd, "sub");
    assert_eq!(result, subdir.canonicalize().unwrap_or(subdir));
}

#[test]
fn resolve_cli_path_nonexistent_relative_falls_back_to_joined() {
    let tmp = tempdir().unwrap();
    let cwd = tmp.path().to_path_buf();
    let result = resolve_cli_path(&cwd, "newproject");
    // Path doesn't exist yet — canonicalize fails, fallback is cwd/newproject
    assert_eq!(result, cwd.join("newproject"));
}
|
||
|
||
// --- detect_components_toml ---

// With no stack markers at all, detection falls back to one generic component.
#[test]
fn detect_no_markers_returns_fallback_components() {
    let dir = tempdir().unwrap();
    let toml = detect_components_toml(dir.path());
    // At least one [[component]] entry should always be present
    assert!(
        toml.contains("[[component]]"),
        "should always emit at least one component"
    );
    // Fallback should use a generic app component with empty setup
    assert!(
        toml.contains("name = \"app\""),
        "fallback should use generic 'app' component name"
    );
    assert!(
        toml.contains("setup = []"),
        "fallback should have empty setup list"
    );
    // Must not contain Rust-specific commands in a non-Rust project
    assert!(
        !toml.contains("cargo"),
        "fallback must not contain Rust-specific commands"
    );
}

// A Cargo.toml marker yields a Rust "server" component with cargo setup.
#[test]
fn detect_cargo_toml_generates_rust_component() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Cargo.toml"),
        "[package]\nname = \"test\"\n",
    )
    .unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"server\""));
    assert!(toml.contains("setup = [\"cargo check\"]"));
}
|
||
|
||
// pnpm-lock.yaml switches the Node component's package manager to pnpm.
#[test]
fn detect_package_json_with_pnpm_lock_generates_pnpm_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();
    fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"frontend\""));
    assert!(toml.contains("setup = [\"pnpm install\"]"));
}

#[test]
fn detect_package_json_without_pnpm_lock_generates_npm_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"frontend\""));
    assert!(toml.contains("setup = [\"npm install\"]"));
}

#[test]
fn detect_pyproject_toml_generates_python_component() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("pyproject.toml"),
        "[project]\nname = \"test\"\n",
    )
    .unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"python\""));
    assert!(toml.contains("pip install"));
}

// requirements.txt is the alternative Python marker.
#[test]
fn detect_requirements_txt_generates_python_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("requirements.txt"), "flask\n").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"python\""));
    assert!(toml.contains("pip install"));
}

#[test]
fn detect_go_mod_generates_go_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"go\""));
    assert!(toml.contains("setup = [\"go build ./...\"]"));
}

#[test]
fn detect_gemfile_generates_ruby_component() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Gemfile"),
        "source \"https://rubygems.org\"\n",
    )
    .unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"ruby\""));
    assert!(toml.contains("setup = [\"bundle install\"]"));
}
|
||
|
||
// --- Bug 375: no Rust-specific commands for non-Rust projects ---

#[test]
fn no_rust_commands_in_go_project() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(!toml.contains("cargo"), "go project must not contain cargo commands");
    assert!(toml.contains("go build"), "go project must use Go tooling");
}

#[test]
fn no_rust_commands_in_node_project() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(!toml.contains("cargo"), "node project must not contain cargo commands");
    assert!(toml.contains("npm install"), "node project must use npm tooling");
}

#[test]
fn no_rust_commands_when_no_stack_detected() {
    let dir = tempdir().unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(!toml.contains("cargo"), "unknown stack must not contain cargo commands");
    // setup list must be empty
    assert!(toml.contains("setup = []"), "unknown stack must have empty setup list");
}

// Multiple stack markers produce one component per detected stack.
#[test]
fn detect_multiple_markers_generates_multiple_components() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Cargo.toml"),
        "[package]\nname = \"server\"\n",
    )
    .unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"server\""));
    assert!(toml.contains("name = \"frontend\""));
    // Both component entries should be present
    let component_count = toml.matches("[[component]]").count();
    assert_eq!(component_count, 2);
}

#[test]
fn detect_no_fallback_when_markers_found() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let toml = detect_components_toml(dir.path());
    // The fallback "app" component should NOT appear when a real stack is detected
    assert!(!toml.contains("name = \"app\""));
}
|
||
|
||
// --- detect_script_test ---

// With no stack markers the generated script is a stub that explains itself.
#[test]
fn detect_script_test_no_markers_returns_stub() {
    let dir = tempdir().unwrap();
    let script = detect_script_test(dir.path());
    assert!(
        script.contains("No tests configured"),
        "fallback should contain the generic stub message"
    );
    assert!(script.starts_with("#!/usr/bin/env bash"));
}

#[test]
fn detect_script_test_cargo_toml_adds_cargo_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(dir.path());
    assert!(script.contains("cargo test"), "Rust project should run cargo test");
    assert!(!script.contains("No tests configured"));
}

#[test]
fn detect_script_test_package_json_npm_adds_npm_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let script = detect_script_test(dir.path());
    assert!(script.contains("npm test"), "Node project without pnpm-lock should run npm test");
    assert!(!script.contains("No tests configured"));
}

#[test]
fn detect_script_test_package_json_pnpm_adds_pnpm_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();
    fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();

    let script = detect_script_test(dir.path());
    assert!(script.contains("pnpm test"), "Node project with pnpm-lock should run pnpm test");
    // "pnpm test" is a substring of itself; verify there's no bare "npm test" line
    assert!(!script.lines().any(|l| l.trim() == "npm test"), "should not use npm when pnpm-lock.yaml is present");
}
|
||
|
||
#[test]
|
||
fn detect_script_test_pyproject_toml_adds_pytest() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("pyproject.toml"), "[project]\nname = \"x\"\n").unwrap();
|
||
|
||
let script = detect_script_test(dir.path());
|
||
assert!(script.contains("pytest"), "Python project should run pytest");
|
||
assert!(!script.contains("No tests configured"));
|
||
}
|
||
|
||
#[test]
|
||
fn detect_script_test_requirements_txt_adds_pytest() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("requirements.txt"), "flask\n").unwrap();
|
||
|
||
let script = detect_script_test(dir.path());
|
||
assert!(script.contains("pytest"), "Python project (requirements.txt) should run pytest");
|
||
}
|
||
|
||
#[test]
|
||
fn detect_script_test_go_mod_adds_go_test() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();
|
||
|
||
let script = detect_script_test(dir.path());
|
||
assert!(script.contains("go test ./..."), "Go project should run go test ./...");
|
||
assert!(!script.contains("No tests configured"));
|
||
}
|
||
|
||
#[test]
fn detect_script_test_multi_stack_combines_commands() {
    // A repo with both go.mod and package.json should produce a script that
    // runs the test command for every detected stack, not just one of them.
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let script = detect_script_test(dir.path());

    assert!(script.contains("go test ./..."), "multi-stack should include Go test command");
    // Fix: a plain `contains("npm test")` would also match a "pnpm test" line
    // (the pnpm test above guards against exactly that substring trap), so
    // require an npm invocation on a line that is not a pnpm one.
    assert!(
        script.lines().any(|l| l.contains("npm test") && !l.contains("pnpm")),
        "multi-stack should include Node test command"
    );
}
#[test]
fn detect_script_test_output_starts_with_shebang() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(tmp.path());

    // Every generated script must open with a strict-mode bash header.
    let expected_prefix = "#!/usr/bin/env bash\nset -euo pipefail\n";
    assert!(
        script.starts_with(expected_prefix),
        "generated script should start with bash shebang and set -euo pipefail"
    );
}
#[test]
fn scaffold_script_test_contains_detected_commands_for_rust() {
    // Scaffolding a Rust project should bake the detected command into script/test.
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let script = fs::read_to_string(tmp.path().join("script/test")).unwrap();
    assert!(script.contains("cargo test"), "Rust project scaffold should set cargo test in script/test");
    assert!(!script.contains("No tests configured"), "should not use stub when stack is detected");
}
#[test]
fn scaffold_script_test_fallback_stub_when_no_stack() {
    // An empty directory gives detection nothing to work with, so the
    // scaffold must fall back to the generic stub script.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let script = fs::read_to_string(tmp.path().join("script/test")).unwrap();
    assert!(script.contains("No tests configured"), "unknown stack should use the generic stub");
}
// --- generate_project_toml ---

#[test]
fn generate_project_toml_includes_both_components_and_agents() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let toml = generate_project_toml(tmp.path());

    // A component entry for the detected Rust server...
    for needle in ["[[component]]", "name = \"server\""] {
        assert!(toml.contains(needle), "missing {needle:?}");
    }
    // ...plus an agent entry per pipeline stage.
    for needle in [
        "[[agent]]",
        "stage = \"coder\"",
        "stage = \"qa\"",
        "stage = \"mergemaster\"",
    ] {
        assert!(toml.contains(needle), "missing {needle:?}");
    }
}
#[test]
fn scaffold_project_toml_contains_detected_components() {
    let tmp = tempdir().unwrap();
    // Seed the project root with a Cargo.toml so the Rust stack is detected
    // before the scaffold runs.
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let toml = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    assert!(
        toml.contains("[[component]]"),
        "project.toml should contain a component entry"
    );
    assert!(
        toml.contains("name = \"server\""),
        "Rust project should have a 'server' component"
    );
    assert!(
        toml.contains("cargo check"),
        "Rust component should have cargo check setup"
    );
}
#[test]
fn scaffold_project_toml_fallback_when_no_stack_detected() {
    // No marker files in the root: the scaffold has no stack to detect.
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let toml = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    assert!(
        toml.contains("[[component]]"),
        "project.toml should always have at least one component"
    );
    // The fallback emits a generic "app" component with an empty setup list,
    // so no Rust-specific commands may leak into it.
    assert!(
        toml.contains("name = \"app\""),
        "fallback should use generic 'app' component name"
    );
    assert!(
        !toml.contains("cargo"),
        "fallback must not contain Rust-specific commands for non-Rust projects"
    );
}
#[test]
fn scaffold_does_not_overwrite_existing_project_toml_with_components() {
    // Pre-seed .storkit/project.toml with user content, then scaffold on top.
    let tmp = tempdir().unwrap();
    let storkit = tmp.path().join(".storkit");
    fs::create_dir_all(&storkit).unwrap();
    let original = "[[component]]\nname = \"custom\"\npath = \".\"\nsetup = [\"make build\"]\n";
    fs::write(storkit.join("project.toml"), original).unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    // The user's file must survive byte-for-byte.
    let after = fs::read_to_string(storkit.join("project.toml")).unwrap();
    assert_eq!(
        after, original,
        "scaffold should not overwrite existing project.toml"
    );
}
// --- list_project_files_impl ---
|
||
|
||
#[tokio::test]
|
||
async fn list_project_files_returns_all_files() {
|
||
let dir = tempdir().unwrap();
|
||
fs::create_dir(dir.path().join("src")).unwrap();
|
||
fs::write(dir.path().join("src/main.rs"), "fn main() {}").unwrap();
|
||
fs::write(dir.path().join("README.md"), "# readme").unwrap();
|
||
|
||
let files = list_project_files_impl(dir.path().to_path_buf())
|
||
.await
|
||
.unwrap();
|
||
|
||
assert!(files.contains(&"README.md".to_string()));
|
||
assert!(files.contains(&"src/main.rs".to_string()));
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn list_project_files_excludes_dirs_from_output() {
|
||
let dir = tempdir().unwrap();
|
||
fs::create_dir(dir.path().join("subdir")).unwrap();
|
||
fs::write(dir.path().join("file.txt"), "").unwrap();
|
||
|
||
let files = list_project_files_impl(dir.path().to_path_buf())
|
||
.await
|
||
.unwrap();
|
||
|
||
assert!(files.contains(&"file.txt".to_string()));
|
||
assert!(!files.iter().any(|f| f == "subdir"));
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn list_project_files_returns_sorted() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("z.txt"), "").unwrap();
|
||
fs::write(dir.path().join("a.txt"), "").unwrap();
|
||
|
||
let files = list_project_files_impl(dir.path().to_path_buf())
|
||
.await
|
||
.unwrap();
|
||
|
||
let a_idx = files.iter().position(|f| f == "a.txt").unwrap();
|
||
let z_idx = files.iter().position(|f| f == "z.txt").unwrap();
|
||
assert!(a_idx < z_idx);
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn list_project_files_with_state() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("hello.rs"), "").unwrap();
|
||
let state = make_state_with_root(dir.path().to_path_buf());
|
||
|
||
let files = list_project_files(&state).await.unwrap();
|
||
|
||
assert!(files.contains(&"hello.rs".to_string()));
|
||
}
|
||
|
||
#[tokio::test]
|
||
async fn list_project_files_errors_without_project() {
|
||
let state = SessionState::default();
|
||
let result = list_project_files(&state).await;
|
||
assert!(result.is_err());
|
||
}
|
||
}
|