1186 lines
43 KiB
Rust
1186 lines
43 KiB
Rust
use std::fs;
|
||
use std::path::Path;
|
||
|
||
/// Canonical Story Kit process README, embedded at compile time and written
/// to `.storkit/README.md` during scaffolding.
const STORY_KIT_README: &str = include_str!("../../../../.storkit/README.md");

/// Example `bot.toml` for the Matrix transport; written to
/// `.storkit/bot.toml.matrix.example` so users can see the available options.
const BOT_TOML_MATRIX_EXAMPLE: &str =
    include_str!("../../../../.storkit/bot.toml.matrix.example");
/// Example `bot.toml` for the WhatsApp (Meta) transport.
const BOT_TOML_WHATSAPP_META_EXAMPLE: &str =
    include_str!("../../../../.storkit/bot.toml.whatsapp-meta.example");
/// Example `bot.toml` for the WhatsApp (Twilio) transport.
const BOT_TOML_WHATSAPP_TWILIO_EXAMPLE: &str =
    include_str!("../../../../.storkit/bot.toml.whatsapp-twilio.example");
/// Example `bot.toml` for the Slack transport.
const BOT_TOML_SLACK_EXAMPLE: &str = include_str!("../../../../.storkit/bot.toml.slack.example");
|
||
|
||
const STORY_KIT_CONTEXT: &str = "<!-- storkit:scaffold-template -->\n\
|
||
# Project Context\n\
|
||
\n\
|
||
## High-Level Goal\n\
|
||
\n\
|
||
TODO: Describe the high-level goal of this project.\n\
|
||
\n\
|
||
## Core Features\n\
|
||
\n\
|
||
TODO: List the core features of this project.\n\
|
||
\n\
|
||
## Domain Definition\n\
|
||
\n\
|
||
TODO: Define the key domain concepts and entities.\n\
|
||
\n\
|
||
## Glossary\n\
|
||
\n\
|
||
TODO: Define abbreviations and technical terms.\n";
|
||
|
||
const STORY_KIT_STACK: &str = "<!-- storkit:scaffold-template -->\n\
|
||
# Tech Stack & Constraints\n\
|
||
\n\
|
||
## Core Stack\n\
|
||
\n\
|
||
TODO: Describe the language, frameworks, and runtimes.\n\
|
||
\n\
|
||
## Coding Standards\n\
|
||
\n\
|
||
TODO: Describe code style, linting rules, and error handling conventions.\n\
|
||
\n\
|
||
## Quality Gates\n\
|
||
\n\
|
||
TODO: List the commands that must pass before merging (e.g., cargo test, npm run build).\n\
|
||
\n\
|
||
## Libraries\n\
|
||
\n\
|
||
TODO: List approved libraries and their purpose.\n";
|
||
|
||
/// Generic fallback content for `script/test`, used when no tech-stack
/// markers are detected. Exits 0 so the scaffold always yields a runnable
/// script; `detect_script_test` returns this verbatim in the fallback case.
const STORY_KIT_SCRIPT_TEST: &str = "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's test commands here.\n# Story Kit agents invoke this script as the canonical test runner.\n# Exit 0 on success, non-zero on failure.\necho \"No tests configured\"\n";
|
||
|
||
const STORY_KIT_CLAUDE_MD: &str = "<!-- storkit:scaffold-template -->\n\
|
||
Never chain shell commands with `&&`, `||`, or `;` in a single Bash call. \
|
||
The permission system validates the entire command string, and chained commands \
|
||
won't match allow rules like `Bash(git *)`. Use separate Bash calls instead — \
|
||
parallel calls work fine.\n\
|
||
\n\
|
||
Read .storkit/README.md to see our dev process.\n";
|
||
|
||
const STORY_KIT_CLAUDE_SETTINGS: &str = r#"{
|
||
"permissions": {
|
||
"allow": [
|
||
"Bash(cargo build:*)",
|
||
"Bash(cargo check:*)",
|
||
"Bash(cargo clippy:*)",
|
||
"Bash(cargo test:*)",
|
||
"Bash(cargo run:*)",
|
||
"Bash(cargo nextest run:*)",
|
||
"Bash(git *)",
|
||
"Bash(ls *)",
|
||
"Bash(mkdir *)",
|
||
"Bash(mv *)",
|
||
"Bash(rm *)",
|
||
"Bash(touch *)",
|
||
"Bash(echo:*)",
|
||
"Bash(pwd *)",
|
||
"Bash(pnpm install:*)",
|
||
"Bash(pnpm run build:*)",
|
||
"Bash(pnpm run test:*)",
|
||
"Bash(pnpm test:*)",
|
||
"Bash(pnpm build:*)",
|
||
"Bash(npm run build:*)",
|
||
"Bash(npx tsc:*)",
|
||
"Bash(npx vitest:*)",
|
||
"Bash(npx @biomejs/biome check:*)",
|
||
"Bash(npx playwright test:*)",
|
||
"Bash(script/test:*)",
|
||
"Bash(./script/test:*)",
|
||
"Edit",
|
||
"Write",
|
||
"mcp__storkit__*"
|
||
]
|
||
},
|
||
"enabledMcpjsonServers": [
|
||
"storkit"
|
||
]
|
||
}
|
||
"#;
|
||
|
||
const DEFAULT_PROJECT_AGENTS_TOML: &str = r#"# Project-wide default QA mode: "server", "agent", or "human".
|
||
# Per-story `qa` front matter overrides this setting.
|
||
default_qa = "server"
|
||
|
||
[[agent]]
|
||
name = "coder-1"
|
||
stage = "coder"
|
||
role = "Full-stack engineer. Implements features across all components."
|
||
model = "sonnet"
|
||
max_turns = 50
|
||
max_budget_usd = 5.00
|
||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .storkit/README.md to understand the dev process. Follow the workflow through implementation and verification. The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop.\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits.\n\nIf `script/test` still contains the generic 'No tests configured' stub, update it to run the project's actual test suite before starting implementation."
|
||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Commit all your work before finishing. Do not accept stories, move them to archived, or merge to master."
|
||
|
||
[[agent]]
|
||
name = "qa"
|
||
stage = "qa"
|
||
role = "Reviews coder work: runs quality gates, generates testing plans, and reports findings."
|
||
model = "sonnet"
|
||
max_turns = 40
|
||
max_budget_usd = 4.00
|
||
prompt = "You are the QA agent for story {{story_id}}. Review the coder's work and produce a structured QA report. Run quality gates (linting, tests), attempt a build, and generate a manual testing plan. Do NOT modify any code."
|
||
system_prompt = "You are a QA agent. Your job is read-only: review code quality, run tests, and produce a structured QA report. Do not modify code."
|
||
|
||
[[agent]]
|
||
name = "mergemaster"
|
||
stage = "mergemaster"
|
||
role = "Merges completed work into master, runs quality gates, and archives stories."
|
||
model = "sonnet"
|
||
max_turns = 30
|
||
max_budget_usd = 5.00
|
||
prompt = "You are the mergemaster agent for story {{story_id}}. Call merge_agent_work(story_id='{{story_id}}') to start the merge pipeline. Then poll get_merge_status(story_id='{{story_id}}') every 15 seconds until the status is 'completed' or 'failed'. Report the final result. If the merge fails, call report_merge_failure."
|
||
system_prompt = "You are the mergemaster agent. Call merge_agent_work to start the merge, then poll get_merge_status every 15 seconds until done. Never manually move story files. Call report_merge_failure when merges fail."
|
||
"#;
|
||
|
||
/// Detect the tech stack from the project root and return TOML `[[component]]` entries.
///
/// Inspects well-known marker files at the project root to identify which
/// tech stacks are present, then emits one `[[component]]` entry per detected
/// stack with sensible default `setup` commands. If no markers are found, a
/// single fallback `app` component with an empty `setup` list is returned so
/// that the pipeline never breaks on an unknown stack.
pub fn detect_components_toml(root: &Path) -> String {
    // Render a single `[[component]]` entry. Centralizing the TOML layout
    // here keeps all branches in sync instead of repeating the template.
    fn component(name: &str, setup: &[&str]) -> String {
        let cmds: Vec<String> = setup.iter().map(|c| format!("\"{c}\"")).collect();
        format!(
            "[[component]]\nname = \"{name}\"\npath = \".\"\nsetup = [{}]\n",
            cmds.join(", ")
        )
    }

    let mut sections = Vec::new();

    if root.join("Cargo.toml").exists() {
        sections.push(component("server", &["cargo check"]));
    }

    if root.join("package.json").exists() {
        // Prefer pnpm when its lockfile is present; otherwise default to npm.
        let setup_cmd = if root.join("pnpm-lock.yaml").exists() {
            "pnpm install"
        } else {
            "npm install"
        };
        sections.push(component("frontend", &[setup_cmd]));
    }

    if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
        sections.push(component("python", &["pip install -r requirements.txt"]));
    }

    if root.join("go.mod").exists() {
        sections.push(component("go", &["go build ./..."]));
    }

    if root.join("Gemfile").exists() {
        sections.push(component("ruby", &["bundle install"]));
    }

    if sections.is_empty() {
        // No tech stack markers detected — emit a single generic component
        // with an empty setup list. The ONBOARDING_PROMPT instructs the chat
        // agent to inspect the project and replace this with real definitions.
        sections.push(component("app", &[]));
    }

    sections.join("\n")
}
|
||
|
||
/// Generate `script/test` content for a new project at `root`.
|
||
///
|
||
/// Inspects well-known marker files to identify which tech stacks are present
|
||
/// and emits the appropriate test commands. Multi-stack projects get combined
|
||
/// commands run sequentially. Falls back to the generic stub when no markers
|
||
/// are found so the scaffold is always valid.
|
||
pub fn detect_script_test(root: &Path) -> String {
|
||
let mut commands: Vec<&str> = Vec::new();
|
||
|
||
if root.join("Cargo.toml").exists() {
|
||
commands.push("cargo test");
|
||
}
|
||
|
||
if root.join("package.json").exists() {
|
||
if root.join("pnpm-lock.yaml").exists() {
|
||
commands.push("pnpm test");
|
||
} else {
|
||
commands.push("npm test");
|
||
}
|
||
}
|
||
|
||
if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
|
||
commands.push("pytest");
|
||
}
|
||
|
||
if root.join("go.mod").exists() {
|
||
commands.push("go test ./...");
|
||
}
|
||
|
||
if commands.is_empty() {
|
||
return STORY_KIT_SCRIPT_TEST.to_string();
|
||
}
|
||
|
||
let mut script = "#!/usr/bin/env bash\nset -euo pipefail\n\n".to_string();
|
||
for cmd in commands {
|
||
script.push_str(cmd);
|
||
script.push('\n');
|
||
}
|
||
script
|
||
}
|
||
|
||
/// Generate a complete `project.toml` for a new project at `root`.
|
||
///
|
||
/// Detects the tech stack via [`detect_components_toml`] and prepends the
|
||
/// resulting `[[component]]` entries before the default `[[agent]]` sections.
|
||
fn generate_project_toml(root: &Path) -> String {
|
||
let components = detect_components_toml(root);
|
||
format!("{components}\n{DEFAULT_PROJECT_AGENTS_TOML}")
|
||
}
|
||
|
||
/// Write `content` to `path` unless a file already exists there.
///
/// Leaving existing files untouched keeps scaffolding idempotent and
/// guarantees user edits are never clobbered.
fn write_file_if_missing(path: &Path, content: &str) -> Result<(), String> {
    if path.exists() {
        Ok(())
    } else {
        fs::write(path, content).map_err(|e| format!("Failed to write file: {}", e))
    }
}
|
||
|
||
/// Write `content` to `path` if missing, then ensure the file is executable.
|
||
fn write_script_if_missing(path: &Path, content: &str) -> Result<(), String> {
|
||
write_file_if_missing(path, content)?;
|
||
|
||
#[cfg(unix)]
|
||
{
|
||
use std::os::unix::fs::PermissionsExt;
|
||
let mut perms = fs::metadata(path)
|
||
.map_err(|e| format!("Failed to read permissions for {}: {}", path.display(), e))?
|
||
.permissions();
|
||
perms.set_mode(0o755);
|
||
fs::set_permissions(path, perms)
|
||
.map_err(|e| format!("Failed to set permissions on {}: {}", path.display(), e))?;
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Write (or idempotently update) `.storkit/.gitignore` with Story Kit–specific
/// ignore patterns for files that live inside the `.storkit/` directory.
/// Patterns are relative to `.storkit/` as git resolves `.gitignore` files
/// relative to the directory that contains them.
fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
    // Entries that belong inside .storkit/.gitignore (relative to .storkit/).
    let entries = [
        "bot.toml",
        "matrix_store/",
        "matrix_device_id",
        "worktrees/",
        "merge_workspace/",
        "coverage/",
        "work/2_current/",
        "work/3_qa/",
        "work/4_merge/",
        "logs/",
        "token_usage.jsonl",
    ];

    let gitignore_path = root.join(".storkit").join(".gitignore");
    let existing = if gitignore_path.exists() {
        fs::read_to_string(&gitignore_path)
            .map_err(|e| format!("Failed to read .storkit/.gitignore: {}", e))?
    } else {
        String::new()
    };

    // Collect the trimmed lines already present, then keep only the entries
    // not yet listed. Whitespace-only differences don't cause duplicates.
    let present: std::collections::HashSet<&str> = existing.lines().map(str::trim).collect();
    let missing: Vec<&str> = entries
        .iter()
        .copied()
        .filter(|e| !present.contains(e))
        .collect();

    if missing.is_empty() {
        return Ok(());
    }

    // Append the missing entries, making sure the existing content ends with
    // a newline first so we never glue onto the last line.
    let mut updated = existing;
    if !(updated.is_empty() || updated.ends_with('\n')) {
        updated.push('\n');
    }
    for entry in &missing {
        updated.push_str(entry);
        updated.push('\n');
    }

    fs::write(&gitignore_path, updated)
        .map_err(|e| format!("Failed to write .storkit/.gitignore: {}", e))
}
|
||
|
||
/// Append root-level Story Kit entries to the project `.gitignore`.
/// Only `store.json` and `.storkit_port` remain here because they live at
/// the project root and git does not support `../` patterns in `.gitignore`
/// files, so they cannot be expressed in `.storkit/.gitignore`.
fn append_root_gitignore_entries(root: &Path) -> Result<(), String> {
    let required = [".storkit_port", "store.json", ".mcp.json"];

    let gitignore_path = root.join(".gitignore");
    let mut content = if gitignore_path.exists() {
        fs::read_to_string(&gitignore_path)
            .map_err(|e| format!("Failed to read .gitignore: {}", e))?
    } else {
        String::new()
    };

    // An entry counts as present if any line, ignoring surrounding
    // whitespace, matches it exactly.
    let already_listed = |entry: &str| content.lines().any(|line| line.trim() == entry);
    let to_add: Vec<&str> = required
        .iter()
        .copied()
        .filter(|e| !already_listed(e))
        .collect();

    if to_add.is_empty() {
        return Ok(());
    }

    // Terminate the existing content before appending so the first new entry
    // doesn't merge into the last existing line.
    if !content.is_empty() && !content.ends_with('\n') {
        content.push('\n');
    }
    for entry in to_add {
        content.push_str(entry);
        content.push('\n');
    }

    fs::write(&gitignore_path, content)
        .map_err(|e| format!("Failed to write .gitignore: {}", e))
}
|
||
|
||
/// Scaffold the full Story Kit layout under `root`: the `.storkit/` tree
/// (work pipeline dirs, specs, templates, bot.toml examples, .gitignore),
/// `script/test`, root-level `CLAUDE.md`, `.mcp.json` (pointing at `port`),
/// and `.claude/settings.json`. If `root` is not already a git repo, it is
/// initialized and an initial commit of the scaffold is made.
///
/// All file writes go through `write_file_if_missing`/`write_script_if_missing`,
/// so repeated calls are idempotent and never overwrite user edits.
pub(crate) fn scaffold_story_kit(root: &Path, port: u16) -> Result<(), String> {
    let story_kit_root = root.join(".storkit");
    let specs_root = story_kit_root.join("specs");
    let tech_root = specs_root.join("tech");
    let functional_root = specs_root.join("functional");
    let script_root = root.join("script");

    // Create the work/ pipeline directories, each with a .gitkeep so empty dirs survive git clone
    let work_stages = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    for stage in &work_stages {
        let dir = story_kit_root.join("work").join(stage);
        fs::create_dir_all(&dir).map_err(|e| format!("Failed to create work/{}: {}", stage, e))?;
        write_file_if_missing(&dir.join(".gitkeep"), "")?;
    }

    fs::create_dir_all(&tech_root).map_err(|e| format!("Failed to create specs/tech: {}", e))?;
    fs::create_dir_all(&functional_root)
        .map_err(|e| format!("Failed to create specs/functional: {}", e))?;
    fs::create_dir_all(&script_root)
        .map_err(|e| format!("Failed to create script/ directory: {}", e))?;

    // Template files: README, project.toml (components + agents), blank spec
    // templates, the test runner script, and the root CLAUDE.md.
    write_file_if_missing(&story_kit_root.join("README.md"), STORY_KIT_README)?;
    let project_toml_content = generate_project_toml(root);
    write_file_if_missing(&story_kit_root.join("project.toml"), &project_toml_content)?;
    write_file_if_missing(&specs_root.join("00_CONTEXT.md"), STORY_KIT_CONTEXT)?;
    write_file_if_missing(&tech_root.join("STACK.md"), STORY_KIT_STACK)?;
    let script_test_content = detect_script_test(root);
    write_script_if_missing(&script_root.join("test"), &script_test_content)?;
    write_file_if_missing(&root.join("CLAUDE.md"), STORY_KIT_CLAUDE_MD)?;

    // Write per-transport bot.toml example files so users can see all options.
    write_file_if_missing(
        &story_kit_root.join("bot.toml.matrix.example"),
        BOT_TOML_MATRIX_EXAMPLE,
    )?;
    write_file_if_missing(
        &story_kit_root.join("bot.toml.whatsapp-meta.example"),
        BOT_TOML_WHATSAPP_META_EXAMPLE,
    )?;
    write_file_if_missing(
        &story_kit_root.join("bot.toml.whatsapp-twilio.example"),
        BOT_TOML_WHATSAPP_TWILIO_EXAMPLE,
    )?;
    write_file_if_missing(
        &story_kit_root.join("bot.toml.slack.example"),
        BOT_TOML_SLACK_EXAMPLE,
    )?;

    // Write .mcp.json at the project root so agents can find the MCP server.
    // Only written when missing — never overwrites an existing file, because
    // the port is environment-specific and must not clobber a running instance.
    let mcp_content = format!(
        "{{\n \"mcpServers\": {{\n \"storkit\": {{\n \"type\": \"http\",\n \"url\": \"http://localhost:{port}/mcp\"\n }}\n }}\n}}\n"
    );
    write_file_if_missing(&root.join(".mcp.json"), &mcp_content)?;

    // Create .claude/settings.json with sensible permission defaults so that
    // Claude Code (both agents and web UI chat) can operate without constant
    // permission prompts.
    let claude_dir = root.join(".claude");
    fs::create_dir_all(&claude_dir)
        .map_err(|e| format!("Failed to create .claude/ directory: {}", e))?;
    write_file_if_missing(&claude_dir.join("settings.json"), STORY_KIT_CLAUDE_SETTINGS)?;

    write_story_kit_gitignore(root)?;
    append_root_gitignore_entries(root)?;

    // Run `git init` if the directory is not already a git repo, then make an initial commit
    if !root.join(".git").exists() {
        let init_status = std::process::Command::new("git")
            .args(["init"])
            .current_dir(root)
            .status()
            .map_err(|e| format!("Failed to run git init: {}", e))?;
        if !init_status.success() {
            return Err("git init failed".to_string());
        }

        // Stage only the scaffold outputs — a pre-existing (non-git) project's
        // other files are left unstaged for the user to commit themselves.
        let add_output = std::process::Command::new("git")
            .args([
                "add",
                ".storkit",
                "script",
                ".gitignore",
                "CLAUDE.md",
                ".claude",
            ])
            .current_dir(root)
            .output()
            .map_err(|e| format!("Failed to run git add: {}", e))?;
        if !add_output.status.success() {
            return Err(format!(
                "git add failed: {}",
                String::from_utf8_lossy(&add_output.stderr)
            ));
        }

        // Use an explicit committer identity via `-c` so the commit succeeds
        // even when the environment has no global git user configured.
        let commit_output = std::process::Command::new("git")
            .args([
                "-c",
                "user.email=storkit@localhost",
                "-c",
                "user.name=Story Kit",
                "commit",
                "-m",
                "Initial Story Kit scaffold",
            ])
            .current_dir(root)
            .output()
            .map_err(|e| format!("Failed to run git commit: {}", e))?;
        if !commit_output.status.success() {
            return Err(format!(
                "git commit failed: {}",
                String::from_utf8_lossy(&commit_output.stderr)
            ));
        }
    }

    Ok(())
}
|
||
|
||
#[cfg(test)]
|
||
mod tests {
|
||
use super::*;
|
||
use tempfile::tempdir;
|
||
|
||
// --- scaffold ---
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_creates_structure() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
assert!(dir.path().join(".storkit/README.md").exists());
|
||
assert!(dir.path().join(".storkit/project.toml").exists());
|
||
assert!(dir.path().join(".storkit/specs/00_CONTEXT.md").exists());
|
||
assert!(dir.path().join(".storkit/specs/tech/STACK.md").exists());
|
||
// Old stories/ dirs should NOT be created
|
||
assert!(!dir.path().join(".storkit/stories").exists());
|
||
assert!(dir.path().join("script/test").exists());
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_creates_work_pipeline_dirs() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let stages = [
|
||
"1_backlog",
|
||
"2_current",
|
||
"3_qa",
|
||
"4_merge",
|
||
"5_done",
|
||
"6_archived",
|
||
];
|
||
for stage in &stages {
|
||
let path = dir.path().join(".storkit/work").join(stage);
|
||
assert!(path.is_dir(), "work/{} should be a directory", stage);
|
||
assert!(
|
||
path.join(".gitkeep").exists(),
|
||
"work/{} should have a .gitkeep file",
|
||
stage
|
||
);
|
||
}
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_project_toml_has_coder_qa_mergemaster() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let content = fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
|
||
assert!(content.contains("[[agent]]"));
|
||
assert!(content.contains("stage = \"coder\""));
|
||
assert!(content.contains("stage = \"qa\""));
|
||
assert!(content.contains("stage = \"mergemaster\""));
|
||
assert!(content.contains("model = \"sonnet\""));
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_context_is_blank_template_not_story_kit_content() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let content = fs::read_to_string(dir.path().join(".storkit/specs/00_CONTEXT.md")).unwrap();
|
||
assert!(content.contains("<!-- storkit:scaffold-template -->"));
|
||
assert!(content.contains("## High-Level Goal"));
|
||
assert!(content.contains("## Core Features"));
|
||
assert!(content.contains("## Domain Definition"));
|
||
assert!(content.contains("## Glossary"));
|
||
// Must NOT contain Story Kit-specific content
|
||
assert!(!content.contains("Agentic AI Code Assistant"));
|
||
assert!(!content.contains("Poem HTTP server"));
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_stack_is_blank_template_not_story_kit_content() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let content = fs::read_to_string(dir.path().join(".storkit/specs/tech/STACK.md")).unwrap();
|
||
assert!(content.contains("<!-- storkit:scaffold-template -->"));
|
||
assert!(content.contains("## Core Stack"));
|
||
assert!(content.contains("## Coding Standards"));
|
||
assert!(content.contains("## Quality Gates"));
|
||
assert!(content.contains("## Libraries"));
|
||
// Must NOT contain Story Kit-specific content
|
||
assert!(!content.contains("Poem HTTP server"));
|
||
assert!(!content.contains("TypeScript + React"));
|
||
}
|
||
|
||
#[cfg(unix)]
|
||
#[test]
|
||
fn scaffold_story_kit_creates_executable_script_test() {
|
||
use std::os::unix::fs::PermissionsExt;
|
||
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let script_test = dir.path().join("script/test");
|
||
assert!(script_test.exists(), "script/test should be created");
|
||
let perms = fs::metadata(&script_test).unwrap().permissions();
|
||
assert!(
|
||
perms.mode() & 0o111 != 0,
|
||
"script/test should be executable"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_does_not_overwrite_existing() {
|
||
let dir = tempdir().unwrap();
|
||
let readme = dir.path().join(".storkit/README.md");
|
||
fs::create_dir_all(readme.parent().unwrap()).unwrap();
|
||
fs::write(&readme, "custom content").unwrap();
|
||
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_is_idempotent() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let readme_content = fs::read_to_string(dir.path().join(".storkit/README.md")).unwrap();
|
||
let toml_content = fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap();
|
||
|
||
// Run again — must not change content or add duplicate .gitignore entries
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
assert_eq!(
|
||
fs::read_to_string(dir.path().join(".storkit/README.md")).unwrap(),
|
||
readme_content
|
||
);
|
||
assert_eq!(
|
||
fs::read_to_string(dir.path().join(".storkit/project.toml")).unwrap(),
|
||
toml_content
|
||
);
|
||
|
||
let story_kit_gitignore =
|
||
fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
|
||
let count = story_kit_gitignore
|
||
.lines()
|
||
.filter(|l| l.trim() == "worktrees/")
|
||
.count();
|
||
assert_eq!(
|
||
count, 1,
|
||
".storkit/.gitignore should not have duplicate entries"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_existing_git_repo_no_commit() {
|
||
let dir = tempdir().unwrap();
|
||
|
||
// Initialize a git repo before scaffold
|
||
std::process::Command::new("git")
|
||
.args(["init"])
|
||
.current_dir(dir.path())
|
||
.status()
|
||
.unwrap();
|
||
std::process::Command::new("git")
|
||
.args([
|
||
"-c",
|
||
"user.email=test@test.com",
|
||
"-c",
|
||
"user.name=Test",
|
||
"commit",
|
||
"--allow-empty",
|
||
"-m",
|
||
"pre-scaffold",
|
||
])
|
||
.current_dir(dir.path())
|
||
.status()
|
||
.unwrap();
|
||
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
// Only 1 commit should exist — scaffold must not commit into an existing repo
|
||
let log_output = std::process::Command::new("git")
|
||
.args(["log", "--oneline"])
|
||
.current_dir(dir.path())
|
||
.output()
|
||
.unwrap();
|
||
let log = String::from_utf8_lossy(&log_output.stdout);
|
||
let commit_count = log.lines().count();
|
||
assert_eq!(
|
||
commit_count, 1,
|
||
"scaffold should not create a commit in an existing git repo"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_creates_story_kit_gitignore_with_relative_entries() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
// .storkit/.gitignore must contain relative patterns for files under .storkit/
|
||
let sk_content = fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
|
||
assert!(sk_content.contains("worktrees/"));
|
||
assert!(sk_content.contains("merge_workspace/"));
|
||
assert!(sk_content.contains("coverage/"));
|
||
// Must NOT contain absolute .storkit/ prefixed paths
|
||
assert!(!sk_content.contains(".storkit/"));
|
||
|
||
// Root .gitignore must contain root-level storkit entries
|
||
let root_content = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
|
||
assert!(root_content.contains(".storkit_port"));
|
||
assert!(root_content.contains("store.json"));
|
||
// Root .gitignore must NOT contain .storkit/ sub-directory patterns
|
||
assert!(!root_content.contains(".storkit/worktrees/"));
|
||
assert!(!root_content.contains(".storkit/merge_workspace/"));
|
||
assert!(!root_content.contains(".storkit/coverage/"));
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_gitignore_does_not_duplicate_existing_entries() {
|
||
let dir = tempdir().unwrap();
|
||
// Pre-create .storkit dir and .gitignore with some entries already present
|
||
fs::create_dir_all(dir.path().join(".storkit")).unwrap();
|
||
fs::write(
|
||
dir.path().join(".storkit/.gitignore"),
|
||
"worktrees/\ncoverage/\n",
|
||
)
|
||
.unwrap();
|
||
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let content = fs::read_to_string(dir.path().join(".storkit/.gitignore")).unwrap();
|
||
let worktrees_count = content.lines().filter(|l| l.trim() == "worktrees/").count();
|
||
assert_eq!(worktrees_count, 1, "worktrees/ should not be duplicated");
|
||
let coverage_count = content.lines().filter(|l| l.trim() == "coverage/").count();
|
||
assert_eq!(coverage_count, 1, "coverage/ should not be duplicated");
|
||
// The missing entry must have been added
|
||
assert!(content.contains("merge_workspace/"));
|
||
}
|
||
|
||
// --- CLAUDE.md scaffold ---
|
||
|
||
#[test]
|
||
fn scaffold_creates_claude_md_at_project_root() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let claude_md = dir.path().join("CLAUDE.md");
|
||
assert!(
|
||
claude_md.exists(),
|
||
"CLAUDE.md should be created at project root"
|
||
);
|
||
|
||
let content = fs::read_to_string(&claude_md).unwrap();
|
||
assert!(
|
||
content.contains("<!-- storkit:scaffold-template -->"),
|
||
"CLAUDE.md should contain the scaffold sentinel"
|
||
);
|
||
assert!(
|
||
content.contains("Read .storkit/README.md"),
|
||
"CLAUDE.md should include directive to read .storkit/README.md"
|
||
);
|
||
assert!(
|
||
content.contains("Never chain shell commands"),
|
||
"CLAUDE.md should include command chaining rule"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_does_not_overwrite_existing_claude_md() {
|
||
let dir = tempdir().unwrap();
|
||
let claude_md = dir.path().join("CLAUDE.md");
|
||
fs::write(&claude_md, "custom CLAUDE.md content").unwrap();
|
||
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
assert_eq!(
|
||
fs::read_to_string(&claude_md).unwrap(),
|
||
"custom CLAUDE.md content",
|
||
"scaffold should not overwrite an existing CLAUDE.md"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_writes_mcp_json_with_port() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 4242).unwrap();
|
||
|
||
let mcp_path = dir.path().join(".mcp.json");
|
||
assert!(mcp_path.exists(), ".mcp.json should be created by scaffold");
|
||
let content = fs::read_to_string(&mcp_path).unwrap();
|
||
assert!(content.contains("4242"), ".mcp.json should reference the given port");
|
||
assert!(content.contains("localhost"), ".mcp.json should reference localhost");
|
||
assert!(content.contains("storkit"), ".mcp.json should name the storkit server");
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_story_kit_does_not_overwrite_existing_mcp_json() {
|
||
let dir = tempdir().unwrap();
|
||
let mcp_path = dir.path().join(".mcp.json");
|
||
fs::write(&mcp_path, "{\"custom\": true}").unwrap();
|
||
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
assert_eq!(
|
||
fs::read_to_string(&mcp_path).unwrap(),
|
||
"{\"custom\": true}",
|
||
"scaffold should not overwrite an existing .mcp.json"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn scaffold_gitignore_includes_mcp_json() {
|
||
let dir = tempdir().unwrap();
|
||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||
|
||
let root_gitignore = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
|
||
assert!(
|
||
root_gitignore.contains(".mcp.json"),
|
||
"root .gitignore should include .mcp.json (port is environment-specific)"
|
||
);
|
||
}
|
||
|
||
// --- detect_components_toml ---
|
||
|
||
#[test]
|
||
fn detect_no_markers_returns_fallback_components() {
|
||
let dir = tempdir().unwrap();
|
||
let toml = detect_components_toml(dir.path());
|
||
// At least one [[component]] entry should always be present
|
||
assert!(
|
||
toml.contains("[[component]]"),
|
||
"should always emit at least one component"
|
||
);
|
||
// Fallback should use a generic app component with empty setup
|
||
assert!(
|
||
toml.contains("name = \"app\""),
|
||
"fallback should use generic 'app' component name"
|
||
);
|
||
assert!(
|
||
toml.contains("setup = []"),
|
||
"fallback should have empty setup list"
|
||
);
|
||
// Must not contain Rust-specific commands in a non-Rust project
|
||
assert!(
|
||
!toml.contains("cargo"),
|
||
"fallback must not contain Rust-specific commands"
|
||
);
|
||
}
|
||
|
||
#[test]
|
||
fn detect_cargo_toml_generates_rust_component() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(
|
||
dir.path().join("Cargo.toml"),
|
||
"[package]\nname = \"test\"\n",
|
||
)
|
||
.unwrap();
|
||
|
||
let toml = detect_components_toml(dir.path());
|
||
assert!(toml.contains("name = \"server\""));
|
||
assert!(toml.contains("setup = [\"cargo check\"]"));
|
||
}
|
||
|
||
#[test]
|
||
fn detect_package_json_with_pnpm_lock_generates_pnpm_component() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("package.json"), "{}").unwrap();
|
||
fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();
|
||
|
||
let toml = detect_components_toml(dir.path());
|
||
assert!(toml.contains("name = \"frontend\""));
|
||
assert!(toml.contains("setup = [\"pnpm install\"]"));
|
||
}
|
||
|
||
#[test]
|
||
fn detect_package_json_without_pnpm_lock_generates_npm_component() {
|
||
let dir = tempdir().unwrap();
|
||
fs::write(dir.path().join("package.json"), "{}").unwrap();
|
||
|
||
let toml = detect_components_toml(dir.path());
|
||
assert!(toml.contains("name = \"frontend\""));
|
||
assert!(toml.contains("setup = [\"npm install\"]"));
|
||
}
|
||
|
||
// pyproject.toml must produce a "python" component installed via pip.
#[test]
fn detect_pyproject_toml_generates_python_component() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("pyproject.toml"), "[project]\nname = \"test\"\n").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"python\""));
    assert!(manifest.contains("pip install"));
}

// requirements.txt alone (no pyproject.toml) is also a Python marker.
#[test]
fn detect_requirements_txt_generates_python_component() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("requirements.txt"), "flask\n").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"python\""));
    assert!(manifest.contains("pip install"));
}

// go.mod must produce a "go" component that builds the whole module tree.
#[test]
fn detect_go_mod_generates_go_component() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"go\""));
    assert!(manifest.contains("setup = [\"go build ./...\"]"));
}

// A Gemfile must produce a "ruby" component installed via bundler.
#[test]
fn detect_gemfile_generates_ruby_component() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Gemfile"), "source \"https://rubygems.org\"\n").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"ruby\""));
    assert!(manifest.contains("setup = [\"bundle install\"]"));
}

// --- Bug 375: no Rust-specific commands for non-Rust projects ---

// Regression guard (bug 375): a Go project must never receive cargo commands.
#[test]
fn no_rust_commands_in_go_project() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(!manifest.contains("cargo"), "go project must not contain cargo commands");
    assert!(manifest.contains("go build"), "go project must use Go tooling");
}

// Regression guard (bug 375): a Node project must never receive cargo commands.
#[test]
fn no_rust_commands_in_node_project() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(!manifest.contains("cargo"), "node project must not contain cargo commands");
    assert!(manifest.contains("npm install"), "node project must use npm tooling");
}

// Regression guard (bug 375): an undetected stack gets an empty setup, not cargo.
#[test]
fn no_rust_commands_when_no_stack_detected() {
    let tmp = tempdir().unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(!manifest.contains("cargo"), "unknown stack must not contain cargo commands");
    // The fallback component must carry an empty setup list.
    assert!(manifest.contains("setup = []"), "unknown stack must have empty setup list");
}

// Rust and Node markers together must each yield their own component entry.
#[test]
fn detect_multiple_markers_generates_multiple_components() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"server\"\n").unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(manifest.contains("name = \"server\""));
    assert!(manifest.contains("name = \"frontend\""));
    // Exactly one [[component]] table per detected stack.
    assert_eq!(manifest.matches("[[component]]").count(), 2);
}

// Once a real stack is detected, the generic "app" fallback must be suppressed.
#[test]
fn detect_no_fallback_when_markers_found() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let manifest = detect_components_toml(tmp.path());
    assert!(!manifest.contains("name = \"app\""));
}

// --- detect_script_test ---

// With no stack markers at all, the generic stub script is returned.
#[test]
fn detect_script_test_no_markers_returns_stub() {
    let tmp = tempdir().unwrap();

    let script = detect_script_test(tmp.path());
    assert!(
        script.contains("No tests configured"),
        "fallback should contain the generic stub message"
    );
    assert!(script.starts_with("#!/usr/bin/env bash"));
}

// A Rust marker replaces the stub with `cargo test`.
#[test]
fn detect_script_test_cargo_toml_adds_cargo_test() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("cargo test"), "Rust project should run cargo test");
    assert!(!script.contains("No tests configured"));
}

// A Node marker without a pnpm lockfile replaces the stub with `npm test`.
#[test]
fn detect_script_test_package_json_npm_adds_npm_test() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("npm test"), "Node project without pnpm-lock should run npm test");
    assert!(!script.contains("No tests configured"));
}

// With a pnpm lockfile present, the script must use pnpm rather than npm.
#[test]
fn detect_script_test_package_json_pnpm_adds_pnpm_test() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    fs::write(tmp.path().join("pnpm-lock.yaml"), "").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("pnpm test"), "Node project with pnpm-lock should run pnpm test");
    // "npm test" is a substring of "pnpm test", so compare whole trimmed lines.
    assert!(
        !script.lines().any(|line| line.trim() == "npm test"),
        "should not use npm when pnpm-lock.yaml is present"
    );
}

// A pyproject.toml marker replaces the stub with pytest.
#[test]
fn detect_script_test_pyproject_toml_adds_pytest() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("pyproject.toml"), "[project]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("pytest"), "Python project should run pytest");
    assert!(!script.contains("No tests configured"));
}

// requirements.txt alone (no pyproject.toml) must still produce a pytest run.
#[test]
fn detect_script_test_requirements_txt_adds_pytest() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("requirements.txt"), "flask\n").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("pytest"), "Python project (requirements.txt) should run pytest");
    // Consistency with the sibling detection tests: once a stack is detected,
    // the generic fallback stub must be replaced, not merely appended to.
    assert!(!script.contains("No tests configured"));
}

// A go.mod marker replaces the stub with `go test ./...`.
#[test]
fn detect_script_test_go_mod_adds_go_test() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("go test ./..."), "Go project should run go test ./...");
    assert!(!script.contains("No tests configured"));
}

// When several stacks coexist, the script combines each stack's test command.
#[test]
fn detect_script_test_multi_stack_combines_commands() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(script.contains("go test ./..."), "multi-stack should include Go test command");
    assert!(script.contains("npm test"), "multi-stack should include Node test command");
}

// Generated scripts must open with the bash shebang and strict-mode prologue.
#[test]
fn detect_script_test_output_starts_with_shebang() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(tmp.path());
    assert!(
        script.starts_with("#!/usr/bin/env bash\nset -euo pipefail\n"),
        "generated script should start with bash shebang and set -euo pipefail"
    );
}

// End-to-end: scaffolding a Rust project writes cargo test into script/test.
#[test]
fn scaffold_script_test_contains_detected_commands_for_rust() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let written = fs::read_to_string(tmp.path().join("script/test")).unwrap();
    assert!(written.contains("cargo test"), "Rust project scaffold should set cargo test in script/test");
    assert!(!written.contains("No tests configured"), "should not use stub when stack is detected");
}

// End-to-end: scaffolding an empty project writes the generic stub script.
#[test]
fn scaffold_script_test_fallback_stub_when_no_stack() {
    let tmp = tempdir().unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let written = fs::read_to_string(tmp.path().join("script/test")).unwrap();
    assert!(written.contains("No tests configured"), "unknown stack should use the generic stub");
}

// --- generate_project_toml ---

// project.toml must combine the detected components with the agent pipeline.
#[test]
fn generate_project_toml_includes_both_components_and_agents() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let toml = generate_project_toml(tmp.path());
    // Detected component section.
    assert!(toml.contains("[[component]]"));
    assert!(toml.contains("name = \"server\""));
    // One agent entry per pipeline stage.
    assert!(toml.contains("[[agent]]"));
    for stage in ["coder", "qa", "mergemaster"] {
        assert!(toml.contains(&format!("stage = \"{stage}\"")));
    }
}

// End-to-end: a Cargo.toml present before scaffolding must surface as a
// "server" component (with `cargo check` setup) in .storkit/project.toml.
#[test]
fn scaffold_project_toml_contains_detected_components() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let written = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    assert!(
        written.contains("[[component]]"),
        "project.toml should contain a component entry"
    );
    assert!(
        written.contains("name = \"server\""),
        "Rust project should have a 'server' component"
    );
    assert!(
        written.contains("cargo check"),
        "Rust component should have cargo check setup"
    );
}

// End-to-end: with no detectable stack, project.toml falls back to a generic
// "app" component and stays free of Rust-specific commands (bug 375).
#[test]
fn scaffold_project_toml_fallback_when_no_stack_detected() {
    let tmp = tempdir().unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let written = fs::read_to_string(tmp.path().join(".storkit/project.toml")).unwrap();
    assert!(
        written.contains("[[component]]"),
        "project.toml should always have at least one component"
    );
    assert!(
        written.contains("name = \"app\""),
        "fallback should use generic 'app' component name"
    );
    assert!(
        !written.contains("cargo"),
        "fallback must not contain Rust-specific commands for non-Rust projects"
    );
}

// A pre-existing .storkit/project.toml must survive scaffolding untouched.
#[test]
fn scaffold_does_not_overwrite_existing_project_toml_with_components() {
    let tmp = tempdir().unwrap();
    let sk_dir = tmp.path().join(".storkit");
    fs::create_dir_all(&sk_dir).unwrap();
    let existing = "[[component]]\nname = \"custom\"\npath = \".\"\nsetup = [\"make build\"]\n";
    fs::write(sk_dir.join("project.toml"), existing).unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let after = fs::read_to_string(sk_dir.join("project.toml")).unwrap();
    assert_eq!(after, existing, "scaffold should not overwrite existing project.toml");
}
}
|