refactor: split io/fs/scaffold.rs into 4 sub-modules with co-located tests
The 2045-line scaffold.rs is split into a sub-module directory:
- templates.rs: STORY_KIT_* and DEFAULT_* template constants (161 lines)
- detect.rs: detect_components_toml + detect_script_{build,lint,test} + tests (989 lines)
- helpers.rs: write_*_if_missing, generate_project_toml, gitignore helpers (166 lines)
- mod.rs: scaffold_story_kit orchestrator + scaffold tests (756 lines)
include_str! paths in templates.rs are adjusted (one extra ../) for the deeper
nesting. Tests stay co-located with the code they exercise per Rust convention.
No behaviour change. All 77 scaffold tests pass; full suite green
(2635 tests with --test-threads=1).
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,992 @@
|
||||
//! Stack detection — inspect the project root for marker files and emit
|
||||
//! TOML `[[component]]` entries plus `script/build|lint|test` content.
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use super::templates::STORY_KIT_SCRIPT_TEST;
|
||||
|
||||
/// Detect the tech stack from the project root and return TOML `[[component]]` entries.
///
/// Inspects well-known marker files at the project root to identify which
/// tech stacks are present, then emits one `[[component]]` entry per detected
/// stack with sensible default `setup` commands. If no markers are found, a
/// single fallback `app` component with an empty `setup` list is returned so
/// that the pipeline never breaks on an unknown stack.
pub(crate) fn detect_components_toml(root: &Path) -> String {
    // Entries are collected in a fixed marker order so output is deterministic.
    let mut entries: Vec<String> = Vec::new();

    if root.join("Cargo.toml").exists() {
        entries.push(
            "[[component]]\nname = \"server\"\npath = \".\"\nsetup = [\"cargo check\"]\n".into(),
        );
    }

    if root.join("package.json").exists() {
        // Prefer pnpm when its lock file is present; otherwise default to npm.
        let installer = match root.join("pnpm-lock.yaml").exists() {
            true => "pnpm install",
            false => "npm install",
        };
        entries.push(format!(
            "[[component]]\nname = \"frontend\"\npath = \".\"\nsetup = [\"{installer}\"]\n"
        ));
    }

    let has_python_marker =
        root.join("pyproject.toml").exists() || root.join("requirements.txt").exists();
    if has_python_marker {
        entries.push(
            "[[component]]\nname = \"python\"\npath = \".\"\nsetup = [\"pip install -r requirements.txt\"]\n"
                .into(),
        );
    }

    if root.join("go.mod").exists() {
        entries.push(
            "[[component]]\nname = \"go\"\npath = \".\"\nsetup = [\"go build ./...\"]\n".into(),
        );
    }

    if root.join("Gemfile").exists() {
        entries.push(
            "[[component]]\nname = \"ruby\"\npath = \".\"\nsetup = [\"bundle install\"]\n".into(),
        );
    }

    if entries.is_empty() {
        // No tech stack markers detected — emit a single generic component
        // with an empty setup list. The ONBOARDING_PROMPT instructs the chat
        // agent to inspect the project and replace this with real definitions.
        entries.push("[[component]]\nname = \"app\"\npath = \".\"\nsetup = []\n".into());
    }

    entries.join("\n")
}
|
||||
|
||||
/// Detect the appropriate Node.js test command for a directory containing `package.json`.
///
/// Reads the `package.json` content to identify known test runners (vitest, jest).
/// Falls back to `npm test` or `pnpm test` based on which lock file is present.
fn detect_node_test_cmd(pkg_dir: &Path) -> String {
    let uses_pnpm = pkg_dir.join("pnpm-lock.yaml").exists();
    // A missing/unreadable package.json degrades to the empty string, which
    // simply matches none of the known runners below.
    let manifest = std::fs::read_to_string(pkg_dir.join("package.json")).unwrap_or_default();
    // pnpm can invoke locally-installed binaries directly; otherwise use npx.
    let runner_prefix = if uses_pnpm { "pnpm" } else { "npx" };

    if manifest.contains("\"vitest\"") {
        format!("{runner_prefix} vitest run")
    } else if manifest.contains("\"jest\"") {
        format!("{runner_prefix} jest")
    } else if uses_pnpm {
        "pnpm test".to_string()
    } else {
        "npm test".to_string()
    }
}
|
||||
|
||||
/// Detect the appropriate Node.js build command for a directory containing `package.json`.
///
/// The choice is driven purely by which lock file is present: a
/// `pnpm-lock.yaml` selects pnpm, anything else defaults to npm.
fn detect_node_build_cmd(pkg_dir: &Path) -> String {
    match pkg_dir.join("pnpm-lock.yaml").exists() {
        true => "pnpm run build",
        false => "npm run build",
    }
    .to_string()
}
|
||||
|
||||
/// Detect the appropriate Node.js lint command for a directory containing `package.json`.
///
/// Reads the `package.json` content to identify eslint. Falls back to
/// `npm run lint` or `pnpm run lint` based on which lock file is present.
fn detect_node_lint_cmd(pkg_dir: &Path) -> String {
    let uses_pnpm = pkg_dir.join("pnpm-lock.yaml").exists();
    // Missing/unreadable package.json reads as empty — no eslint match below.
    let manifest = std::fs::read_to_string(pkg_dir.join("package.json")).unwrap_or_default();

    if manifest.contains("\"eslint\"") {
        let runner = if uses_pnpm { "pnpm" } else { "npx" };
        return format!("{runner} eslint .");
    }

    match uses_pnpm {
        true => "pnpm run lint",
        false => "npm run lint",
    }
    .to_string()
}
|
||||
|
||||
/// Generate `script/build` content for a new project at `root`.
|
||||
///
|
||||
/// Inspects well-known marker files to identify which tech stacks are present
|
||||
/// and emits the appropriate build commands. Multi-stack projects get combined
|
||||
/// commands run sequentially. Falls back to a generic stub when no markers
|
||||
/// are found so the scaffold is always valid.
|
||||
///
|
||||
/// For projects with a frontend in a known subdirectory (`frontend/`, `client/`),
|
||||
/// the build command is detected from the presence of `pnpm-lock.yaml`.
|
||||
pub(crate) fn detect_script_build(root: &Path) -> String {
|
||||
let mut commands: Vec<String> = Vec::new();
|
||||
|
||||
if root.join("Cargo.toml").exists() {
|
||||
commands.push("cargo build --release".to_string());
|
||||
}
|
||||
|
||||
if root.join("package.json").exists() {
|
||||
commands.push(detect_node_build_cmd(root));
|
||||
}
|
||||
|
||||
// Detect frontend in known subdirectories (e.g. frontend/, client/)
|
||||
for subdir in &["frontend", "client"] {
|
||||
let sub_path = root.join(subdir);
|
||||
if sub_path.join("package.json").exists() {
|
||||
let cmd = detect_node_build_cmd(&sub_path);
|
||||
commands.push(format!("(cd {} && {})", subdir, cmd));
|
||||
}
|
||||
}
|
||||
|
||||
if root.join("pyproject.toml").exists() {
|
||||
commands.push("python -m build".to_string());
|
||||
}
|
||||
|
||||
if root.join("go.mod").exists() {
|
||||
commands.push("go build ./...".to_string());
|
||||
}
|
||||
|
||||
if commands.is_empty() {
|
||||
return "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's build commands here.\necho \"No build configured\"\n".to_string();
|
||||
}
|
||||
|
||||
let mut script = "#!/usr/bin/env bash\nset -euo pipefail\n\n".to_string();
|
||||
for cmd in commands {
|
||||
script.push_str(&cmd);
|
||||
script.push('\n');
|
||||
}
|
||||
script
|
||||
}
|
||||
|
||||
/// Generate `script/lint` content for a new project at `root`.
|
||||
///
|
||||
/// Inspects well-known marker files to identify which linters are present
|
||||
/// and emits the appropriate lint commands. Multi-stack projects get combined
|
||||
/// commands run sequentially. Falls back to a generic stub when no markers
|
||||
/// are found so the scaffold is always valid.
|
||||
///
|
||||
/// For projects with a frontend in a known subdirectory (`frontend/`, `client/`),
|
||||
/// the lint command is detected from the `package.json` (eslint, npm, pnpm).
|
||||
pub(crate) fn detect_script_lint(root: &Path) -> String {
|
||||
let mut commands: Vec<String> = Vec::new();
|
||||
|
||||
if root.join("Cargo.toml").exists() {
|
||||
commands.push("cargo fmt --all --check".to_string());
|
||||
commands.push("cargo clippy -- -D warnings".to_string());
|
||||
}
|
||||
|
||||
if root.join("package.json").exists() {
|
||||
commands.push(detect_node_lint_cmd(root));
|
||||
}
|
||||
|
||||
// Detect frontend in known subdirectories (e.g. frontend/, client/)
|
||||
for subdir in &["frontend", "client"] {
|
||||
let sub_path = root.join(subdir);
|
||||
if sub_path.join("package.json").exists() {
|
||||
let cmd = detect_node_lint_cmd(&sub_path);
|
||||
commands.push(format!("(cd {} && {})", subdir, cmd));
|
||||
}
|
||||
}
|
||||
|
||||
if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
|
||||
let mut content = std::fs::read_to_string(root.join("pyproject.toml")).unwrap_or_default();
|
||||
content
|
||||
.push_str(&std::fs::read_to_string(root.join("requirements.txt")).unwrap_or_default());
|
||||
if content.contains("ruff") {
|
||||
commands.push("ruff check .".to_string());
|
||||
} else {
|
||||
commands.push("flake8 .".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if root.join("go.mod").exists() {
|
||||
commands.push("go vet ./...".to_string());
|
||||
}
|
||||
|
||||
if commands.is_empty() {
|
||||
return "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's lint commands here.\necho \"No linters configured\"\n".to_string();
|
||||
}
|
||||
|
||||
let mut script = "#!/usr/bin/env bash\nset -euo pipefail\n\n".to_string();
|
||||
for cmd in commands {
|
||||
script.push_str(&cmd);
|
||||
script.push('\n');
|
||||
}
|
||||
script
|
||||
}
|
||||
|
||||
/// Generate `script/test` content for a new project at `root`.
|
||||
///
|
||||
/// Inspects well-known marker files to identify which tech stacks are present
|
||||
/// and emits the appropriate test commands. Multi-stack projects get combined
|
||||
/// commands run sequentially. Falls back to the generic stub when no markers
|
||||
/// are found so the scaffold is always valid.
|
||||
///
|
||||
/// For projects with a frontend in a known subdirectory (`frontend/`, `client/`),
|
||||
/// the test runner is detected from the `package.json` (vitest, jest, npm, pnpm).
|
||||
pub(crate) fn detect_script_test(root: &Path) -> String {
|
||||
let mut commands: Vec<String> = Vec::new();
|
||||
|
||||
if root.join("Cargo.toml").exists() {
|
||||
commands.push("cargo test".to_string());
|
||||
}
|
||||
|
||||
if root.join("package.json").exists() {
|
||||
if root.join("pnpm-lock.yaml").exists() {
|
||||
commands.push("pnpm test".to_string());
|
||||
} else {
|
||||
commands.push("npm test".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Detect frontend in known subdirectories (e.g. frontend/, client/)
|
||||
for subdir in &["frontend", "client"] {
|
||||
let sub_path = root.join(subdir);
|
||||
if sub_path.join("package.json").exists() {
|
||||
let cmd = detect_node_test_cmd(&sub_path);
|
||||
commands.push(format!("(cd {} && {})", subdir, cmd));
|
||||
}
|
||||
}
|
||||
|
||||
if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
|
||||
commands.push("pytest".to_string());
|
||||
}
|
||||
|
||||
if root.join("go.mod").exists() {
|
||||
commands.push("go test ./...".to_string());
|
||||
}
|
||||
|
||||
if commands.is_empty() {
|
||||
return STORY_KIT_SCRIPT_TEST.to_string();
|
||||
}
|
||||
|
||||
let mut script = "#!/usr/bin/env bash\nset -euo pipefail\n\n".to_string();
|
||||
for cmd in commands {
|
||||
script.push_str(&cmd);
|
||||
script.push('\n');
|
||||
}
|
||||
script
|
||||
}
|
||||
|
||||
// NOTE(review): stale doc comment orphaned by the scaffold.rs split —
// `generate_project_toml` (which this text describes) now lives in
// helpers.rs. Demoted from `///` to `//` so rustdoc does not attach it to
// the `tests` module that follows.
//
// Generate a `project.toml` for a new project at `root`.
// Detects the tech stack via `detect_components_toml` and combines the
// resulting `[[component]]` entries with the default project settings.
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
// --- detect_components_toml ------------------------------------------------
// One test per supported stack marker, plus negative tests ensuring no
// cross-stack (notably cargo) commands leak into unrelated projects.

#[test]
fn detect_no_markers_returns_fallback_components() {
    let dir = tempdir().unwrap();
    let toml = detect_components_toml(dir.path());
    // At least one [[component]] entry should always be present
    assert!(
        toml.contains("[[component]]"),
        "should always emit at least one component"
    );
    // Fallback should use a generic app component with empty setup
    assert!(
        toml.contains("name = \"app\""),
        "fallback should use generic 'app' component name"
    );
    assert!(
        toml.contains("setup = []"),
        "fallback should have empty setup list"
    );
    // Must not contain Rust-specific commands in a non-Rust project
    assert!(
        !toml.contains("cargo"),
        "fallback must not contain Rust-specific commands"
    );
}

#[test]
fn detect_cargo_toml_generates_rust_component() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Cargo.toml"),
        "[package]\nname = \"test\"\n",
    )
    .unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"server\""));
    assert!(toml.contains("setup = [\"cargo check\"]"));
}

#[test]
fn detect_package_json_with_pnpm_lock_generates_pnpm_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();
    // Presence of the lock file alone selects pnpm over npm.
    fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"frontend\""));
    assert!(toml.contains("setup = [\"pnpm install\"]"));
}

#[test]
fn detect_package_json_without_pnpm_lock_generates_npm_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"frontend\""));
    assert!(toml.contains("setup = [\"npm install\"]"));
}

#[test]
fn detect_pyproject_toml_generates_python_component() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("pyproject.toml"),
        "[project]\nname = \"test\"\n",
    )
    .unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"python\""));
    assert!(toml.contains("pip install"));
}

#[test]
fn detect_requirements_txt_generates_python_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("requirements.txt"), "flask\n").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"python\""));
    assert!(toml.contains("pip install"));
}

#[test]
fn detect_go_mod_generates_go_component() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"go\""));
    assert!(toml.contains("setup = [\"go build ./...\"]"));
}

#[test]
fn detect_gemfile_generates_ruby_component() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Gemfile"),
        "source \"https://rubygems.org\"\n",
    )
    .unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"ruby\""));
    assert!(toml.contains("setup = [\"bundle install\"]"));
}

#[test]
fn no_rust_commands_in_go_project() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(
        !toml.contains("cargo"),
        "go project must not contain cargo commands"
    );
    assert!(toml.contains("go build"), "go project must use Go tooling");
}

#[test]
fn no_rust_commands_in_node_project() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(
        !toml.contains("cargo"),
        "node project must not contain cargo commands"
    );
    assert!(
        toml.contains("npm install"),
        "node project must use npm tooling"
    );
}

#[test]
fn no_rust_commands_when_no_stack_detected() {
    let dir = tempdir().unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(
        !toml.contains("cargo"),
        "unknown stack must not contain cargo commands"
    );
    // setup list must be empty
    assert!(
        toml.contains("setup = []"),
        "unknown stack must have empty setup list"
    );
}

#[test]
fn detect_multiple_markers_generates_multiple_components() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Cargo.toml"),
        "[package]\nname = \"server\"\n",
    )
    .unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let toml = detect_components_toml(dir.path());
    assert!(toml.contains("name = \"server\""));
    assert!(toml.contains("name = \"frontend\""));
    // Both component entries should be present
    let component_count = toml.matches("[[component]]").count();
    assert_eq!(component_count, 2);
}

#[test]
fn detect_no_fallback_when_markers_found() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let toml = detect_components_toml(dir.path());
    // The fallback "app" component should NOT appear when a real stack is detected
    assert!(!toml.contains("name = \"app\""));
}
|
||||
|
||||
// --- detect_script_test ------------------------------------------------------
// Stub fallback, per-stack test commands, multi-stack combination, and
// frontend/client subdirectory runner detection (vitest/jest/npm/pnpm).

#[test]
fn detect_script_test_no_markers_returns_stub() {
    let dir = tempdir().unwrap();
    let script = detect_script_test(dir.path());
    assert!(
        script.contains("No tests configured"),
        "fallback should contain the generic stub message"
    );
    assert!(script.starts_with("#!/usr/bin/env bash"));
}

#[test]
fn detect_script_test_cargo_toml_adds_cargo_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("cargo test"),
        "Rust project should run cargo test"
    );
    assert!(!script.contains("No tests configured"));
}

#[test]
fn detect_script_test_package_json_npm_adds_npm_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("npm test"),
        "Node project without pnpm-lock should run npm test"
    );
    assert!(!script.contains("No tests configured"));
}

#[test]
fn detect_script_test_package_json_pnpm_adds_pnpm_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();
    fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("pnpm test"),
        "Node project with pnpm-lock should run pnpm test"
    );
    // "pnpm test" is a substring of itself; verify there's no bare "npm test" line
    assert!(
        !script.lines().any(|l| l.trim() == "npm test"),
        "should not use npm when pnpm-lock.yaml is present"
    );
}

#[test]
fn detect_script_test_pyproject_toml_adds_pytest() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("pyproject.toml"),
        "[project]\nname = \"x\"\n",
    )
    .unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("pytest"),
        "Python project should run pytest"
    );
    assert!(!script.contains("No tests configured"));
}

#[test]
fn detect_script_test_requirements_txt_adds_pytest() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("requirements.txt"), "flask\n").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("pytest"),
        "Python project (requirements.txt) should run pytest"
    );
}

#[test]
fn detect_script_test_go_mod_adds_go_test() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("go test ./..."),
        "Go project should run go test ./..."
    );
    assert!(!script.contains("No tests configured"));
}

#[test]
fn detect_script_test_multi_stack_combines_commands() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("go test ./..."),
        "multi-stack should include Go test command"
    );
    assert!(
        script.contains("npm test"),
        "multi-stack should include Node test command"
    );
}

#[test]
fn detect_script_test_frontend_subdir_with_vitest_uses_npx_vitest() {
    let dir = tempdir().unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    fs::write(
        frontend.join("package.json"),
        r#"{"devDependencies":{"vitest":"^1.0.0"},"scripts":{"test":"vitest run"}}"#,
    )
    .unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("vitest run"),
        "frontend with vitest should emit vitest run"
    );
    assert!(
        script.contains("cd frontend"),
        "should cd into the frontend directory"
    );
    assert!(
        !script.contains("No tests configured"),
        "should not use stub when frontend is detected"
    );
}

#[test]
fn detect_script_test_frontend_subdir_with_jest_uses_npx_jest() {
    let dir = tempdir().unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    fs::write(
        frontend.join("package.json"),
        r#"{"devDependencies":{"jest":"^29.0.0"},"scripts":{"test":"jest"}}"#,
    )
    .unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("jest"),
        "frontend with jest should emit jest"
    );
    assert!(
        script.contains("cd frontend"),
        "should cd into the frontend directory"
    );
}

#[test]
fn detect_script_test_frontend_subdir_no_known_runner_uses_npm_test() {
    let dir = tempdir().unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    // mocha is deliberately NOT a recognized runner — forces the fallback path.
    fs::write(
        frontend.join("package.json"),
        r#"{"scripts":{"test":"mocha"}}"#,
    )
    .unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("npm test"),
        "frontend without known runner should fall back to npm test"
    );
    assert!(script.contains("cd frontend"));
}

#[test]
fn detect_script_test_frontend_subdir_pnpm_uses_pnpm_vitest() {
    let dir = tempdir().unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    fs::write(
        frontend.join("package.json"),
        r#"{"devDependencies":{"vitest":"^1.0.0"}}"#,
    )
    .unwrap();
    fs::write(frontend.join("pnpm-lock.yaml"), "").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("pnpm vitest run"),
        "pnpm frontend with vitest should use pnpm vitest run"
    );
}

#[test]
fn detect_script_test_rust_plus_frontend_subdir_both_included() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Cargo.toml"),
        "[package]\nname = \"server\"\n",
    )
    .unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    fs::write(
        frontend.join("package.json"),
        r#"{"devDependencies":{"vitest":"^1.0.0"}}"#,
    )
    .unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("cargo test"),
        "Rust + frontend should include cargo test"
    );
    assert!(
        script.contains("vitest run"),
        "Rust + frontend should include vitest run"
    );
    assert!(
        script.contains("cd frontend"),
        "Rust + frontend should cd into frontend"
    );
}

#[test]
fn detect_script_test_client_subdir_detected() {
    let dir = tempdir().unwrap();
    let client = dir.path().join("client");
    fs::create_dir_all(&client).unwrap();
    fs::write(
        client.join("package.json"),
        r#"{"scripts":{"test":"jest"}}"#,
    )
    .unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.contains("cd client"),
        "client/ subdir should also be detected"
    );
}

#[test]
fn detect_script_test_output_starts_with_shebang() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_test(dir.path());
    assert!(
        script.starts_with("#!/usr/bin/env bash\nset -euo pipefail\n"),
        "generated script should start with bash shebang and set -euo pipefail"
    );
}
|
||||
|
||||
// --- detect_script_build -----------------------------------------------------
// Stub fallback plus per-stack build commands and frontend subdir detection.

#[test]
fn detect_script_build_no_markers_returns_stub() {
    let dir = tempdir().unwrap();
    let script = detect_script_build(dir.path());
    assert!(
        script.contains("No build configured"),
        "fallback should contain the generic stub message"
    );
    assert!(script.starts_with("#!/usr/bin/env bash"));
}

#[test]
fn detect_script_build_cargo_toml_adds_cargo_build_release() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_build(dir.path());
    assert!(
        script.contains("cargo build --release"),
        "Rust project should run cargo build --release"
    );
    assert!(!script.contains("No build configured"));
}

#[test]
fn detect_script_build_package_json_npm_adds_npm_run_build() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let script = detect_script_build(dir.path());
    assert!(
        script.contains("npm run build"),
        "Node project without pnpm-lock should run npm run build"
    );
}

#[test]
fn detect_script_build_package_json_pnpm_adds_pnpm_run_build() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();
    fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();

    let script = detect_script_build(dir.path());
    assert!(
        script.contains("pnpm run build"),
        "Node project with pnpm-lock should run pnpm run build"
    );
    // "pnpm run build" contains "npm run build" as a substring — check whole lines.
    assert!(
        !script.lines().any(|l| l.trim() == "npm run build"),
        "should not use npm when pnpm-lock.yaml is present"
    );
}

#[test]
fn detect_script_build_go_mod_adds_go_build() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let script = detect_script_build(dir.path());
    assert!(
        script.contains("go build ./..."),
        "Go project should run go build ./..."
    );
}

#[test]
fn detect_script_build_pyproject_toml_adds_python_build() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("pyproject.toml"),
        "[project]\nname = \"x\"\n",
    )
    .unwrap();

    let script = detect_script_build(dir.path());
    assert!(
        script.contains("python -m build"),
        "Python project should run python -m build"
    );
}

#[test]
fn detect_script_build_frontend_subdir_detected() {
    let dir = tempdir().unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    fs::write(frontend.join("package.json"), "{}").unwrap();

    let script = detect_script_build(dir.path());
    assert!(
        script.contains("cd frontend"),
        "frontend subdir should be detected for build"
    );
    assert!(script.contains("npm run build"));
}

#[test]
fn detect_script_build_rust_plus_frontend_subdir_both_included() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("Cargo.toml"),
        "[package]\nname = \"server\"\n",
    )
    .unwrap();
    let frontend = dir.path().join("frontend");
    fs::create_dir_all(&frontend).unwrap();
    fs::write(frontend.join("package.json"), "{}").unwrap();

    let script = detect_script_build(dir.path());
    assert!(script.contains("cargo build --release"));
    assert!(script.contains("cd frontend"));
    assert!(script.contains("npm run build"));
}
|
||||
|
||||
// --- detect_script_lint ------------------------------------------------------
// Stub fallback plus per-stack lint commands (fmt/clippy, eslint, ruff/flake8,
// go vet) and package-manager selection.

#[test]
fn detect_script_lint_no_markers_returns_stub() {
    let dir = tempdir().unwrap();
    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("No linters configured"),
        "fallback should contain the generic stub message"
    );
    assert!(script.starts_with("#!/usr/bin/env bash"));
}

#[test]
fn detect_script_lint_cargo_toml_adds_fmt_and_clippy() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("cargo fmt --all --check"),
        "Rust project should check formatting"
    );
    assert!(
        script.contains("cargo clippy -- -D warnings"),
        "Rust project should run clippy"
    );
    assert!(!script.contains("No linters configured"));
}

#[test]
fn detect_script_lint_package_json_without_eslint_uses_npm_run_lint() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("package.json"), "{}").unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("npm run lint"),
        "Node project without eslint dep should fall back to npm run lint"
    );
}

#[test]
fn detect_script_lint_package_json_with_eslint_uses_npx_eslint() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("package.json"),
        r#"{"devDependencies":{"eslint":"^8.0.0"}}"#,
    )
    .unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("npx eslint ."),
        "Node project with eslint should use npx eslint ."
    );
}

#[test]
fn detect_script_lint_pnpm_with_eslint_uses_pnpm_eslint() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("package.json"),
        r#"{"devDependencies":{"eslint":"^8.0.0"}}"#,
    )
    .unwrap();
    fs::write(dir.path().join("pnpm-lock.yaml"), "").unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("pnpm eslint ."),
        "pnpm project with eslint should use pnpm eslint ."
    );
}

#[test]
fn detect_script_lint_python_requirements_uses_flake8() {
    let dir = tempdir().unwrap();
    // "flask" must not trigger the "ruff" substring match.
    fs::write(dir.path().join("requirements.txt"), "flask\n").unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("flake8 ."),
        "Python project without ruff should use flake8"
    );
}

#[test]
fn detect_script_lint_python_with_ruff_uses_ruff() {
    let dir = tempdir().unwrap();
    fs::write(
        dir.path().join("pyproject.toml"),
        "[project]\nname = \"x\"\n\n[tool.ruff]\n",
    )
    .unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("ruff check ."),
        "Python project with ruff configured should use ruff"
    );
    assert!(
        !script.contains("flake8"),
        "should not use flake8 when ruff is configured"
    );
}

#[test]
fn detect_script_lint_go_mod_adds_go_vet() {
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("go.mod"), "module example.com/app\n").unwrap();

    let script = detect_script_lint(dir.path());
    assert!(
        script.contains("go vet ./..."),
        "Go project should run go vet ./..."
    );
}
|
||||
|
||||
#[test]
|
||||
fn detect_script_lint_frontend_subdir_detected() {
|
||||
let dir = tempdir().unwrap();
|
||||
let frontend = dir.path().join("frontend");
|
||||
fs::create_dir_all(&frontend).unwrap();
|
||||
fs::write(frontend.join("package.json"), "{}").unwrap();
|
||||
|
||||
let script = detect_script_lint(dir.path());
|
||||
assert!(
|
||||
script.contains("cd frontend"),
|
||||
"frontend subdir should be detected for lint"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn detect_script_lint_rust_plus_frontend_subdir_both_included() {
|
||||
let dir = tempdir().unwrap();
|
||||
fs::write(
|
||||
dir.path().join("Cargo.toml"),
|
||||
"[package]\nname = \"server\"\n",
|
||||
)
|
||||
.unwrap();
|
||||
let frontend = dir.path().join("frontend");
|
||||
fs::create_dir_all(&frontend).unwrap();
|
||||
fs::write(frontend.join("package.json"), "{}").unwrap();
|
||||
|
||||
let script = detect_script_lint(dir.path());
|
||||
assert!(script.contains("cargo fmt --all --check"));
|
||||
assert!(script.contains("cargo clippy -- -D warnings"));
|
||||
assert!(script.contains("cd frontend"));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,167 @@
|
||||
//! Internal helpers for scaffolding: file/script writing, .gitignore management,
|
||||
//! and project.toml generation.
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use super::detect::detect_components_toml;
|
||||
use super::templates::{DEFAULT_AGENTS_TOML, DEFAULT_PROJECT_SETTINGS_TOML};
|
||||
|
||||
pub(super) fn generate_project_toml(root: &Path) -> String {
|
||||
let components = detect_components_toml(root);
|
||||
format!("{components}\n{DEFAULT_PROJECT_SETTINGS_TOML}")
|
||||
}
|
||||
|
||||
pub(super) fn write_file_if_missing(path: &Path, content: &str) -> Result<(), String> {
|
||||
if path.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
fs::write(path, content).map_err(|e| format!("Failed to write file: {}", e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write `content` to `path` if missing, then ensure the file is executable.
|
||||
pub(super) fn write_script_if_missing(path: &Path, content: &str) -> Result<(), String> {
|
||||
write_file_if_missing(path, content)?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut perms = fs::metadata(path)
|
||||
.map_err(|e| format!("Failed to read permissions for {}: {}", path.display(), e))?
|
||||
.permissions();
|
||||
perms.set_mode(0o755);
|
||||
fs::set_permissions(path, perms)
|
||||
.map_err(|e| format!("Failed to set permissions on {}: {}", path.display(), e))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write (or idempotently update) `.huskies/.gitignore` with Story Kit–specific
|
||||
/// ignore patterns for files that live inside the `.huskies/` directory.
|
||||
/// Patterns are relative to `.huskies/` as git resolves `.gitignore` files
|
||||
/// relative to the directory that contains them.
|
||||
pub(super) fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
|
||||
// Entries that belong inside .huskies/.gitignore (relative to .huskies/).
|
||||
let entries = [
|
||||
"bot.toml",
|
||||
"matrix_store/",
|
||||
"matrix_device_id",
|
||||
"matrix_history.json",
|
||||
"timers.json",
|
||||
"worktrees/",
|
||||
"merge_workspace/",
|
||||
"coverage/",
|
||||
"work/2_current/",
|
||||
"work/3_qa/",
|
||||
"work/4_merge/",
|
||||
"logs/",
|
||||
"token_usage.jsonl",
|
||||
"wizard_state.json",
|
||||
"store.json",
|
||||
"pipeline.db",
|
||||
"*.db",
|
||||
];
|
||||
|
||||
let gitignore_path = root.join(".huskies").join(".gitignore");
|
||||
let existing = if gitignore_path.exists() {
|
||||
fs::read_to_string(&gitignore_path)
|
||||
.map_err(|e| format!("Failed to read .huskies/.gitignore: {}", e))?
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
let missing: Vec<&str> = entries
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|e| !existing.lines().any(|l| l.trim() == *e))
|
||||
.collect();
|
||||
|
||||
if missing.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut new_content = existing;
|
||||
if !new_content.is_empty() && !new_content.ends_with('\n') {
|
||||
new_content.push('\n');
|
||||
}
|
||||
for entry in missing {
|
||||
new_content.push_str(entry);
|
||||
new_content.push('\n');
|
||||
}
|
||||
|
||||
fs::write(&gitignore_path, new_content)
|
||||
.map_err(|e| format!("Failed to write .huskies/.gitignore: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Append root-level Story Kit entries to the project `.gitignore`.
|
||||
/// Only `.huskies_port` and `.mcp.json` remain here because they live at
|
||||
/// the project root and git does not support `../` patterns in `.gitignore`
|
||||
/// files, so they cannot be expressed in `.huskies/.gitignore`.
|
||||
/// `store.json` is excluded via `.huskies/.gitignore` since it now lives
|
||||
/// inside the `.huskies/` directory.
|
||||
pub(super) fn append_root_gitignore_entries(root: &Path) -> Result<(), String> {
|
||||
let entries = [".huskies_port", ".mcp.json"];
|
||||
|
||||
let gitignore_path = root.join(".gitignore");
|
||||
let existing = if gitignore_path.exists() {
|
||||
fs::read_to_string(&gitignore_path)
|
||||
.map_err(|e| format!("Failed to read .gitignore: {}", e))?
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
let missing: Vec<&str> = entries
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|e| !existing.lines().any(|l| l.trim() == *e))
|
||||
.collect();
|
||||
|
||||
if missing.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut new_content = existing;
|
||||
if !new_content.is_empty() && !new_content.ends_with('\n') {
|
||||
new_content.push('\n');
|
||||
}
|
||||
for entry in missing {
|
||||
new_content.push_str(entry);
|
||||
new_content.push('\n');
|
||||
}
|
||||
|
||||
fs::write(&gitignore_path, new_content)
|
||||
.map_err(|e| format!("Failed to write .gitignore: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn generate_project_toml_includes_components_but_not_agents() {
        // A Cargo.toml marker makes detection emit a "server" component.
        let dir = tempdir().unwrap();
        fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();

        let toml = generate_project_toml(dir.path());
        // Component section should be present
        assert!(toml.contains("[[component]]"));
        assert!(toml.contains("name = \"server\""));
        // Agent sections must NOT be in project.toml — they go in agents.toml
        assert!(!toml.contains("[[agent]]"));
    }

    #[test]
    fn default_agents_toml_has_coder_qa_mergemaster() {
        // Sanity-check the bundled template covers all three pipeline stages.
        assert!(DEFAULT_AGENTS_TOML.contains("[[agent]]"));
        assert!(DEFAULT_AGENTS_TOML.contains("stage = \"coder\""));
        assert!(DEFAULT_AGENTS_TOML.contains("stage = \"qa\""));
        assert!(DEFAULT_AGENTS_TOML.contains("stage = \"mergemaster\""));
    }
}
|
||||
@@ -0,0 +1,757 @@
|
||||
//! Project scaffolding — creates the `.huskies/` directory structure and default files.
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
mod detect;
|
||||
mod helpers;
|
||||
mod templates;
|
||||
|
||||
use detect::{detect_components_toml, detect_script_build, detect_script_lint, detect_script_test};
|
||||
use helpers::{
|
||||
append_root_gitignore_entries, generate_project_toml, write_file_if_missing,
|
||||
write_script_if_missing, write_story_kit_gitignore,
|
||||
};
|
||||
use templates::{
|
||||
BOT_TOML_MATRIX_EXAMPLE, BOT_TOML_SLACK_EXAMPLE, BOT_TOML_WHATSAPP_META_EXAMPLE,
|
||||
BOT_TOML_WHATSAPP_TWILIO_EXAMPLE, DEFAULT_AGENTS_TOML, STORY_KIT_CLAUDE_MD,
|
||||
STORY_KIT_CLAUDE_SETTINGS, STORY_KIT_CONTEXT, STORY_KIT_README, STORY_KIT_SCRIPT_TEST,
|
||||
STORY_KIT_STACK,
|
||||
};
|
||||
|
||||
pub(crate) fn scaffold_story_kit(root: &Path, port: u16) -> Result<(), String> {
|
||||
let story_kit_root = root.join(".huskies");
|
||||
let specs_root = story_kit_root.join("specs");
|
||||
let tech_root = specs_root.join("tech");
|
||||
let functional_root = specs_root.join("functional");
|
||||
let script_root = root.join("script");
|
||||
|
||||
// Create the work/ pipeline directories, each with a .gitkeep so empty dirs survive git clone
|
||||
let work_stages = [
|
||||
"1_backlog",
|
||||
"2_current",
|
||||
"3_qa",
|
||||
"4_merge",
|
||||
"5_done",
|
||||
"6_archived",
|
||||
];
|
||||
for stage in &work_stages {
|
||||
let dir = story_kit_root.join("work").join(stage);
|
||||
fs::create_dir_all(&dir).map_err(|e| format!("Failed to create work/{}: {}", stage, e))?;
|
||||
write_file_if_missing(&dir.join(".gitkeep"), "")?;
|
||||
}
|
||||
|
||||
fs::create_dir_all(&tech_root).map_err(|e| format!("Failed to create specs/tech: {}", e))?;
|
||||
fs::create_dir_all(&functional_root)
|
||||
.map_err(|e| format!("Failed to create specs/functional: {}", e))?;
|
||||
fs::create_dir_all(&script_root)
|
||||
.map_err(|e| format!("Failed to create script/ directory: {}", e))?;
|
||||
|
||||
write_file_if_missing(&story_kit_root.join("README.md"), STORY_KIT_README)?;
|
||||
let project_toml_content = generate_project_toml(root);
|
||||
write_file_if_missing(&story_kit_root.join("project.toml"), &project_toml_content)?;
|
||||
write_file_if_missing(&story_kit_root.join("agents.toml"), DEFAULT_AGENTS_TOML)?;
|
||||
write_file_if_missing(&specs_root.join("00_CONTEXT.md"), STORY_KIT_CONTEXT)?;
|
||||
write_file_if_missing(&tech_root.join("STACK.md"), STORY_KIT_STACK)?;
|
||||
let script_test_content = detect_script_test(root);
|
||||
write_script_if_missing(&script_root.join("test"), &script_test_content)?;
|
||||
let script_build_content = detect_script_build(root);
|
||||
write_script_if_missing(&script_root.join("build"), &script_build_content)?;
|
||||
let script_lint_content = detect_script_lint(root);
|
||||
write_script_if_missing(&script_root.join("lint"), &script_lint_content)?;
|
||||
write_file_if_missing(&root.join("CLAUDE.md"), STORY_KIT_CLAUDE_MD)?;
|
||||
|
||||
// Write per-transport bot.toml example files so users can see all options.
|
||||
write_file_if_missing(
|
||||
&story_kit_root.join("bot.toml.matrix.example"),
|
||||
BOT_TOML_MATRIX_EXAMPLE,
|
||||
)?;
|
||||
write_file_if_missing(
|
||||
&story_kit_root.join("bot.toml.whatsapp-meta.example"),
|
||||
BOT_TOML_WHATSAPP_META_EXAMPLE,
|
||||
)?;
|
||||
write_file_if_missing(
|
||||
&story_kit_root.join("bot.toml.whatsapp-twilio.example"),
|
||||
BOT_TOML_WHATSAPP_TWILIO_EXAMPLE,
|
||||
)?;
|
||||
write_file_if_missing(
|
||||
&story_kit_root.join("bot.toml.slack.example"),
|
||||
BOT_TOML_SLACK_EXAMPLE,
|
||||
)?;
|
||||
|
||||
// Write .mcp.json at the project root so agents can find the MCP server.
|
||||
// Only written when missing — never overwrites an existing file, because
|
||||
// the port is environment-specific and must not clobber a running instance.
|
||||
let mcp_content = format!(
|
||||
"{{\n \"mcpServers\": {{\n \"huskies\": {{\n \"type\": \"http\",\n \"url\": \"http://localhost:{port}/mcp\"\n }}\n }}\n}}\n"
|
||||
);
|
||||
write_file_if_missing(&root.join(".mcp.json"), &mcp_content)?;
|
||||
|
||||
// Create .claude/settings.json with sensible permission defaults so that
|
||||
// Claude Code (both agents and web UI chat) can operate without constant
|
||||
// permission prompts.
|
||||
let claude_dir = root.join(".claude");
|
||||
fs::create_dir_all(&claude_dir)
|
||||
.map_err(|e| format!("Failed to create .claude/ directory: {}", e))?;
|
||||
write_file_if_missing(&claude_dir.join("settings.json"), STORY_KIT_CLAUDE_SETTINGS)?;
|
||||
|
||||
write_story_kit_gitignore(root)?;
|
||||
append_root_gitignore_entries(root)?;
|
||||
|
||||
// Run `git init` if the directory is not already a git repo, then make an initial commit
|
||||
if !root.join(".git").exists() {
|
||||
let init_status = std::process::Command::new("git")
|
||||
.args(["init"])
|
||||
.current_dir(root)
|
||||
.status()
|
||||
.map_err(|e| format!("Failed to run git init: {}", e))?;
|
||||
if !init_status.success() {
|
||||
return Err("git init failed".to_string());
|
||||
}
|
||||
|
||||
let add_output = std::process::Command::new("git")
|
||||
.args([
|
||||
"add",
|
||||
".huskies",
|
||||
"script",
|
||||
".gitignore",
|
||||
"CLAUDE.md",
|
||||
".claude",
|
||||
])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to run git add: {}", e))?;
|
||||
if !add_output.status.success() {
|
||||
return Err(format!(
|
||||
"git add failed: {}",
|
||||
String::from_utf8_lossy(&add_output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
let commit_output = std::process::Command::new("git")
|
||||
.args([
|
||||
"-c",
|
||||
"user.email=huskies@localhost",
|
||||
"-c",
|
||||
"user.name=Story Kit",
|
||||
"commit",
|
||||
"-m",
|
||||
"Initial Story Kit scaffold",
|
||||
])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to run git commit: {}", e))?;
|
||||
if !commit_output.status.success() {
|
||||
return Err(format!(
|
||||
"git commit failed: {}",
|
||||
String::from_utf8_lossy(&commit_output.stderr)
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_creates_structure() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
assert!(dir.path().join(".huskies/README.md").exists());
|
||||
assert!(dir.path().join(".huskies/project.toml").exists());
|
||||
assert!(dir.path().join(".huskies/agents.toml").exists());
|
||||
assert!(dir.path().join(".huskies/specs/00_CONTEXT.md").exists());
|
||||
assert!(dir.path().join(".huskies/specs/tech/STACK.md").exists());
|
||||
// Old stories/ dirs should NOT be created
|
||||
assert!(!dir.path().join(".huskies/stories").exists());
|
||||
assert!(dir.path().join("script/test").exists());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_creates_work_pipeline_dirs() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let stages = [
|
||||
"1_backlog",
|
||||
"2_current",
|
||||
"3_qa",
|
||||
"4_merge",
|
||||
"5_done",
|
||||
"6_archived",
|
||||
];
|
||||
for stage in &stages {
|
||||
let path = dir.path().join(".huskies/work").join(stage);
|
||||
assert!(path.is_dir(), "work/{} should be a directory", stage);
|
||||
assert!(
|
||||
path.join(".gitkeep").exists(),
|
||||
"work/{} should have a .gitkeep file",
|
||||
stage
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_agents_toml_has_coder_qa_mergemaster() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
// Agent definitions go into agents.toml, not project.toml.
|
||||
let agents = fs::read_to_string(dir.path().join(".huskies/agents.toml")).unwrap();
|
||||
assert!(agents.contains("[[agent]]"));
|
||||
assert!(agents.contains("stage = \"coder\""));
|
||||
assert!(agents.contains("stage = \"qa\""));
|
||||
assert!(agents.contains("stage = \"mergemaster\""));
|
||||
assert!(agents.contains("model = \"sonnet\""));
|
||||
|
||||
// project.toml should NOT contain [[agent]] blocks.
|
||||
let project = fs::read_to_string(dir.path().join(".huskies/project.toml")).unwrap();
|
||||
assert!(!project.contains("[[agent]]"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_project_toml_contains_rate_limit_and_timezone_comments() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join(".huskies/project.toml")).unwrap();
|
||||
assert!(
|
||||
content.contains("rate_limit_notifications"),
|
||||
"project.toml scaffold should document rate_limit_notifications"
|
||||
);
|
||||
assert!(
|
||||
content.contains("timezone"),
|
||||
"project.toml scaffold should document timezone"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_project_toml_contains_max_retries_with_default_value() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join(".huskies/project.toml")).unwrap();
|
||||
assert!(
|
||||
content.contains("max_retries = 2"),
|
||||
"project.toml scaffold should include max_retries with default value 2"
|
||||
);
|
||||
assert!(
|
||||
content.contains("Maximum number of retries"),
|
||||
"project.toml scaffold should include a comment explaining max_retries"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_project_toml_contains_commented_out_optional_fields() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join(".huskies/project.toml")).unwrap();
|
||||
assert!(
|
||||
content.contains("# default_coder_model"),
|
||||
"project.toml scaffold should include commented-out default_coder_model"
|
||||
);
|
||||
assert!(
|
||||
content.contains("# max_coders"),
|
||||
"project.toml scaffold should include commented-out max_coders"
|
||||
);
|
||||
assert!(
|
||||
content.contains("# base_branch"),
|
||||
"project.toml scaffold should include commented-out base_branch"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_project_toml_round_trips_through_project_config_load() {
|
||||
use crate::config::ProjectConfig;
|
||||
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
// The generated project.toml must parse without error.
|
||||
let config = ProjectConfig::load(dir.path())
|
||||
.expect("Generated project.toml should parse without error");
|
||||
|
||||
// Key defaults must survive the round-trip.
|
||||
assert_eq!(config.default_qa, "server");
|
||||
assert_eq!(config.max_retries, 2);
|
||||
assert!(
|
||||
config.rate_limit_notifications,
|
||||
"rate_limit_notifications should default to true"
|
||||
);
|
||||
assert!(
|
||||
config.default_coder_model.is_none(),
|
||||
"default_coder_model should be None when commented out"
|
||||
);
|
||||
assert!(
|
||||
config.max_coders.is_none(),
|
||||
"max_coders should be None when commented out"
|
||||
);
|
||||
assert!(
|
||||
config.base_branch.is_none(),
|
||||
"base_branch should be None when commented out"
|
||||
);
|
||||
assert!(
|
||||
config.timezone.is_none(),
|
||||
"timezone should be None when commented out"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_context_is_blank_template_not_story_kit_content() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join(".huskies/specs/00_CONTEXT.md")).unwrap();
|
||||
assert!(content.contains("<!-- huskies:scaffold-template -->"));
|
||||
assert!(content.contains("## High-Level Goal"));
|
||||
assert!(content.contains("## Core Features"));
|
||||
assert!(content.contains("## Domain Definition"));
|
||||
assert!(content.contains("## Glossary"));
|
||||
// Must NOT contain Story Kit-specific content
|
||||
assert!(!content.contains("Agentic AI Code Assistant"));
|
||||
assert!(!content.contains("Poem HTTP server"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_stack_is_blank_template_not_story_kit_content() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join(".huskies/specs/tech/STACK.md")).unwrap();
|
||||
assert!(content.contains("<!-- huskies:scaffold-template -->"));
|
||||
assert!(content.contains("## Core Stack"));
|
||||
assert!(content.contains("## Coding Standards"));
|
||||
assert!(content.contains("## Quality Gates"));
|
||||
assert!(content.contains("## Libraries"));
|
||||
// Must NOT contain Story Kit-specific content
|
||||
assert!(!content.contains("Poem HTTP server"));
|
||||
assert!(!content.contains("TypeScript + React"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_creates_executable_script_test() {
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let script_test = dir.path().join("script/test");
|
||||
assert!(script_test.exists(), "script/test should be created");
|
||||
let perms = fs::metadata(&script_test).unwrap().permissions();
|
||||
assert!(
|
||||
perms.mode() & 0o111 != 0,
|
||||
"script/test should be executable"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_does_not_overwrite_existing() {
|
||||
let dir = tempdir().unwrap();
|
||||
let readme = dir.path().join(".huskies/README.md");
|
||||
fs::create_dir_all(readme.parent().unwrap()).unwrap();
|
||||
fs::write(&readme, "custom content").unwrap();
|
||||
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
assert_eq!(fs::read_to_string(&readme).unwrap(), "custom content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_is_idempotent() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let readme_content = fs::read_to_string(dir.path().join(".huskies/README.md")).unwrap();
|
||||
let toml_content = fs::read_to_string(dir.path().join(".huskies/project.toml")).unwrap();
|
||||
|
||||
// Run again — must not change content or add duplicate .gitignore entries
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
fs::read_to_string(dir.path().join(".huskies/README.md")).unwrap(),
|
||||
readme_content
|
||||
);
|
||||
assert_eq!(
|
||||
fs::read_to_string(dir.path().join(".huskies/project.toml")).unwrap(),
|
||||
toml_content
|
||||
);
|
||||
|
||||
let story_kit_gitignore =
|
||||
fs::read_to_string(dir.path().join(".huskies/.gitignore")).unwrap();
|
||||
let count = story_kit_gitignore
|
||||
.lines()
|
||||
.filter(|l| l.trim() == "worktrees/")
|
||||
.count();
|
||||
assert_eq!(
|
||||
count, 1,
|
||||
".huskies/.gitignore should not have duplicate entries"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_existing_git_repo_no_commit() {
|
||||
let dir = tempdir().unwrap();
|
||||
|
||||
// Initialize a git repo before scaffold
|
||||
std::process::Command::new("git")
|
||||
.args(["init"])
|
||||
.current_dir(dir.path())
|
||||
.status()
|
||||
.unwrap();
|
||||
std::process::Command::new("git")
|
||||
.args([
|
||||
"-c",
|
||||
"user.email=test@test.com",
|
||||
"-c",
|
||||
"user.name=Test",
|
||||
"commit",
|
||||
"--allow-empty",
|
||||
"-m",
|
||||
"pre-scaffold",
|
||||
])
|
||||
.current_dir(dir.path())
|
||||
.status()
|
||||
.unwrap();
|
||||
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
// Only 1 commit should exist — scaffold must not commit into an existing repo
|
||||
let log_output = std::process::Command::new("git")
|
||||
.args(["log", "--oneline"])
|
||||
.current_dir(dir.path())
|
||||
.output()
|
||||
.unwrap();
|
||||
let log = String::from_utf8_lossy(&log_output.stdout);
|
||||
let commit_count = log.lines().count();
|
||||
assert_eq!(
|
||||
commit_count, 1,
|
||||
"scaffold should not create a commit in an existing git repo"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_creates_story_kit_gitignore_with_relative_entries() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
// .huskies/.gitignore must contain relative patterns for files under .huskies/
|
||||
let sk_content = fs::read_to_string(dir.path().join(".huskies/.gitignore")).unwrap();
|
||||
assert!(sk_content.contains("worktrees/"));
|
||||
assert!(sk_content.contains("merge_workspace/"));
|
||||
assert!(sk_content.contains("coverage/"));
|
||||
assert!(sk_content.contains("matrix_history.json"));
|
||||
assert!(sk_content.contains("timers.json"));
|
||||
// Must NOT contain absolute .huskies/ prefixed paths
|
||||
assert!(!sk_content.contains(".huskies/"));
|
||||
|
||||
// Root .gitignore must contain root-level huskies entries
|
||||
let root_content = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
|
||||
assert!(root_content.contains(".huskies_port"));
|
||||
// store.json now lives inside .huskies/ and must NOT appear in root .gitignore
|
||||
assert!(!root_content.contains("store.json"));
|
||||
// Root .gitignore must NOT contain .huskies/ sub-directory patterns
|
||||
assert!(!root_content.contains(".huskies/worktrees/"));
|
||||
assert!(!root_content.contains(".huskies/merge_workspace/"));
|
||||
assert!(!root_content.contains(".huskies/coverage/"));
|
||||
// store.json must be in .huskies/.gitignore instead
|
||||
assert!(sk_content.contains("store.json"));
|
||||
// Database files must be ignored so novice users don't accidentally commit them
|
||||
assert!(sk_content.contains("pipeline.db"));
|
||||
assert!(sk_content.contains("*.db"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_gitignore_does_not_duplicate_existing_entries() {
|
||||
let dir = tempdir().unwrap();
|
||||
// Pre-create .huskies dir and .gitignore with some entries already present
|
||||
fs::create_dir_all(dir.path().join(".huskies")).unwrap();
|
||||
fs::write(
|
||||
dir.path().join(".huskies/.gitignore"),
|
||||
"worktrees/\ncoverage/\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join(".huskies/.gitignore")).unwrap();
|
||||
let worktrees_count = content.lines().filter(|l| l.trim() == "worktrees/").count();
|
||||
assert_eq!(worktrees_count, 1, "worktrees/ should not be duplicated");
|
||||
let coverage_count = content.lines().filter(|l| l.trim() == "coverage/").count();
|
||||
assert_eq!(coverage_count, 1, "coverage/ should not be duplicated");
|
||||
// The missing entry must have been added
|
||||
assert!(content.contains("merge_workspace/"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_creates_claude_md_at_project_root() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let claude_md = dir.path().join("CLAUDE.md");
|
||||
assert!(
|
||||
claude_md.exists(),
|
||||
"CLAUDE.md should be created at project root"
|
||||
);
|
||||
|
||||
let content = fs::read_to_string(&claude_md).unwrap();
|
||||
assert!(
|
||||
content.contains("<!-- huskies:scaffold-template -->"),
|
||||
"CLAUDE.md should contain the scaffold sentinel"
|
||||
);
|
||||
assert!(
|
||||
content.contains("Read .huskies/README.md"),
|
||||
"CLAUDE.md should include directive to read .huskies/README.md"
|
||||
);
|
||||
assert!(
|
||||
content.contains("Never chain shell commands"),
|
||||
"CLAUDE.md should include command chaining rule"
|
||||
);
|
||||
assert!(
|
||||
content.contains("wizard_status"),
|
||||
"CLAUDE.md should instruct Claude to call wizard_status on first conversation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_does_not_overwrite_existing_claude_md() {
|
||||
let dir = tempdir().unwrap();
|
||||
let claude_md = dir.path().join("CLAUDE.md");
|
||||
fs::write(&claude_md, "custom CLAUDE.md content").unwrap();
|
||||
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
fs::read_to_string(&claude_md).unwrap(),
|
||||
"custom CLAUDE.md content",
|
||||
"scaffold should not overwrite an existing CLAUDE.md"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_writes_mcp_json_with_port() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 4242).unwrap();
|
||||
|
||||
let mcp_path = dir.path().join(".mcp.json");
|
||||
assert!(mcp_path.exists(), ".mcp.json should be created by scaffold");
|
||||
let content = fs::read_to_string(&mcp_path).unwrap();
|
||||
assert!(
|
||||
content.contains("4242"),
|
||||
".mcp.json should reference the given port"
|
||||
);
|
||||
assert!(
|
||||
content.contains("localhost"),
|
||||
".mcp.json should reference localhost"
|
||||
);
|
||||
assert!(
|
||||
content.contains("huskies"),
|
||||
".mcp.json should name the huskies server"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_story_kit_does_not_overwrite_existing_mcp_json() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mcp_path = dir.path().join(".mcp.json");
|
||||
fs::write(&mcp_path, "{\"custom\": true}").unwrap();
|
||||
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
fs::read_to_string(&mcp_path).unwrap(),
|
||||
"{\"custom\": true}",
|
||||
"scaffold should not overwrite an existing .mcp.json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_gitignore_includes_mcp_json() {
|
||||
let dir = tempdir().unwrap();
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let root_gitignore = fs::read_to_string(dir.path().join(".gitignore")).unwrap();
|
||||
assert!(
|
||||
root_gitignore.contains(".mcp.json"),
|
||||
"root .gitignore should include .mcp.json (port is environment-specific)"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scaffold_script_test_contains_detected_commands_for_rust() {
|
||||
let dir = tempdir().unwrap();
|
||||
fs::write(
|
||||
dir.path().join("Cargo.toml"),
|
||||
"[package]\nname = \"myapp\"\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
scaffold_story_kit(dir.path(), 3001).unwrap();
|
||||
|
||||
let content = fs::read_to_string(dir.path().join("script/test")).unwrap();
|
||||
assert!(
|
||||
content.contains("cargo test"),
|
||||
"Rust project scaffold should set cargo test in script/test"
|
||||
);
|
||||
assert!(
|
||||
!content.contains("No tests configured"),
|
||||
"should not use stub when stack is detected"
|
||||
);
|
||||
}
|
||||
|
||||
/// When no stack marker files exist, `script/test` keeps the generic stub.
#[test]
fn scaffold_script_test_fallback_stub_when_no_stack() {
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let script = fs::read_to_string(tmp.path().join("script/test")).unwrap();
    assert!(
        script.contains("No tests configured"),
        "unknown stack should use the generic stub"
    );
}
|
||||
|
||||
/// Scaffolding must create both `script/build` and `script/lint`.
#[test]
fn scaffold_story_kit_creates_script_build_and_lint() {
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let script_dir = tmp.path().join("script");
    assert!(
        script_dir.join("build").exists(),
        "script/build should be created by scaffold"
    );
    assert!(
        script_dir.join("lint").exists(),
        "script/lint should be created by scaffold"
    );
}
|
||||
|
||||
/// On Unix, the generated build/lint scripts must carry an execute bit.
///
/// Gated with `#[cfg(unix)]` because `std::os::unix::fs::PermissionsExt`
/// (and the `mode()` accessor) only exists on Unix targets — without the
/// gate the whole test module fails to compile on Windows.
#[cfg(unix)]
#[test]
fn scaffold_story_kit_creates_executable_script_build_and_lint() {
    use std::os::unix::fs::PermissionsExt;

    let dir = tempdir().unwrap();
    scaffold_story_kit(dir.path(), 3001).unwrap();

    for name in &["build", "lint"] {
        let path = dir.path().join("script").join(name);
        assert!(path.exists(), "script/{name} should be created");
        let perms = fs::metadata(&path).unwrap().permissions();
        // Any of the owner/group/other execute bits is enough.
        assert!(
            perms.mode() & 0o111 != 0,
            "script/{name} should be executable"
        );
    }
}
|
||||
|
||||
/// A detected Rust stack must produce a release build command in `script/build`.
#[test]
fn scaffold_script_build_contains_detected_commands_for_rust() {
    let tmp = tempdir().unwrap();
    let manifest_path = tmp.path().join("Cargo.toml");
    fs::write(manifest_path, "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let script = fs::read_to_string(tmp.path().join("script/build")).unwrap();
    assert!(
        script.contains("cargo build --release"),
        "Rust project scaffold should set cargo build --release in script/build"
    );
}
|
||||
|
||||
/// A detected Rust stack must wire both the fmt check and clippy into
/// `script/lint`.
#[test]
fn scaffold_script_lint_contains_detected_commands_for_rust() {
    let tmp = tempdir().unwrap();
    let manifest_path = tmp.path().join("Cargo.toml");
    fs::write(manifest_path, "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let script = fs::read_to_string(tmp.path().join("script/lint")).unwrap();
    let expectations = [
        (
            "cargo fmt --all --check",
            "Rust project scaffold should include fmt check in script/lint",
        ),
        (
            "cargo clippy -- -D warnings",
            "Rust project scaffold should include clippy in script/lint",
        ),
    ];
    for (needle, why) in expectations {
        assert!(script.contains(needle), "{why}");
    }
}
|
||||
|
||||
/// Stack detection must flow into `.huskies/project.toml`: a Rust root yields
/// a `server` component with a `cargo check` setup step.
#[test]
fn scaffold_project_toml_contains_detected_components() {
    let tmp = tempdir().unwrap();
    // Place a Cargo.toml in the project root before scaffolding
    let manifest_path = tmp.path().join("Cargo.toml");
    fs::write(manifest_path, "[package]\nname = \"myapp\"\n").unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let toml = fs::read_to_string(tmp.path().join(".huskies/project.toml")).unwrap();
    assert!(
        toml.contains("[[component]]"),
        "project.toml should contain a component entry"
    );
    assert!(
        toml.contains("name = \"server\""),
        "Rust project should have a 'server' component"
    );
    assert!(
        toml.contains("cargo check"),
        "Rust component should have cargo check setup"
    );
}
|
||||
|
||||
/// With no detectable stack, project.toml falls back to a generic `app`
/// component and must not leak Rust-specific commands.
#[test]
fn scaffold_project_toml_fallback_when_no_stack_detected() {
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let toml = fs::read_to_string(tmp.path().join(".huskies/project.toml")).unwrap();
    assert!(
        toml.contains("[[component]]"),
        "project.toml should always have at least one component"
    );
    // Fallback uses generic app component with empty setup — no Rust-specific commands
    assert!(
        toml.contains("name = \"app\""),
        "fallback should use generic 'app' component name"
    );
    assert!(
        !toml.contains("cargo"),
        "fallback must not contain Rust-specific commands for non-Rust projects"
    );
}
|
||||
|
||||
/// A pre-existing `.huskies/project.toml` is user configuration and must be
/// left exactly as found.
#[test]
fn scaffold_does_not_overwrite_existing_project_toml_with_components() {
    let tmp = tempdir().unwrap();
    let huskies_dir = tmp.path().join(".huskies");
    fs::create_dir_all(&huskies_dir).unwrap();
    let existing = "[[component]]\nname = \"custom\"\npath = \".\"\nsetup = [\"make build\"]\n";
    fs::write(huskies_dir.join("project.toml"), existing).unwrap();

    scaffold_story_kit(tmp.path(), 3001).unwrap();

    let after = fs::read_to_string(huskies_dir.join("project.toml")).unwrap();
    assert_eq!(
        after, existing,
        "scaffold should not overwrite existing project.toml"
    );
}
|
||||
}
|
||||
@@ -0,0 +1,161 @@
|
||||
//! Static template strings for project scaffolding.
//!
//! Includes templates for `.huskies/specs/`, default settings, agent config,
//! and bot examples. Bot examples are loaded via `include_str!` from the
//! `.huskies/` directory at the repo root.

// NOTE: `include_str!` paths are relative to this file. The `../../../../../`
// prefix climbs from this sub-module back to the repo root — it gained one
// extra `../` when scaffold.rs was split into a sub-module directory.

// Dev-process README copied verbatim into scaffolded projects.
pub(super) const STORY_KIT_README: &str = include_str!("../../../../../.huskies/README.md");

// Per-transport bot configuration examples, embedded at compile time so the
// scaffold needs no runtime access to the repo root.
pub(super) const BOT_TOML_MATRIX_EXAMPLE: &str = include_str!("../../../../../.huskies/bot.toml.matrix.example");
pub(super) const BOT_TOML_WHATSAPP_META_EXAMPLE: &str =
    include_str!("../../../../../.huskies/bot.toml.whatsapp-meta.example");
pub(super) const BOT_TOML_WHATSAPP_TWILIO_EXAMPLE: &str =
    include_str!("../../../../../.huskies/bot.toml.whatsapp-twilio.example");
pub(super) const BOT_TOML_SLACK_EXAMPLE: &str = include_str!("../../../../../.huskies/bot.toml.slack.example");
|
||||
|
||||
// Starter `.huskies/specs` "Project Context" document; every section is a
// TODO for the user to fill in. The leading HTML comment marks the file as an
// unedited scaffold template (NOTE(review): presumably checked elsewhere to
// detect untouched templates — confirm against the wizard code).
pub(super) const STORY_KIT_CONTEXT: &str = "<!-- huskies:scaffold-template -->\n\
    # Project Context\n\
    \n\
    ## High-Level Goal\n\
    \n\
    TODO: Describe the high-level goal of this project.\n\
    \n\
    ## Core Features\n\
    \n\
    TODO: List the core features of this project.\n\
    \n\
    ## Domain Definition\n\
    \n\
    TODO: Define the key domain concepts and entities.\n\
    \n\
    ## Glossary\n\
    \n\
    TODO: Define abbreviations and technical terms.\n";
|
||||
|
||||
// Starter `.huskies/specs` "Tech Stack & Constraints" document, same
// TODO-template shape (and scaffold-template marker) as STORY_KIT_CONTEXT.
pub(super) const STORY_KIT_STACK: &str = "<!-- huskies:scaffold-template -->\n\
    # Tech Stack & Constraints\n\
    \n\
    ## Core Stack\n\
    \n\
    TODO: Describe the language, frameworks, and runtimes.\n\
    \n\
    ## Coding Standards\n\
    \n\
    TODO: Describe code style, linting rules, and error handling conventions.\n\
    \n\
    ## Quality Gates\n\
    \n\
    TODO: List the commands that must pass before merging (e.g., cargo test, npm run build).\n\
    \n\
    ## Libraries\n\
    \n\
    TODO: List approved libraries and their purpose.\n";
|
||||
|
||||
// Fallback `script/test` stub written when no stack is detected: it exits 0
// and echoes "No tests configured" so the pipeline still runs end to end.
pub(super) const STORY_KIT_SCRIPT_TEST: &str = "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's test commands here.\n# Story Kit agents invoke this script as the canonical test runner.\n# Exit 0 on success, non-zero on failure.\necho \"No tests configured\"\n";
|
||||
|
||||
// Template for the project-level CLAUDE.md: forbids shell-command chaining
// (it defeats the Bash permission allow-rules), points agents at
// .huskies/README.md, and instructs the first conversation to run the setup
// wizard check.
pub(super) const STORY_KIT_CLAUDE_MD: &str = "<!-- huskies:scaffold-template -->\n\
    Never chain shell commands with `&&`, `||`, or `;` in a single Bash call. \
    The permission system validates the entire command string, and chained commands \
    won't match allow rules like `Bash(git *)`. Use separate Bash calls instead — \
    parallel calls work fine.\n\
    \n\
    Read .huskies/README.md to see our dev process.\n\
    \n\
    IMPORTANT: On your first conversation, call `wizard_status` to check if \
    project setup is complete. If not, read .huskies/README.md for the full \
    setup wizard instructions and guide the user through it conversationally.\n";
|
||||
|
||||
// Default `.claude/settings.json` for scaffolded projects: an allow-list of
// common build/file-inspection shell commands, core editing tools, and all
// huskies MCP tools, plus enabling the `huskies` server from `.mcp.json`.
pub(super) const STORY_KIT_CLAUDE_SETTINGS: &str = r#"{
  "permissions": {
    "allow": [
      "Bash(cargo build:*)",
      "Bash(cargo check:*)",
      "Bash(git *)",
      "Bash(ls *)",
      "Bash(mkdir *)",
      "Bash(mv *)",
      "Bash(rm *)",
      "Bash(touch *)",
      "Bash(echo:*)",
      "Bash(pwd *)",
      "Bash(grep:*)",
      "Bash(find *)",
      "Bash(head *)",
      "Bash(tail *)",
      "Bash(wc *)",
      "Bash(cat *)",
      "Read",
      "Edit",
      "Write",
      "Glob",
      "Grep",
      "mcp__huskies__*"
    ]
  },
  "enabledMcpjsonServers": [
    "huskies"
  ]
}
"#;
|
||||
|
||||
// Default `.huskies` project settings file. Only `default_qa` and
// `max_retries` are active; the remaining keys are shipped commented-out as
// documented opt-ins.
pub(super) const DEFAULT_PROJECT_SETTINGS_TOML: &str = r#"# Project-wide default QA mode: "server", "agent", or "human".
# Per-story `qa` front matter overrides this setting.
default_qa = "server"

# Maximum number of retries per story per pipeline stage before marking as blocked.
# Set to 0 to disable retry limits.
max_retries = 2

# Default model for coder-stage agents (e.g. "sonnet", "opus").
# When set, only coder agents whose model matches this value are considered for
# auto-assignment, so opus agents are only used when explicitly requested via
# story front matter `agent:` field.
# default_coder_model = "sonnet"

# Maximum number of concurrent coder-stage agents.
# Stories wait in 2_current/ until a slot frees up.
# max_coders = 3

# Override the base branch for worktree creation and merge operations.
# When not set, the system auto-detects the base branch from the current HEAD.
# base_branch = "main"

# Suppress soft rate-limit warning notifications in chat.
# Hard blocks and story-blocked notifications are always sent.
# rate_limit_notifications = true

# IANA timezone for timer scheduling (e.g. "Europe/London", "America/New_York").
# Timer HH:MM inputs are interpreted in this timezone.
# timezone = "America/New_York"
"#;
|
||||
|
||||
// Default agent roster for new projects: one coder, one read-only QA
// reviewer, and a mergemaster. `{{story_id}}` placeholders in the prompts are
// template variables (NOTE(review): presumably substituted when an agent is
// launched — confirm against the agent-spawning code). Raw string keeps the
// embedded `\n` sequences as literal TOML escapes.
pub(super) const DEFAULT_AGENTS_TOML: &str = r#"[[agent]]
name = "coder-1"
stage = "coder"
role = "Full-stack engineer. Implements features across all components."
model = "sonnet"
max_turns = 50
max_budget_usd = 5.00
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .huskies/README.md to understand the dev process. Follow the workflow through implementation and verification. The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop.\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits.\n\nIf `script/test` still contains the generic 'No tests configured' stub, update it to run the project's actual test suite before starting implementation."
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Commit all your work before finishing. Do not accept stories, move them to archived, or merge to master."

[[agent]]
name = "qa"
stage = "qa"
role = "Reviews coder work: runs quality gates, generates testing plans, and reports findings."
model = "sonnet"
max_turns = 40
max_budget_usd = 4.00
prompt = "You are the QA agent for story {{story_id}}. Review the coder's work and produce a structured QA report. Run quality gates (linting, tests), attempt a build, and generate a manual testing plan. Do NOT modify any code."
system_prompt = "You are a QA agent. Your job is read-only: review code quality, run tests, and produce a structured QA report. Do not modify code."

[[agent]]
name = "mergemaster"
stage = "mergemaster"
role = "Merges completed work into master, runs quality gates, and archives stories."
model = "sonnet"
max_turns = 30
max_budget_usd = 5.00
prompt = "You are the mergemaster agent for story {{story_id}}. Call merge_agent_work(story_id='{{story_id}}') to start the merge pipeline. Then poll get_merge_status(story_id='{{story_id}}') every 15 seconds until the status is 'completed' or 'failed'. Report the final result. If the merge fails, call report_merge_failure."
system_prompt = "You are the mergemaster agent. Call merge_agent_work to start the merge, then poll get_merge_status every 15 seconds until done. Never manually move story files. Call report_merge_failure when merges fail."
"#;
|
||||
Reference in New Issue
Block a user