Compare commits

...

13 Commits

Author SHA1 Message Date
Timmy fd7698f0e7 Bump version to 0.10.3 2026-04-16 18:08:23 +01:00
dave 4b710b02f2 huskies: merge 591_story_gateway_chat_commands_use_active_project_root_instead_of_gateway_config_dir 2026-04-16 16:14:05 +00:00
dave e734e80da5 huskies: merge 590_story_gateway_native_mcp_tools_return_json_rpc_responses_missing_request_id 2026-04-16 11:41:52 +00:00
dave 4ddf2a4367 fix: strip front matter from show command, display useful metadata inline
Strips the YAML front matter block and shows useful fields
(depends_on, agent, blocked, retries) as a summary line at the top.
Eliminates the duplicate title problem.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-16 09:01:54 +00:00
dave 2b95388efd fix: convert markdown headings to bold in show command for Matrix rendering
Element X doesn't style <h2> tags distinctly. Convert ## headings to
**bold** text with a blank line above for consistent rendering across
all Matrix clients.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-16 08:47:41 +00:00
dave 9f0274417d huskies: merge 579_bug_matrix_bot_messages_render_markdown_headings_without_line_breaks_or_formatting 2026-04-16 08:22:34 +00:00
dave df2f20a5e5 huskies: merge 589_story_wizard_auto_detects_project_components_and_configures_scripts_accordingly 2026-04-16 00:22:53 +00:00
dave 61502f51d9 huskies: merge 588_bug_wizard_generated_script_test_misses_frontend_tests_for_projects_with_a_frontend 2026-04-15 23:57:12 +00:00
dave 4553d7215a huskies: merge 586_bug_wizard_skips_context_and_stack_generation_when_files_already_exist_from_scaffold 2026-04-15 23:52:25 +00:00
dave 4a1c6b4cfa huskies: merge 585_bug_bot_not_aware_of_actual_running_port_defaults_to_3001 2026-04-15 23:47:37 +00:00
dave 2663c5f91f huskies: merge 583_bug_add_test_that_builds_gateway_route_tree_to_catch_duplicate_route_panics 2026-04-15 19:57:12 +00:00
dave 79ee19ca5b huskies: merge 587_bug_pipeline_db_not_in_default_gitignore_novice_users_will_commit_it 2026-04-15 19:49:46 +00:00
dave 871a18f821 huskies: merge 584_bug_bot_asks_user_to_run_huskies_init_instead_of_running_wizard_automatically 2026-04-15 19:28:58 +00:00
19 changed files with 1355 additions and 138 deletions
Generated
+13 -13
View File
@@ -366,9 +366,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "aws-lc-rs"
version = "1.16.2"
version = "1.16.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a054912289d18629dc78375ba2c3726a3afe3ff71b4edba9dedfca0e3446d1fc"
checksum = "0ec6fb3fe69024a75fa7e1bfb48aa6cf59706a101658ea01bfd33b2b248a038f"
dependencies = [
"aws-lc-sys",
"zeroize",
@@ -376,9 +376,9 @@ dependencies = [
[[package]]
name = "aws-lc-sys"
version = "0.39.1"
version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83a25cf98105baa966497416dbd42565ce3a8cf8dbfd59803ec9ad46f3126399"
checksum = "f50037ee5e1e41e7b8f9d161680a725bd1626cb6f8c7e901f91f942850852fe7"
dependencies = [
"cc",
"cmake",
@@ -2288,7 +2288,7 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "huskies"
version = "0.10.2"
version = "0.10.3"
dependencies = [
"async-stream",
"async-trait",
@@ -2802,9 +2802,9 @@ dependencies = [
[[package]]
name = "konst"
version = "0.3.16"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4381b9b00c55f251f2ebe9473aef7c117e96828def1a7cb3bd3f0f903c6894e9"
checksum = "97feab15b395d1860944abe6a8dd8ed9f8eadfae01750fada8427abda531d887"
dependencies = [
"const_panic",
"konst_kernel",
@@ -6465,9 +6465,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
name = "uuid"
version = "1.23.0"
version = "1.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9"
checksum = "ddd74a9687298c6858e9b88ec8935ec45d22e8fd5e6394fa1bd4e99a87789c76"
dependencies = [
"getrandom 0.4.2",
"js-sys",
@@ -6770,18 +6770,18 @@ dependencies = [
[[package]]
name = "webpki-root-certs"
version = "1.0.6"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "804f18a4ac2676ffb4e8b5b5fa9ae38af06df08162314f96a68d2a363e21a8ca"
checksum = "f31141ce3fc3e300ae89b78c0dd67f9708061d1d2eda54b8209346fd6be9a92c"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "webpki-roots"
version = "1.0.6"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed"
checksum = "52f5ee44c96cf55f1b349600768e3ece3a8f26010c05265ab73f945bb1a2eb9d"
dependencies = [
"rustls-pki-types",
]
+2 -2
View File
@@ -1,12 +1,12 @@
{
"name": "huskies",
"version": "0.10.2",
"version": "0.10.3",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "huskies",
"version": "0.10.2",
"version": "0.10.3",
"dependencies": {
"@types/react-syntax-highlighter": "^15.5.13",
"react": "^19.1.0",
+1 -1
View File
@@ -1,7 +1,7 @@
{
"name": "huskies",
"private": true,
"version": "0.10.2",
"version": "0.10.3",
"type": "module",
"scripts": {
"dev": "vite",
+1 -1
View File
@@ -1,6 +1,6 @@
[package]
name = "huskies"
version = "0.10.2"
version = "0.10.3"
edition = "2024"
build = "build.rs"
+15 -5
View File
@@ -59,12 +59,17 @@ fn wizard_generate_reply(ctx: &CommandContext) -> String {
}
/// Compose a status reply for the `setup` command (no args).
///
/// If no wizard state exists, automatically initializes it so the user does
/// not need to run `huskies init` manually.
fn wizard_status_reply(ctx: &CommandContext) -> String {
if WizardState::load(ctx.project_root).is_none() {
WizardState::init_if_missing(ctx.project_root);
}
match WizardState::load(ctx.project_root) {
Some(state) => format_wizard_state(&state),
None => {
"No setup wizard active. Run `huskies init` in the project root to begin.".to_string()
}
None => "Unable to initialize setup wizard. Ensure the `.huskies/` directory exists."
.to_string(),
}
}
@@ -205,13 +210,18 @@ mod tests {
}
#[test]
fn setup_no_wizard_returns_helpful_message() {
fn setup_no_wizard_auto_initializes() {
let dir = TempDir::new().unwrap();
std::fs::create_dir_all(dir.path().join(".huskies")).unwrap();
let agents = Arc::new(crate::agents::AgentPool::new_test(4000));
let rooms = Arc::new(Mutex::new(HashSet::new()));
let ctx = make_ctx("", dir.path(), &agents, &rooms);
let result = handle_setup(&ctx).unwrap();
assert!(result.contains("huskies init"));
// Bot should auto-initialize and return wizard status, not ask user to run huskies init.
assert!(result.contains("Setup wizard"));
assert!(!result.contains("huskies init"));
// Wizard state file should now exist.
assert!(WizardState::load(dir.path()).is_some());
}
#[test]
+90 -2
View File
@@ -2,6 +2,65 @@
use super::CommandContext;
/// Strip YAML front matter and return a summary of useful fields + the remaining body.
///
/// Returns `(summary, body)`:
/// * `summary` — selected front-matter fields rendered as bold Markdown
///   fragments joined with `" · "`; empty when there is no front matter or no
///   interesting fields.
/// * `body` — the text with the front-matter block removed. When no opening
///   or closing `---` delimiter is found, the original text is returned
///   unchanged with an empty summary.
fn strip_front_matter(text: &str) -> (String, String) {
    let trimmed = text.trim_start();
    if !trimmed.starts_with("---") {
        // No front-matter delimiter at all: nothing to strip.
        return (String::new(), text.to_string());
    }
    // Find the closing `---` (it must start a new line).
    let Some(end) = trimmed[3..].find("\n---") else {
        // No closing ---, return as-is.
        return (String::new(), text.to_string());
    };
    let yaml_block = trimmed[3..3 + end].trim();
    let body = &trimmed[3 + end + 4..]; // skip past closing ---

    // Extract useful fields from YAML (simple line-based parsing — a full
    // YAML parser is overkill for these flat `key: value` lines).
    //
    // `strip_prefix` removes the key exactly once; the previous
    // `trim_start_matches("key:")` stripped it *repeatedly*, which would
    // mangle a value that itself begins with the key text.
    let mut parts = Vec::new();
    for line in yaml_block.lines() {
        let line = line.trim();
        if let Some(val) = line.strip_prefix("depends_on:") {
            let val = val.trim();
            if !val.is_empty() && val != "[]" {
                parts.push(format!("**Depends on:** {val}"));
            }
        } else if let Some(val) = line.strip_prefix("agent:") {
            let val = val.trim().trim_matches('"');
            if !val.is_empty() {
                parts.push(format!("**Agent:** {val}"));
            }
        } else if let Some(val) = line.strip_prefix("blocked:") {
            // Only surface the flag when explicitly set.
            if val.trim() == "true" {
                parts.push("**Blocked:** yes".to_string());
            }
        } else if let Some(val) = line.strip_prefix("retry_count:") {
            let val = val.trim();
            // Zero retries is the uninteresting default — omit it.
            if val != "0" && !val.is_empty() {
                parts.push(format!("**Retries:** {val}"));
            }
        } else if let Some(val) = line.strip_prefix("qa:") {
            if val.trim().trim_matches('"') == "human" {
                parts.push("**QA:** human review required".to_string());
            }
        } else if let Some(val) = line.strip_prefix("merge_failure:") {
            let val = val.trim().trim_matches('"');
            if !val.is_empty() {
                parts.push(format!("**Merge failure:** {val}"));
            }
        }
    }
    (parts.join(" · "), body.to_string())
}
/// Display the full markdown text of a work item identified by its numeric ID.
///
/// Lookup priority: CRDT → content store → filesystem (Story 512).
@@ -34,9 +93,38 @@ pub(super) fn handle_show(ctx: &CommandContext) -> Option<String> {
// `content` comes from the CRDT / content store. If unavailable, report
// it rather than silently reading a stale on-disk copy.
Some(content.unwrap_or_else(|| {
let text = content.unwrap_or_else(|| {
format!("Story {story_id} found in pipeline but its content is unavailable.")
}))
});
// Strip front matter block and extract useful metadata to show inline.
let (front_matter_summary, body) = strip_front_matter(&text);
// Convert markdown headings to bold text for consistent rendering across
// Matrix clients. Element X doesn't style <h2> tags distinctly, but bold
// text renders consistently everywhere.
let formatted = body
.lines()
.map(|line| {
let trimmed = line.trim_start();
if let Some(rest) = trimmed.strip_prefix("### ") {
format!("\n**{}**", rest)
} else if let Some(rest) = trimmed.strip_prefix("## ") {
format!("\n**{}**", rest)
} else if let Some(rest) = trimmed.strip_prefix("# ") {
format!("\n**{}**", rest)
} else {
line.to_string()
}
})
.collect::<Vec<_>>()
.join("\n");
if front_matter_summary.is_empty() {
Some(formatted.trim().to_string())
} else {
Some(format!("{front_matter_summary}\n{}", formatted.trim()))
}
}
#[cfg(test)]
@@ -67,6 +67,23 @@ pub struct BotContext {
pub gateway_projects: Vec<String>,
}
impl BotContext {
/// Resolve the effective project root for command dispatch.
///
/// In gateway mode the bot's `project_root` is the gateway config directory.
/// Each project lives in a subdirectory named after the project, so the
/// effective root for commands is `project_root / active_project_name`.
/// In standalone (single-project) mode this returns `project_root` unchanged.
pub async fn effective_project_root(&self) -> PathBuf {
if let Some(ref ap) = self.gateway_active_project {
let name = ap.read().await.clone();
self.project_root.join(&name)
} else {
self.project_root.clone()
}
}
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
@@ -88,6 +105,126 @@ mod tests {
assert_clone::<BotContext>();
}
#[tokio::test]
async fn effective_project_root_standalone_returns_project_root() {
    // In standalone mode (gateway_active_project is None), the effective root
    // must equal the project_root exactly.
    let (_perm_tx, perm_rx) = mpsc::unbounded_channel();
    // Fully-populated BotContext; every field besides project_root and
    // gateway_active_project is an inert test fixture.
    let ctx = BotContext {
        bot_user_id: make_user_id("@bot:example.com"),
        target_room_ids: vec![],
        project_root: PathBuf::from("/projects/myapp"),
        allowed_users: vec![],
        history: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        history_size: 20,
        bot_sent_event_ids: Arc::new(TokioMutex::new(std::collections::HashSet::new())),
        perm_rx: Arc::new(TokioMutex::new(perm_rx)),
        pending_perm_replies: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        permission_timeout_secs: 120,
        bot_name: "Assistant".to_string(),
        ambient_rooms: Arc::new(std::sync::Mutex::new(std::collections::HashSet::new())),
        agents: Arc::new(crate::agents::AgentPool::new_test(3000)),
        htop_sessions: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        transport: Arc::new(crate::chat::transport::whatsapp::WhatsAppTransport::new(
            "test-phone".to_string(),
            "test-token".to_string(),
            "pipeline_notification".to_string(),
        )),
        timer_store: Arc::new(crate::chat::timer::TimerStore::load(
            std::path::PathBuf::from("/tmp/timers.json"),
        )),
        gateway_active_project: None,
        gateway_projects: vec![],
    };
    // Standalone: no subdirectory is appended.
    assert_eq!(
        ctx.effective_project_root().await,
        PathBuf::from("/projects/myapp")
    );
}
#[tokio::test]
async fn effective_project_root_gateway_uses_active_project_subdir() {
    // In gateway mode, the effective root must be config_dir / active_project_name.
    let (_perm_tx, perm_rx) = mpsc::unbounded_channel();
    // Shared handle holding the active project name, as the gateway would own it.
    let active = Arc::new(RwLock::new("huskies".to_string()));
    let ctx = BotContext {
        bot_user_id: make_user_id("@bot:example.com"),
        target_room_ids: vec![],
        project_root: PathBuf::from("/gateway"),
        allowed_users: vec![],
        history: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        history_size: 20,
        bot_sent_event_ids: Arc::new(TokioMutex::new(std::collections::HashSet::new())),
        perm_rx: Arc::new(TokioMutex::new(perm_rx)),
        pending_perm_replies: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        permission_timeout_secs: 120,
        bot_name: "Assistant".to_string(),
        ambient_rooms: Arc::new(std::sync::Mutex::new(std::collections::HashSet::new())),
        agents: Arc::new(crate::agents::AgentPool::new_test(3000)),
        htop_sessions: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        transport: Arc::new(crate::chat::transport::whatsapp::WhatsAppTransport::new(
            "test-phone".to_string(),
            "test-token".to_string(),
            "pipeline_notification".to_string(),
        )),
        timer_store: Arc::new(crate::chat::timer::TimerStore::load(
            std::path::PathBuf::from("/tmp/timers.json"),
        )),
        gateway_active_project: Some(Arc::clone(&active)),
        gateway_projects: vec!["huskies".into(), "robot-studio".into()],
    };
    // Gateway mode: the active project name is appended to the config dir.
    assert_eq!(
        ctx.effective_project_root().await,
        PathBuf::from("/gateway/huskies")
    );
}
#[tokio::test]
async fn effective_project_root_gateway_reflects_project_switch() {
    // Switching the active project must change the effective root.
    let (_perm_tx, perm_rx) = mpsc::unbounded_channel();
    // Keep our own handle to the shared active-project name so we can mutate
    // it after the BotContext has been constructed.
    let active = Arc::new(RwLock::new("huskies".to_string()));
    let ctx = BotContext {
        bot_user_id: make_user_id("@bot:example.com"),
        target_room_ids: vec![],
        project_root: PathBuf::from("/gateway"),
        allowed_users: vec![],
        history: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        history_size: 20,
        bot_sent_event_ids: Arc::new(TokioMutex::new(std::collections::HashSet::new())),
        perm_rx: Arc::new(TokioMutex::new(perm_rx)),
        pending_perm_replies: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        permission_timeout_secs: 120,
        bot_name: "Assistant".to_string(),
        ambient_rooms: Arc::new(std::sync::Mutex::new(std::collections::HashSet::new())),
        agents: Arc::new(crate::agents::AgentPool::new_test(3000)),
        htop_sessions: Arc::new(TokioMutex::new(std::collections::HashMap::new())),
        transport: Arc::new(crate::chat::transport::whatsapp::WhatsAppTransport::new(
            "test-phone".to_string(),
            "test-token".to_string(),
            "pipeline_notification".to_string(),
        )),
        timer_store: Arc::new(crate::chat::timer::TimerStore::load(
            std::path::PathBuf::from("/tmp/timers.json"),
        )),
        gateway_active_project: Some(Arc::clone(&active)),
        gateway_projects: vec!["huskies".into(), "robot-studio".into()],
    };
    assert_eq!(
        ctx.effective_project_root().await,
        PathBuf::from("/gateway/huskies")
    );
    // Simulate switch_project changing the active project.
    // (Writing through the shared RwLock is exactly what the gateway does.)
    *active.write().await = "robot-studio".to_string();
    assert_eq!(
        ctx.effective_project_root().await,
        PathBuf::from("/gateway/robot-studio")
    );
}
#[test]
fn bot_context_has_no_require_verified_devices_field() {
// Verification is always on — BotContext no longer has a toggle field.
@@ -96,6 +96,49 @@ mod tests {
);
}
#[test]
fn markdown_to_html_heading_renders_as_h_tag() {
    // A level-2 heading and the paragraph that follows must each map to
    // their dedicated HTML elements (<h2> and <p>).
    let html = markdown_to_html("## Section\nContent here.");
    assert!(
        html.contains("<h2>Section</h2>"),
        "expected <h2> heading tag: {html}"
    );
    assert!(
        html.contains("<p>Content here.</p>"),
        "expected paragraph after heading: {html}"
    );
}
#[test]
fn markdown_to_html_heading_with_preceding_prose_renders_correctly() {
    // Prose → heading → prose: all three segments must render as their own
    // HTML elements, so the heading is not fused with surrounding text.
    let html = markdown_to_html("Intro text.\n## Section\nBody.");
    assert!(
        html.contains("<h2>Section</h2>"),
        "expected <h2> heading tag: {html}"
    );
    assert!(
        html.contains("<p>Intro text.</p>"),
        "expected intro paragraph: {html}"
    );
    assert!(
        html.contains("<p>Body.</p>"),
        "expected body paragraph: {html}"
    );
}
#[test]
fn markdown_to_html_multiple_headings_each_render_as_h_tags() {
    // Two sections back-to-back: every `##` line must yield its own <h2>.
    let html = markdown_to_html("## Section 1\nContent one.\n\n## Section 2\nContent two.");
    assert!(
        html.contains("<h2>Section 1</h2>"),
        "expected first <h2>: {html}"
    );
    assert!(
        html.contains("<h2>Section 2</h2>"),
        "expected second <h2>: {html}"
    );
}
#[test]
fn startup_announcement_uses_bot_name() {
assert_eq!(format_startup_announcement("Timmy"), "Timmy is online.");
@@ -174,13 +174,18 @@ pub(super) async fn on_room_message(
let user_message = body;
slog!("[matrix-bot] Message from {sender}: {user_message}");
// In gateway mode, resolve commands against the active project's root directory.
// The gateway's own project_root is the gateway config dir; each project lives in
// a subdirectory named after the project. Standalone mode is unaffected.
let effective_root = ctx.effective_project_root().await;
// Check for bot-level commands (help, status, ambient, …) before invoking
// the LLM. All commands are registered in commands.rs — no special-casing
// needed here.
let dispatch = super::super::commands::CommandDispatch {
bot_name: &ctx.bot_name,
bot_user_id: ctx.bot_user_id.as_str(),
project_root: &ctx.project_root,
project_root: &effective_root,
agents: &ctx.agents,
ambient_rooms: &ctx.ambient_rooms,
room_id: &room_id_str,
@@ -219,7 +224,7 @@ pub(super) async fn on_room_message(
&ctx.bot_name,
&story_number,
&model,
&ctx.project_root,
&effective_root,
&ctx.agents,
)
.await
@@ -287,7 +292,7 @@ pub(super) async fn on_room_message(
super::super::delete::handle_delete(
&ctx.bot_name,
&story_number,
&ctx.project_root,
&effective_root,
&ctx.agents,
)
.await
@@ -321,7 +326,7 @@ pub(super) async fn on_room_message(
super::super::rmtree::handle_rmtree(
&ctx.bot_name,
&story_number,
&ctx.project_root,
&effective_root,
&ctx.agents,
)
.await
@@ -361,7 +366,7 @@ pub(super) async fn on_room_message(
&ctx.bot_name,
&story_number,
agent_hint.as_deref(),
&ctx.project_root,
&effective_root,
&ctx.agents,
)
.await
@@ -587,7 +592,12 @@ pub(super) async fn handle_message(
let sent_any_chunk = Arc::new(AtomicBool::new(false));
let sent_any_chunk_for_callback = Arc::clone(&sent_any_chunk);
let project_root_str = ctx.project_root.to_string_lossy().to_string();
// In gateway mode, run Claude Code in the active project's directory.
let project_root_str = ctx
.effective_project_root()
.await
.to_string_lossy()
.to_string();
let chat_fut = provider.chat_stream(
&prompt,
&project_root_str,
+50 -6
View File
@@ -223,12 +223,24 @@ pub fn normalize_line_breaks(text: &str) -> String {
let prev_line = lines[i - 1];
// Insert a blank separator when both the current and previous lines
// are non-empty prose (not inside a code fence, not structured Markdown).
// ATX headings (lines starting with one or more `#` characters) always
// need a blank line before and after them so that Matrix clients render
// the heading with visual separation. Without a blank line, a single
// newline between a heading and adjacent text is swallowed by many
// Matrix clients (including Element X), joining the heading text and
// the following content on the same line without any heading formatting.
let is_cur_heading = line.trim_start().starts_with('#');
let is_prev_heading = prev_line.trim_start().starts_with('#');
// Insert a blank separator when:
// 1. Both lines are non-empty prose (standard prose-to-prose rule).
// 2. The current line is an ATX heading (adds blank line *before* it).
// 3. The previous line was an ATX heading (adds blank line *after* it).
let should_double = !line.is_empty()
&& !prev_line.is_empty()
&& !is_structured_line(line)
&& !is_structured_line(prev_line);
&& ((!is_structured_line(line) && !is_structured_line(prev_line))
|| is_cur_heading
|| is_prev_heading);
if should_double {
result.push("");
@@ -599,10 +611,42 @@ mod tests {
}
#[test]
fn normalize_heading_single_newline_preserved() {
fn normalize_heading_followed_by_prose_gets_blank_line() {
// A blank line must be inserted after a heading so Matrix clients render
// the heading with visual separation from the following paragraph.
let input = "# My Heading\nSome text below.";
let output = normalize_line_breaks(input);
assert_eq!(output, "# My Heading\nSome text below.");
assert_eq!(output, "# My Heading\n\nSome text below.");
}
#[test]
fn normalize_prose_before_heading_gets_blank_line() {
    // A blank line must be inserted before a heading when prose precedes it.
    // (Matrix clients swallow a single newline before an ATX heading.)
    let input = "Some intro text.\n## Section";
    let output = normalize_line_breaks(input);
    assert_eq!(output, "Some intro text.\n\n## Section");
}
#[test]
fn normalize_heading_surrounded_by_prose_gets_blank_lines_both_sides() {
    // A heading sandwiched between prose gets a blank separator on BOTH
    // sides — before (prose→heading rule) and after (heading→prose rule).
    let input = "Intro.\n## Heading\nContent.";
    let output = normalize_line_breaks(input);
    assert_eq!(output, "Intro.\n\n## Heading\n\nContent.");
}
#[test]
fn normalize_consecutive_headings_separated_by_blank_lines() {
    // Even heading-to-heading adjacency gets a blank separator, so each
    // heading renders on its own visual block.
    let input = "## Section 1\n## Section 2";
    let output = normalize_line_breaks(input);
    assert_eq!(output, "## Section 1\n\n## Section 2");
}
#[test]
fn normalize_heading_already_separated_by_blank_line_unchanged() {
    // When there is already a blank line, no extra blank is inserted.
    // (Idempotence guard: re-normalizing must not keep growing the gap.)
    let input = "# Heading\n\nContent.";
    let output = normalize_line_breaks(input);
    assert_eq!(output, "# Heading\n\nContent.");
}
#[test]
+90 -64
View File
@@ -320,7 +320,9 @@ pub async fn gateway_mcp_post_handler(
.unwrap_or("");
if GATEWAY_TOOLS.contains(&tool_name) {
to_json_response(handle_gateway_tool(tool_name, &rpc.params, &state).await)
to_json_response(
handle_gateway_tool(tool_name, &rpc.params, &state, rpc.id.clone()).await,
)
} else {
// Proxy to active project's container.
match proxy_mcp_call(&state, &bytes).await {
@@ -482,18 +484,22 @@ async fn handle_gateway_tool(
tool_name: &str,
params: &Value,
state: &GatewayState,
id: Option<Value>,
) -> JsonRpcResponse {
let id = None; // The caller wraps this in a proper response.
match tool_name {
"switch_project" => handle_switch_project(params, state).await,
"gateway_status" => handle_gateway_status(state).await,
"gateway_health" => handle_gateway_health(state).await,
"switch_project" => handle_switch_project(params, state, id).await,
"gateway_status" => handle_gateway_status(state, id).await,
"gateway_health" => handle_gateway_health(state, id).await,
_ => JsonRpcResponse::error(id, -32601, format!("Unknown gateway tool: {tool_name}")),
}
}
/// Switch the active project.
async fn handle_switch_project(params: &Value, state: &GatewayState) -> JsonRpcResponse {
async fn handle_switch_project(
params: &Value,
state: &GatewayState,
id: Option<Value>,
) -> JsonRpcResponse {
let project = params
.get("arguments")
.and_then(|a| a.get("project"))
@@ -502,7 +508,7 @@ async fn handle_switch_project(params: &Value, state: &GatewayState) -> JsonRpcR
.unwrap_or("");
if project.is_empty() {
return JsonRpcResponse::error(None, -32602, "missing required parameter: project".into());
return JsonRpcResponse::error(id, -32602, "missing required parameter: project".into());
}
let url = {
@@ -510,7 +516,7 @@ async fn handle_switch_project(params: &Value, state: &GatewayState) -> JsonRpcR
if !projects.contains_key(project) {
let available: Vec<&str> = projects.keys().map(|s| s.as_str()).collect();
return JsonRpcResponse::error(
None,
id,
-32602,
format!(
"unknown project '{project}'. Available: {}",
@@ -524,7 +530,7 @@ async fn handle_switch_project(params: &Value, state: &GatewayState) -> JsonRpcR
*state.active_project.write().await = project.to_string();
JsonRpcResponse::success(
None,
id,
json!({
"content": [{
"type": "text",
@@ -535,11 +541,11 @@ async fn handle_switch_project(params: &Value, state: &GatewayState) -> JsonRpcR
}
/// Show pipeline status for the active project by proxying `get_pipeline_status`.
async fn handle_gateway_status(state: &GatewayState) -> JsonRpcResponse {
async fn handle_gateway_status(state: &GatewayState, id: Option<Value>) -> JsonRpcResponse {
let active = state.active_project.read().await.clone();
let url = match state.active_url().await {
Ok(u) => u,
Err(e) => return JsonRpcResponse::error(None, -32603, e),
Err(e) => return JsonRpcResponse::error(id.clone(), -32603, e),
};
let mcp_url = format!("{}/mcp", url.trim_end_matches('/'));
@@ -560,7 +566,7 @@ async fn handle_gateway_status(state: &GatewayState) -> JsonRpcResponse {
// Extract the result from the upstream response and wrap it.
let pipeline = upstream.get("result").cloned().unwrap_or(json!(null));
JsonRpcResponse::success(
None,
id,
json!({
"content": [{
"type": "text",
@@ -573,16 +579,16 @@ async fn handle_gateway_status(state: &GatewayState) -> JsonRpcResponse {
)
}
Err(e) => {
JsonRpcResponse::error(None, -32603, format!("invalid upstream response: {e}"))
JsonRpcResponse::error(id, -32603, format!("invalid upstream response: {e}"))
}
}
}
Err(e) => JsonRpcResponse::error(None, -32603, format!("failed to reach {mcp_url}: {e}")),
Err(e) => JsonRpcResponse::error(id, -32603, format!("failed to reach {mcp_url}: {e}")),
}
}
/// Aggregate health checks across all registered projects.
async fn handle_gateway_health(state: &GatewayState) -> JsonRpcResponse {
async fn handle_gateway_health(state: &GatewayState, id: Option<Value>) -> JsonRpcResponse {
let mut results = BTreeMap::new();
let project_entries: Vec<(String, String)> = state
@@ -609,7 +615,7 @@ async fn handle_gateway_health(state: &GatewayState) -> JsonRpcResponse {
let active = state.active_project.read().await.clone();
JsonRpcResponse::success(
None,
id,
json!({
"content": [{
"type": "text",
@@ -1104,7 +1110,7 @@ pub async fn gateway_switch_handler(
body: Json<SwitchRequest>,
) -> Response {
let params = json!({ "arguments": { "project": body.project } });
let resp = handle_switch_project(&params, &state).await;
let resp = handle_switch_project(&params, &state, None).await;
let (ok, error) = if resp.result.is_some() {
(true, None)
@@ -1634,50 +1640,12 @@ pub async fn gateway_bot_config_page_handler() -> Response {
// ── Gateway server startup ───────────────────────────────────────────
/// Start the gateway HTTP server. This is the entry point when `--gateway` is used.
pub async fn run(config_path: &Path, port: u16) -> Result<(), std::io::Error> {
// Locate the gateway config directory (parent of `projects.toml`).
let config_dir = config_path
.parent()
.unwrap_or(std::path::Path::new("."))
.to_path_buf();
let config = GatewayConfig::load(config_path).map_err(std::io::Error::other)?;
let state =
GatewayState::new(config, config_dir.clone(), port).map_err(std::io::Error::other)?;
let state_arc = Arc::new(state);
let active = state_arc.active_project.read().await.clone();
crate::slog!("[gateway] Starting gateway on port {port}, active project: {active}");
crate::slog!(
"[gateway] Registered projects: {}",
state_arc
.projects
.read()
.await
.keys()
.cloned()
.collect::<Vec<_>>()
.join(", ")
);
// Write `.mcp.json` so that the gateway's Matrix bot's Claude Code CLI
// connects to this gateway's MCP endpoint (which proxies to the active project).
if let Err(e) = write_gateway_mcp_json(&config_dir, port) {
crate::slog!("[gateway] Warning: could not write .mcp.json: {e}");
}
// Spawn the Matrix bot if `.huskies/bot.toml` exists in the config directory.
let gateway_projects: Vec<String> = state_arc.projects.read().await.keys().cloned().collect();
let bot_abort = spawn_gateway_bot(
&config_dir,
Arc::clone(&state_arc.active_project),
gateway_projects,
port,
);
*state_arc.bot_handle.lock().await = bot_abort;
let route = poem::Route::new()
/// Build the complete gateway route tree.
///
/// Extracted from `run` so that tests can construct the full route tree and
/// catch duplicate-route panics before they reach production.
pub fn build_gateway_route(state_arc: Arc<GatewayState>) -> impl poem::Endpoint {
poem::Route::new()
.at("/bot-config", poem::get(gateway_bot_config_page_handler))
.at("/api/gateway", poem::get(gateway_api_handler))
.at("/api/gateway/switch", poem::post(gateway_switch_handler))
@@ -1732,7 +1700,53 @@ pub async fn run(config_path: &Path, port: u16) -> Result<(), std::io::Error> {
)
.at("/*path", poem::get(crate::http::assets::embedded_file))
.at("/", poem::get(crate::http::assets::embedded_index))
.data(state_arc);
.data(state_arc)
}
/// Start the gateway HTTP server. This is the entry point when `--gateway` is used.
pub async fn run(config_path: &Path, port: u16) -> Result<(), std::io::Error> {
// Locate the gateway config directory (parent of `projects.toml`).
let config_dir = config_path
.parent()
.unwrap_or(std::path::Path::new("."))
.to_path_buf();
let config = GatewayConfig::load(config_path).map_err(std::io::Error::other)?;
let state =
GatewayState::new(config, config_dir.clone(), port).map_err(std::io::Error::other)?;
let state_arc = Arc::new(state);
let active = state_arc.active_project.read().await.clone();
crate::slog!("[gateway] Starting gateway on port {port}, active project: {active}");
crate::slog!(
"[gateway] Registered projects: {}",
state_arc
.projects
.read()
.await
.keys()
.cloned()
.collect::<Vec<_>>()
.join(", ")
);
// Write `.mcp.json` so that the gateway's Matrix bot's Claude Code CLI
// connects to this gateway's MCP endpoint (which proxies to the active project).
if let Err(e) = write_gateway_mcp_json(&config_dir, port) {
crate::slog!("[gateway] Warning: could not write .mcp.json: {e}");
}
// Spawn the Matrix bot if `.huskies/bot.toml` exists in the config directory.
let gateway_projects: Vec<String> = state_arc.projects.read().await.keys().cloned().collect();
let bot_abort = spawn_gateway_bot(
&config_dir,
Arc::clone(&state_arc.active_project),
gateway_projects,
port,
);
*state_arc.bot_handle.lock().await = bot_abort;
let route = build_gateway_route(state_arc);
let host = std::env::var("HUSKIES_HOST").unwrap_or_else(|_| "127.0.0.1".to_string());
let addr = format!("{host}:{port}");
@@ -1899,7 +1913,7 @@ url = "http://localhost:3002"
let state = GatewayState::new(config, PathBuf::from("."), 3000).unwrap();
let params = json!({ "arguments": { "project": "beta" } });
let resp = handle_switch_project(&params, &state).await;
let resp = handle_switch_project(&params, &state, None).await;
assert!(resp.result.is_some());
let active = state.active_project.read().await.clone();
@@ -1919,7 +1933,7 @@ url = "http://localhost:3002"
let state = GatewayState::new(config, PathBuf::from("."), 3000).unwrap();
let params = json!({ "arguments": { "project": "nonexistent" } });
let resp = handle_switch_project(&params, &state).await;
let resp = handle_switch_project(&params, &state, None).await;
assert!(resp.error.is_some());
}
@@ -2260,4 +2274,16 @@ enabled = false
.await;
assert_eq!(resp.0.status(), StatusCode::NOT_FOUND);
}
/// Build the full gateway route tree and verify it does not panic.
///
/// Poem panics at construction time when duplicate routes are registered.
/// This test catches any regression where a duplicate route is re-introduced
/// (e.g. the `/` vs `/*path` duplicate fixed in commit 0969fb5d).
///
/// Constructing the route tree is cheap and needs no running server, so this
/// is a plain (non-async) test.
#[test]
fn gateway_route_tree_builds_without_panic() {
    let state = make_test_state();
    // build_gateway_route will panic if any route is registered more than once.
    let _route = build_gateway_route(state);
}
}
+3 -2
View File
@@ -349,13 +349,14 @@ pub(super) fn tool_dump_crdt(args: &Value) -> Result<String, String> {
.map_err(|e| format!("Serialization error: {e}"))
}
/// MCP tool: return the server version and build hash.
pub(super) fn tool_get_version() -> Result<String, String> {
/// MCP tool: return the server version, build hash, and running port.
pub(super) fn tool_get_version(ctx: &AppContext) -> Result<String, String> {
let build_hash =
std::fs::read_to_string(".huskies/build_hash").unwrap_or_else(|_| "unknown".to_string());
serde_json::to_string_pretty(&json!({
"version": env!("CARGO_PKG_VERSION"),
"build_hash": build_hash.trim(),
"port": ctx.agents.port(),
}))
.map_err(|e| format!("Serialization error: {e}"))
}
+2 -2
View File
@@ -897,7 +897,7 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "get_version",
"description": "Return the server version and build hash.",
"description": "Return the server version, build hash, and running port.",
"inputSchema": {
"type": "object",
"properties": {}
@@ -1330,7 +1330,7 @@ async fn handle_tools_call(id: Option<Value>, params: &Value, ctx: &AppContext)
"get_pipeline_status" => story_tools::tool_get_pipeline_status(ctx),
// Diagnostics
"get_server_logs" => diagnostics::tool_get_server_logs(&args),
"get_version" => diagnostics::tool_get_version(),
"get_version" => diagnostics::tool_get_version(ctx),
// Server lifecycle
"rebuild_and_restart" => diagnostics::tool_rebuild_and_restart(ctx).await,
// Permission bridge (Claude Code → frontend dialog)
+194 -10
View File
@@ -43,6 +43,8 @@ pub(crate) fn step_output_path(
.join("STACK.md"),
),
WizardStep::TestScript => Some(project_root.join("script").join("test")),
WizardStep::BuildScript => Some(project_root.join("script").join("build")),
WizardStep::LintScript => Some(project_root.join("script").join("lint")),
WizardStep::ReleaseScript => Some(project_root.join("script").join("release")),
WizardStep::TestCoverage => Some(project_root.join("script").join("test_coverage")),
WizardStep::Scaffold => None,
@@ -52,22 +54,35 @@ pub(crate) fn step_output_path(
pub(crate) fn is_script_step(step: WizardStep) -> bool {
matches!(
step,
WizardStep::TestScript | WizardStep::ReleaseScript | WizardStep::TestCoverage
WizardStep::TestScript
| WizardStep::BuildScript
| WizardStep::LintScript
| WizardStep::ReleaseScript
| WizardStep::TestCoverage
)
}
/// Write `content` to `path` only when the file does not already exist.
/// Write `content` to `path`, skipping if the file already exists with real
/// (non-template) content.
///
/// Existing files (including `CLAUDE.md`) are never overwritten — the wizard
/// appends or skips per the acceptance criteria. For script steps the file is
/// also made executable after writing.
/// Scaffold template files (those containing [`TEMPLATE_SENTINEL`]) are treated
/// as placeholders and will be overwritten with the wizard-generated content.
/// Files with real user content are never overwritten. For script steps the
/// file is also made executable after writing.
pub(crate) fn write_if_missing(
path: &Path,
content: &str,
executable: bool,
) -> Result<bool, String> {
use crate::io::onboarding::TEMPLATE_SENTINEL;
if path.exists() {
return Ok(false); // already present — skip silently
// Overwrite scaffold template placeholders; preserve real user content.
let is_template = std::fs::read_to_string(path)
.map(|s| s.contains(TEMPLATE_SENTINEL))
.unwrap_or(false);
if !is_template {
return Ok(false); // real content already present — skip
}
}
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)
@@ -247,6 +262,90 @@ pub(crate) fn generation_hint(step: WizardStep, project_root: &Path) -> String {
}
}
}
WizardStep::BuildScript => {
if bare {
"This is a bare project with no existing code. Read the STACK.md generated \
in the previous step (or ask the user about their stack if it was skipped) \
and generate a `script/build` shell script (#!/usr/bin/env bash, set -euo pipefail) \
with appropriate build commands for their chosen language and framework."
.to_string()
} else {
let has_cargo = project_root.join("Cargo.toml").exists();
let has_pkg = project_root.join("package.json").exists();
let has_pnpm = project_root.join("pnpm-lock.yaml").exists();
let has_frontend_subdir =
project_root.join("frontend").join("package.json").exists()
|| project_root.join("client").join("package.json").exists();
let has_go = project_root.join("go.mod").exists();
let mut cmds = Vec::new();
if has_cargo {
cmds.push("cargo build --release");
}
if has_pkg {
cmds.push(if has_pnpm {
"pnpm run build"
} else {
"npm run build"
});
}
if has_frontend_subdir {
cmds.push("(cd frontend && npm run build)");
}
if has_go {
cmds.push("go build ./...");
}
if cmds.is_empty() {
"Generate a `script/build` shell script (#!/usr/bin/env bash, set -euo pipefail) that builds the project.".to_string()
} else {
format!(
"Generate a `script/build` shell script (#!/usr/bin/env bash, set -euo pipefail) that runs: {}",
cmds.join(", ")
)
}
}
}
WizardStep::LintScript => {
if bare {
"This is a bare project with no existing code. Read the STACK.md generated \
in the previous step (or ask the user about their stack if it was skipped) \
and generate a `script/lint` shell script (#!/usr/bin/env bash, set -euo pipefail) \
with appropriate lint commands for their chosen language and framework."
.to_string()
} else {
let has_cargo = project_root.join("Cargo.toml").exists();
let has_pkg = project_root.join("package.json").exists();
let has_pnpm = project_root.join("pnpm-lock.yaml").exists();
let has_python = project_root.join("pyproject.toml").exists()
|| project_root.join("requirements.txt").exists();
let has_go = project_root.join("go.mod").exists();
let mut cmds = Vec::new();
if has_cargo {
cmds.push("cargo fmt --all --check");
cmds.push("cargo clippy -- -D warnings");
}
if has_pkg {
cmds.push(if has_pnpm {
"pnpm run lint"
} else {
"npm run lint"
});
}
if has_python {
cmds.push("flake8 . (or ruff check . if ruff is configured)");
}
if has_go {
cmds.push("go vet ./...");
}
if cmds.is_empty() {
"Generate a `script/lint` shell script (#!/usr/bin/env bash, set -euo pipefail) that runs the project's linters.".to_string()
} else {
format!(
"Generate a `script/lint` shell script (#!/usr/bin/env bash, set -euo pipefail) that runs: {}",
cmds.join(", ")
)
}
}
}
WizardStep::ReleaseScript => {
if bare {
"This is a bare project with no existing code. Read the STACK.md generated \
@@ -473,13 +572,13 @@ mod tests {
fn wizard_confirm_does_not_overwrite_existing_file() {
let dir = TempDir::new().unwrap();
let ctx = setup(&dir);
// Pre-create the specs directory and file.
// Pre-create the specs directory and file with real (non-template) content.
let specs_dir = dir.path().join(".huskies").join("specs");
std::fs::create_dir_all(&specs_dir).unwrap();
let context_path = specs_dir.join("00_CONTEXT.md");
std::fs::write(&context_path, "original content").unwrap();
// Stage and confirm — existing file should NOT be overwritten.
// Stage and confirm — existing real file should NOT be overwritten.
tool_wizard_generate(&serde_json::json!({"content": "new content"}), &ctx).unwrap();
let result = tool_wizard_confirm(&ctx).unwrap();
assert!(result.contains("already exists"));
@@ -489,6 +588,34 @@ mod tests {
);
}
#[test]
fn wizard_confirm_overwrites_scaffold_template_file() {
    let tmp = TempDir::new().unwrap();
    let ctx = setup(&tmp);
    // Seed 00_CONTEXT.md with the scaffold sentinel so it is treated as a
    // placeholder rather than real user content.
    let specs = tmp.path().join(".huskies").join("specs");
    std::fs::create_dir_all(&specs).unwrap();
    let context_md = specs.join("00_CONTEXT.md");
    let placeholder = "<!-- huskies:scaffold-template -->\n# Project Context\n\nTODO: Describe...";
    std::fs::write(&context_md, placeholder).unwrap();
    // Staging then confirming must replace the placeholder with the
    // wizard-generated content.
    let generated = "# My Real Project\n\nThis is a real project.";
    tool_wizard_generate(&serde_json::json!({ "content": generated }), &ctx).unwrap();
    let outcome = tool_wizard_confirm(&ctx).unwrap();
    assert!(outcome.contains("confirmed"));
    assert_eq!(std::fs::read_to_string(&context_md).unwrap(), generated);
}
#[test]
fn wizard_skip_advances_wizard() {
let dir = TempDir::new().unwrap();
@@ -517,8 +644,8 @@ mod tests {
fn wizard_complete_returns_done_message() {
let dir = TempDir::new().unwrap();
let ctx = setup(&dir);
// Skip all remaining steps.
for _ in 0..5 {
// Skip all remaining steps (scaffold is pre-confirmed, so 7 remaining).
for _ in 0..7 {
tool_wizard_skip(&ctx).unwrap();
}
let result = tool_wizard_status(&ctx).unwrap();
@@ -629,4 +756,61 @@ mod tests {
assert!(hint.contains("cargo nextest"));
assert!(!hint.contains("bare project"));
}
#[test]
fn generation_hint_bare_build_script_references_stack() {
    // A directory holding only .huskies/ counts as a bare project.
    let tmp = TempDir::new().unwrap();
    std::fs::create_dir_all(tmp.path().join(".huskies")).unwrap();
    let hint = generation_hint(WizardStep::BuildScript, tmp.path());
    for needle in ["bare project", "STACK.md"] {
        assert!(hint.contains(needle));
    }
}
#[test]
fn generation_hint_bare_lint_script_references_stack() {
    // A directory holding only .huskies/ counts as a bare project.
    let tmp = TempDir::new().unwrap();
    std::fs::create_dir_all(tmp.path().join(".huskies")).unwrap();
    let hint = generation_hint(WizardStep::LintScript, tmp.path());
    for needle in ["bare project", "STACK.md"] {
        assert!(hint.contains(needle));
    }
}
#[test]
fn generation_hint_existing_project_build_script_detects_cargo() {
    // A Cargo.toml marks the project as non-bare Rust.
    let tmp = TempDir::new().unwrap();
    std::fs::write(tmp.path().join("Cargo.toml"), "[package]").unwrap();
    let hint = generation_hint(WizardStep::BuildScript, tmp.path());
    assert!(hint.contains("cargo build --release"));
    assert!(!hint.contains("bare project"));
}
#[test]
fn generation_hint_existing_project_lint_script_detects_cargo() {
    // A Cargo.toml marks the project as non-bare Rust.
    let tmp = TempDir::new().unwrap();
    std::fs::write(tmp.path().join("Cargo.toml"), "[package]").unwrap();
    let hint = generation_hint(WizardStep::LintScript, tmp.path());
    assert!(hint.contains("cargo fmt --all --check"));
    assert!(hint.contains("cargo clippy -- -D warnings"));
    assert!(!hint.contains("bare project"));
}
#[test]
fn step_output_path_build_script_returns_script_build() {
    let tmp = TempDir::new().unwrap();
    let out = step_output_path(tmp.path(), WizardStep::BuildScript).unwrap();
    assert!(out.ends_with("script/build"));
}
#[test]
fn step_output_path_lint_script_returns_script_lint() {
    let tmp = TempDir::new().unwrap();
    let out = step_output_path(tmp.path(), WizardStep::LintScript).unwrap();
    assert!(out.ends_with("script/lint"));
}
#[test]
fn is_script_step_includes_build_and_lint() {
    // Both newly added wizard steps must be classified as script steps.
    assert!([WizardStep::BuildScript, WizardStep::LintScript]
        .into_iter()
        .all(is_script_step));
}
}
+4 -2
View File
@@ -195,7 +195,7 @@ mod tests {
let body: serde_json::Value = resp.0.into_body().into_json().await.unwrap();
assert_eq!(body["current_step_index"], 1);
assert!(!body["completed"].as_bool().unwrap());
assert_eq!(body["steps"].as_array().unwrap().len(), 6);
assert_eq!(body["steps"].as_array().unwrap().len(), 8);
assert_eq!(body["steps"][0]["status"], "confirmed");
}
@@ -279,11 +279,13 @@ mod tests {
let (dir, client) = setup();
WizardState::init_if_missing(dir.path());
// Steps 2-6 (scaffold is already confirmed)
// Steps 2-8 (scaffold is already confirmed)
let steps = [
"context",
"stack",
"test_script",
"build_script",
"lint_script",
"release_script",
"test_coverage",
];
+21 -11
View File
@@ -37,6 +37,13 @@ pub(crate) async fn ensure_project_root_with_story_kit(
if !path.join(".huskies").is_dir() {
scaffold_story_kit(&path, port)?;
}
// Always update .mcp.json with the current port so the bot connects to
// the right endpoint even when HUSKIES_PORT changes between restarts.
let mcp_content = format!(
"{{\n \"mcpServers\": {{\n \"huskies\": {{\n \"type\": \"http\",\n \"url\": \"http://localhost:{port}/mcp\"\n }}\n }}\n}}\n"
);
fs::write(path.join(".mcp.json"), mcp_content)
.map_err(|e| format!("Failed to write .mcp.json: {}", e))?;
Ok(())
})
.await
@@ -194,16 +201,15 @@ mod tests {
}
#[tokio::test]
async fn open_project_does_not_overwrite_existing_mcp_json() {
// scaffold must NOT overwrite .mcp.json when it already exists — QA
// test servers share the real project root, and re-writing would
// clobber the file with the wrong port.
async fn open_project_updates_mcp_json_with_current_port() {
// .mcp.json must always be updated with the actual running port so the
// bot connects to the right MCP endpoint even when HUSKIES_PORT changes.
let dir = tempdir().unwrap();
let project_dir = dir.path().join("myproject");
fs::create_dir_all(&project_dir).unwrap();
// Pre-write .mcp.json with a different port to simulate an already-configured project.
// Pre-write .mcp.json with a different port to simulate a stale file.
let mcp_path = project_dir.join(".mcp.json");
fs::write(&mcp_path, "{\"existing\": true}").unwrap();
fs::write(&mcp_path, "{\"stale\": true}").unwrap();
let store = make_store(&dir);
let state = SessionState::default();
@@ -211,15 +217,19 @@ mod tests {
project_dir.to_string_lossy().to_string(),
&state,
&store,
3001,
3002,
)
.await
.unwrap();
assert_eq!(
fs::read_to_string(&mcp_path).unwrap(),
"{\"existing\": true}",
"open_project must not overwrite an existing .mcp.json"
let content = fs::read_to_string(&mcp_path).unwrap();
assert!(
content.contains("3002"),
"open_project must update .mcp.json with the actual running port"
);
assert!(
content.contains("localhost"),
"mcp.json must reference localhost"
);
}
+661 -7
View File
@@ -199,33 +199,202 @@ pub fn detect_components_toml(root: &Path) -> String {
sections.join("\n")
}
/// Pick the Node.js test command for `pkg_dir` (a directory holding `package.json`).
///
/// Known runners (vitest, jest) are detected by scanning the manifest text;
/// otherwise the package manager's default `test` script is used. The package
/// manager itself is chosen from the presence of `pnpm-lock.yaml`.
fn detect_node_test_cmd(pkg_dir: &Path) -> String {
    let uses_pnpm = pkg_dir.join("pnpm-lock.yaml").exists();
    // Direct runner invocations go through pnpm when a pnpm lock file
    // exists, otherwise through npx.
    let runner_pm = if uses_pnpm { "pnpm" } else { "npx" };
    let manifest = std::fs::read_to_string(pkg_dir.join("package.json")).unwrap_or_default();
    if manifest.contains("\"vitest\"") {
        format!("{runner_pm} vitest run")
    } else if manifest.contains("\"jest\"") {
        format!("{runner_pm} jest")
    } else if uses_pnpm {
        "pnpm test".to_string()
    } else {
        "npm test".to_string()
    }
}
/// Pick the Node.js build command for `pkg_dir`: pnpm when `pnpm-lock.yaml`
/// is present, npm otherwise.
fn detect_node_build_cmd(pkg_dir: &Path) -> String {
    let pm = if pkg_dir.join("pnpm-lock.yaml").exists() {
        "pnpm"
    } else {
        "npm"
    };
    format!("{pm} run build")
}
/// Pick the Node.js lint command for `pkg_dir` (a directory holding `package.json`).
///
/// When eslint appears in the manifest it is invoked directly (via `pnpm` or
/// `npx` depending on the lock file); otherwise the package's `lint` script
/// is run through the detected package manager.
fn detect_node_lint_cmd(pkg_dir: &Path) -> String {
    let uses_pnpm = pkg_dir.join("pnpm-lock.yaml").exists();
    let manifest = std::fs::read_to_string(pkg_dir.join("package.json")).unwrap_or_default();
    match (manifest.contains("\"eslint\""), uses_pnpm) {
        (true, true) => "pnpm eslint .".to_string(),
        (true, false) => "npx eslint .".to_string(),
        (false, true) => "pnpm run lint".to_string(),
        (false, false) => "npm run lint".to_string(),
    }
}
/// Generate `script/build` content for a new project at `root`.
///
/// Well-known marker files (Cargo.toml, package.json, pyproject.toml, go.mod,
/// plus a `frontend/` or `client/` sub-package) decide which build commands
/// are emitted; a multi-stack project simply gets all of them in sequence.
/// When nothing is recognised a generic stub is returned so the scaffold is
/// always a valid script.
pub fn detect_script_build(root: &Path) -> String {
    let mut steps: Vec<String> = Vec::new();
    if root.join("Cargo.toml").exists() {
        steps.push("cargo build --release".to_string());
    }
    if root.join("package.json").exists() {
        steps.push(detect_node_build_cmd(root));
    }
    // Frontends living in a known subdirectory are built in a subshell so
    // the `cd` does not leak into later commands.
    for sub in ["frontend", "client"] {
        let sub_root = root.join(sub);
        if sub_root.join("package.json").exists() {
            steps.push(format!("(cd {} && {})", sub, detect_node_build_cmd(&sub_root)));
        }
    }
    if root.join("pyproject.toml").exists() {
        steps.push("python -m build".to_string());
    }
    if root.join("go.mod").exists() {
        steps.push("go build ./...".to_string());
    }
    if steps.is_empty() {
        return "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's build commands here.\necho \"No build configured\"\n".to_string();
    }
    format!("#!/usr/bin/env bash\nset -euo pipefail\n\n{}\n", steps.join("\n"))
}
/// Generate `script/lint` content for a new project at `root`.
///
/// Marker files decide which linters are emitted: rustfmt/clippy for Rust,
/// eslint (or the `lint` script) for Node — including a `frontend/` or
/// `client/` sub-package — ruff or flake8 for Python, and `go vet` for Go.
/// A generic stub is returned when nothing is recognised so the scaffold is
/// always a valid script.
pub fn detect_script_lint(root: &Path) -> String {
    let mut steps: Vec<String> = Vec::new();
    if root.join("Cargo.toml").exists() {
        steps.push("cargo fmt --all --check".to_string());
        steps.push("cargo clippy -- -D warnings".to_string());
    }
    if root.join("package.json").exists() {
        steps.push(detect_node_lint_cmd(root));
    }
    // Frontends living in a known subdirectory are linted in a subshell so
    // the `cd` does not leak into later commands.
    for sub in ["frontend", "client"] {
        let sub_root = root.join(sub);
        if sub_root.join("package.json").exists() {
            steps.push(format!("(cd {} && {})", sub, detect_node_lint_cmd(&sub_root)));
        }
    }
    if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
        // Either manifest may mention ruff; concatenate both before checking.
        let mut py_manifests =
            std::fs::read_to_string(root.join("pyproject.toml")).unwrap_or_default();
        py_manifests
            .push_str(&std::fs::read_to_string(root.join("requirements.txt")).unwrap_or_default());
        let linter = if py_manifests.contains("ruff") {
            "ruff check ."
        } else {
            "flake8 ."
        };
        steps.push(linter.to_string());
    }
    if root.join("go.mod").exists() {
        steps.push("go vet ./...".to_string());
    }
    if steps.is_empty() {
        return "#!/usr/bin/env bash\nset -euo pipefail\n\n# Add your project's lint commands here.\necho \"No linters configured\"\n".to_string();
    }
    format!("#!/usr/bin/env bash\nset -euo pipefail\n\n{}\n", steps.join("\n"))
}
/// Generate `script/test` content for a new project at `root`.
///
/// Inspects well-known marker files to identify which tech stacks are present
/// and emits the appropriate test commands. Multi-stack projects get combined
/// commands run sequentially. Falls back to the generic stub when no markers
/// are found so the scaffold is always valid.
///
/// For projects with a frontend in a known subdirectory (`frontend/`, `client/`),
/// the test runner is detected from the `package.json` (vitest, jest, npm, pnpm).
pub fn detect_script_test(root: &Path) -> String {
let mut commands: Vec<&str> = Vec::new();
let mut commands: Vec<String> = Vec::new();
if root.join("Cargo.toml").exists() {
commands.push("cargo test");
commands.push("cargo test".to_string());
}
if root.join("package.json").exists() {
if root.join("pnpm-lock.yaml").exists() {
commands.push("pnpm test");
commands.push("pnpm test".to_string());
} else {
commands.push("npm test");
commands.push("npm test".to_string());
}
}
// Detect frontend in known subdirectories (e.g. frontend/, client/)
for subdir in &["frontend", "client"] {
let sub_path = root.join(subdir);
if sub_path.join("package.json").exists() {
let cmd = detect_node_test_cmd(&sub_path);
commands.push(format!("(cd {} && {})", subdir, cmd));
}
}
if root.join("pyproject.toml").exists() || root.join("requirements.txt").exists() {
commands.push("pytest");
commands.push("pytest".to_string());
}
if root.join("go.mod").exists() {
commands.push("go test ./...");
commands.push("go test ./...".to_string());
}
if commands.is_empty() {
@@ -234,7 +403,7 @@ pub fn detect_script_test(root: &Path) -> String {
let mut script = "#!/usr/bin/env bash\nset -euo pipefail\n\n".to_string();
for cmd in commands {
script.push_str(cmd);
script.push_str(&cmd);
script.push('\n');
}
script
@@ -298,6 +467,8 @@ fn write_story_kit_gitignore(root: &Path) -> Result<(), String> {
"token_usage.jsonl",
"wizard_state.json",
"store.json",
"pipeline.db",
"*.db",
];
let gitignore_path = root.join(".huskies").join(".gitignore");
@@ -411,6 +582,10 @@ pub(crate) fn scaffold_story_kit(root: &Path, port: u16) -> Result<(), String> {
write_file_if_missing(&tech_root.join("STACK.md"), STORY_KIT_STACK)?;
let script_test_content = detect_script_test(root);
write_script_if_missing(&script_root.join("test"), &script_test_content)?;
let script_build_content = detect_script_build(root);
write_script_if_missing(&script_root.join("build"), &script_build_content)?;
let script_lint_content = detect_script_lint(root);
write_script_if_missing(&script_root.join("lint"), &script_lint_content)?;
write_file_if_missing(&root.join("CLAUDE.md"), STORY_KIT_CLAUDE_MD)?;
// Write per-transport bot.toml example files so users can see all options.
@@ -744,6 +919,9 @@ mod tests {
assert!(!root_content.contains(".huskies/coverage/"));
// store.json must be in .huskies/.gitignore instead
assert!(sk_content.contains("store.json"));
// Database files must be ignored so novice users don't accidentally commit them
assert!(sk_content.contains("pipeline.db"));
assert!(sk_content.contains("*.db"));
}
#[test]
@@ -1165,6 +1343,141 @@ mod tests {
);
}
#[test]
fn detect_script_test_frontend_subdir_with_vitest_uses_npx_vitest() {
    let tmp = tempdir().unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    // vitest in devDependencies must be picked over the generic npm test.
    let manifest = r#"{"devDependencies":{"vitest":"^1.0.0"},"scripts":{"test":"vitest run"}}"#;
    fs::write(fe.join("package.json"), manifest).unwrap();
    let out = detect_script_test(tmp.path());
    assert!(
        out.contains("vitest run"),
        "frontend with vitest should emit vitest run"
    );
    assert!(
        out.contains("cd frontend"),
        "should cd into the frontend directory"
    );
    assert!(
        !out.contains("No tests configured"),
        "should not use stub when frontend is detected"
    );
}
#[test]
fn detect_script_test_frontend_subdir_with_jest_uses_npx_jest() {
    let tmp = tempdir().unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    let manifest = r#"{"devDependencies":{"jest":"^29.0.0"},"scripts":{"test":"jest"}}"#;
    fs::write(fe.join("package.json"), manifest).unwrap();
    let out = detect_script_test(tmp.path());
    assert!(out.contains("jest"), "frontend with jest should emit jest");
    assert!(
        out.contains("cd frontend"),
        "should cd into the frontend directory"
    );
}
#[test]
fn detect_script_test_frontend_subdir_no_known_runner_uses_npm_test() {
    let tmp = tempdir().unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    // mocha is not a recognised runner, so the generic fallback applies.
    fs::write(fe.join("package.json"), r#"{"scripts":{"test":"mocha"}}"#).unwrap();
    let out = detect_script_test(tmp.path());
    assert!(
        out.contains("npm test"),
        "frontend without known runner should fall back to npm test"
    );
    assert!(out.contains("cd frontend"));
}
#[test]
fn detect_script_test_frontend_subdir_pnpm_uses_pnpm_vitest() {
    let tmp = tempdir().unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    fs::write(fe.join("package.json"), r#"{"devDependencies":{"vitest":"^1.0.0"}}"#).unwrap();
    // A pnpm lock file switches the runner invocation from npx to pnpm.
    fs::write(fe.join("pnpm-lock.yaml"), "").unwrap();
    let out = detect_script_test(tmp.path());
    assert!(
        out.contains("pnpm vitest run"),
        "pnpm frontend with vitest should use pnpm vitest run"
    );
}
#[test]
fn detect_script_test_rust_plus_frontend_subdir_both_included() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"server\"\n").unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    fs::write(fe.join("package.json"), r#"{"devDependencies":{"vitest":"^1.0.0"}}"#).unwrap();
    // Multi-stack projects must get every detected command.
    let out = detect_script_test(tmp.path());
    assert!(
        out.contains("cargo test"),
        "Rust + frontend should include cargo test"
    );
    assert!(
        out.contains("vitest run"),
        "Rust + frontend should include vitest run"
    );
    assert!(
        out.contains("cd frontend"),
        "Rust + frontend should cd into frontend"
    );
}
#[test]
fn detect_script_test_client_subdir_detected() {
    let tmp = tempdir().unwrap();
    let client_dir = tmp.path().join("client");
    fs::create_dir_all(&client_dir).unwrap();
    fs::write(client_dir.join("package.json"), r#"{"scripts":{"test":"jest"}}"#).unwrap();
    let out = detect_script_test(tmp.path());
    assert!(
        out.contains("cd client"),
        "client/ subdir should also be detected"
    );
}
#[test]
fn detect_script_test_output_starts_with_shebang() {
let dir = tempdir().unwrap();
@@ -1211,6 +1524,347 @@ mod tests {
);
}
// --- detect_script_build ---
#[test]
fn detect_script_build_no_markers_returns_stub() {
    // No marker files at all — the generic stub must be emitted.
    let tmp = tempdir().unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("No build configured"),
        "fallback should contain the generic stub message"
    );
    assert!(out.starts_with("#!/usr/bin/env bash"));
}
#[test]
fn detect_script_build_cargo_toml_adds_cargo_build_release() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("cargo build --release"),
        "Rust project should run cargo build --release"
    );
    assert!(!out.contains("No build configured"));
}
#[test]
fn detect_script_build_package_json_npm_adds_npm_run_build() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("npm run build"),
        "Node project without pnpm-lock should run npm run build"
    );
}
#[test]
fn detect_script_build_package_json_pnpm_adds_pnpm_run_build() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    fs::write(tmp.path().join("pnpm-lock.yaml"), "").unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("pnpm run build"),
        "Node project with pnpm-lock should run pnpm run build"
    );
    // "pnpm run build" contains "npm run build" as a substring, so compare
    // whole trimmed lines instead of using contains().
    assert!(
        !out.lines().any(|l| l.trim() == "npm run build"),
        "should not use npm when pnpm-lock.yaml is present"
    );
}
#[test]
fn detect_script_build_go_mod_adds_go_build() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("go build ./..."),
        "Go project should run go build ./..."
    );
}
#[test]
fn detect_script_build_pyproject_toml_adds_python_build() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("pyproject.toml"), "[project]\nname = \"x\"\n").unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("python -m build"),
        "Python project should run python -m build"
    );
}
#[test]
fn detect_script_build_frontend_subdir_detected() {
    let tmp = tempdir().unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    fs::write(fe.join("package.json"), "{}").unwrap();
    let out = detect_script_build(tmp.path());
    assert!(
        out.contains("cd frontend"),
        "frontend subdir should be detected for build"
    );
    assert!(out.contains("npm run build"));
}
#[test]
fn detect_script_build_rust_plus_frontend_subdir_both_included() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"server\"\n").unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    fs::write(fe.join("package.json"), "{}").unwrap();
    // Backend and frontend build steps must both be present.
    let out = detect_script_build(tmp.path());
    for needle in ["cargo build --release", "cd frontend", "npm run build"] {
        assert!(out.contains(needle));
    }
}
// --- detect_script_lint ---
#[test]
fn detect_script_lint_no_markers_returns_stub() {
    // No marker files at all — the generic stub must be emitted.
    let tmp = tempdir().unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("No linters configured"),
        "fallback should contain the generic stub message"
    );
    assert!(out.starts_with("#!/usr/bin/env bash"));
}
#[test]
fn detect_script_lint_cargo_toml_adds_fmt_and_clippy() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"x\"\n").unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("cargo fmt --all --check"),
        "Rust project should check formatting"
    );
    assert!(
        out.contains("cargo clippy -- -D warnings"),
        "Rust project should run clippy"
    );
    assert!(!out.contains("No linters configured"));
}
#[test]
fn detect_script_lint_package_json_without_eslint_uses_npm_run_lint() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("package.json"), "{}").unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("npm run lint"),
        "Node project without eslint dep should fall back to npm run lint"
    );
}
#[test]
fn detect_script_lint_package_json_with_eslint_uses_npx_eslint() {
    let tmp = tempdir().unwrap();
    let manifest = r#"{"devDependencies":{"eslint":"^8.0.0"}}"#;
    fs::write(tmp.path().join("package.json"), manifest).unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("npx eslint ."),
        "Node project with eslint should use npx eslint ."
    );
}
#[test]
fn detect_script_lint_pnpm_with_eslint_uses_pnpm_eslint() {
    let tmp = tempdir().unwrap();
    let manifest = r#"{"devDependencies":{"eslint":"^8.0.0"}}"#;
    fs::write(tmp.path().join("package.json"), manifest).unwrap();
    fs::write(tmp.path().join("pnpm-lock.yaml"), "").unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("pnpm eslint ."),
        "pnpm project with eslint should use pnpm eslint ."
    );
}
#[test]
fn detect_script_lint_python_requirements_uses_flake8() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("requirements.txt"), "flask\n").unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("flake8 ."),
        "Python project without ruff should use flake8"
    );
}
#[test]
fn detect_script_lint_python_with_ruff_uses_ruff() {
    let tmp = tempdir().unwrap();
    // A [tool.ruff] section marks the project as ruff-configured.
    fs::write(
        tmp.path().join("pyproject.toml"),
        "[project]\nname = \"x\"\n\n[tool.ruff]\n",
    )
    .unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("ruff check ."),
        "Python project with ruff configured should use ruff"
    );
    assert!(
        !out.contains("flake8"),
        "should not use flake8 when ruff is configured"
    );
}
#[test]
fn detect_script_lint_go_mod_adds_go_vet() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("go.mod"), "module example.com/app\n").unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("go vet ./..."),
        "Go project should run go vet ./..."
    );
}
#[test]
fn detect_script_lint_frontend_subdir_detected() {
    let tmp = tempdir().unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    fs::write(fe.join("package.json"), "{}").unwrap();
    let out = detect_script_lint(tmp.path());
    assert!(
        out.contains("cd frontend"),
        "frontend subdir should be detected for lint"
    );
}
#[test]
fn detect_script_lint_rust_plus_frontend_subdir_both_included() {
    let tmp = tempdir().unwrap();
    fs::write(tmp.path().join("Cargo.toml"), "[package]\nname = \"server\"\n").unwrap();
    let fe = tmp.path().join("frontend");
    fs::create_dir_all(&fe).unwrap();
    fs::write(fe.join("package.json"), "{}").unwrap();
    // Backend and frontend lint steps must both be present.
    let out = detect_script_lint(tmp.path());
    for needle in ["cargo fmt --all --check", "cargo clippy -- -D warnings", "cd frontend"] {
        assert!(out.contains(needle));
    }
}
#[test]
fn scaffold_story_kit_creates_script_build_and_lint() {
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();
    assert!(
        tmp.path().join("script/build").exists(),
        "script/build should be created by scaffold"
    );
    assert!(
        tmp.path().join("script/lint").exists(),
        "script/lint should be created by scaffold"
    );
}
#[cfg(unix)]
#[test]
fn scaffold_story_kit_creates_executable_script_build_and_lint() {
    use std::os::unix::fs::PermissionsExt;
    let tmp = tempdir().unwrap();
    scaffold_story_kit(tmp.path(), 3001).unwrap();
    for name in &["build", "lint"] {
        let script_path = tmp.path().join("script").join(name);
        assert!(script_path.exists(), "script/{name} should be created");
        // Any executable bit (user/group/other) satisfies the check.
        let mode = fs::metadata(&script_path).unwrap().permissions().mode();
        assert!(mode & 0o111 != 0, "script/{name} should be executable");
    }
}
#[test]
fn scaffold_script_build_contains_detected_commands_for_rust() {
    // With a Cargo.toml present, the scaffolded script/build should carry
    // the detected release-build command rather than a generic placeholder.
    let project = tempdir().unwrap();
    let manifest = "[package]\nname = \"myapp\"\n";
    fs::write(project.path().join("Cargo.toml"), manifest).unwrap();

    scaffold_story_kit(project.path(), 3001).unwrap();
    let build_script = fs::read_to_string(project.path().join("script/build")).unwrap();

    assert!(
        build_script.contains("cargo build --release"),
        "Rust project scaffold should set cargo build --release in script/build"
    );
}
#[test]
fn scaffold_script_lint_contains_detected_commands_for_rust() {
    // With a Cargo.toml present, the scaffolded script/lint should include
    // both the fmt check and the clippy invocation for the Rust toolchain.
    let project = tempdir().unwrap();
    let manifest = "[package]\nname = \"myapp\"\n";
    fs::write(project.path().join("Cargo.toml"), manifest).unwrap();

    scaffold_story_kit(project.path(), 3001).unwrap();
    let lint_script = fs::read_to_string(project.path().join("script/lint")).unwrap();

    for (needle, msg) in [
        (
            "cargo fmt --all --check",
            "Rust project scaffold should include fmt check in script/lint",
        ),
        (
            "cargo clippy -- -D warnings",
            "Rust project scaffold should include clippy in script/lint",
        ),
    ] {
        assert!(lint_script.contains(needle), "{msg}");
    }
}
// --- generate_project_toml ---
#[test]
+1 -1
View File
@@ -5,7 +5,7 @@ use std::path::Path;
/// Only untouched templates contain this marker — real project content
/// will never include it, so it avoids false positives when the project
/// itself is an "Agentic AI Code Assistant".
const TEMPLATE_SENTINEL: &str = "<!-- huskies:scaffold-template -->";
pub(crate) const TEMPLATE_SENTINEL: &str = "<!-- huskies:scaffold-template -->";
/// Marker found in the default `script/test` scaffold output.
const TEMPLATE_MARKER_SCRIPT: &str = "No tests configured";
+11 -3
View File
@@ -16,9 +16,13 @@ pub enum WizardStep {
Stack,
/// Step 4: create script/test
TestScript,
/// Step 5: create script/release
/// Step 5: create script/build
BuildScript,
/// Step 6: create script/lint
LintScript,
/// Step 7: create script/release
ReleaseScript,
/// Step 6: create script/test_coverage
/// Step 8: create script/test_coverage
TestCoverage,
}
@@ -29,6 +33,8 @@ impl WizardStep {
WizardStep::Context,
WizardStep::Stack,
WizardStep::TestScript,
WizardStep::BuildScript,
WizardStep::LintScript,
WizardStep::ReleaseScript,
WizardStep::TestCoverage,
];
@@ -40,6 +46,8 @@ impl WizardStep {
WizardStep::Context => "Generate project context (00_CONTEXT.md)",
WizardStep::Stack => "Generate tech stack spec (STACK.md)",
WizardStep::TestScript => "Create test script (script/test)",
WizardStep::BuildScript => "Create build script (script/build)",
WizardStep::LintScript => "Create lint script (script/lint)",
WizardStep::ReleaseScript => "Create release script (script/release)",
WizardStep::TestCoverage => "Create test coverage script (script/test_coverage)",
}
@@ -262,7 +270,7 @@ mod tests {
#[test]
fn default_state_has_all_steps_pending() {
let state = WizardState::default();
assert_eq!(state.steps.len(), 6);
assert_eq!(state.steps.len(), 8);
for step in &state.steps {
assert_eq!(step.status, StepStatus::Pending);
}