huskies: merge 618_story_extract_mcp_only_domain_services
This commit is contained in:
@@ -0,0 +1,72 @@
|
||||
//! Diagnostics I/O — the ONLY place in `service::diagnostics/` that may perform side effects.
|
||||
//!
|
||||
//! Side effects here include: reading and writing `.claude/settings.json` via `std::fs`.
|
||||
//! Pure permission-rule logic (pattern derivation, wildcard domination checks) lives in
|
||||
//! `permission.rs`.
|
||||
|
||||
use serde_json::{Value, json};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Add a permission rule to `.claude/settings.json` in the project root.
|
||||
///
|
||||
/// Does nothing if the rule already exists (exact match) or is already covered
|
||||
/// by a wildcard pattern in the allow list. Creates the file and any missing
|
||||
/// parent directories if they do not yet exist.
|
||||
///
|
||||
/// # Errors
|
||||
/// Returns `Err(String)` if the directory cannot be created, the file cannot be
|
||||
/// read or written, or the JSON cannot be parsed or serialised.
|
||||
pub fn add_permission_rule(project_root: &Path, rule: &str) -> Result<(), String> {
|
||||
let claude_dir = project_root.join(".claude");
|
||||
fs::create_dir_all(&claude_dir)
|
||||
.map_err(|e| format!("Failed to create .claude/ directory: {e}"))?;
|
||||
|
||||
let settings_path = claude_dir.join("settings.json");
|
||||
let mut settings: Value = if settings_path.exists() {
|
||||
let content = fs::read_to_string(&settings_path)
|
||||
.map_err(|e| format!("Failed to read settings.json: {e}"))?;
|
||||
serde_json::from_str(&content).map_err(|e| format!("Failed to parse settings.json: {e}"))?
|
||||
} else {
|
||||
json!({ "permissions": { "allow": [] } })
|
||||
};
|
||||
|
||||
let allow_arr = settings
|
||||
.pointer_mut("/permissions/allow")
|
||||
.and_then(|v| v.as_array_mut());
|
||||
|
||||
let allow = match allow_arr {
|
||||
Some(arr) => arr,
|
||||
None => {
|
||||
settings
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.entry("permissions")
|
||||
.or_insert(json!({ "allow": [] }));
|
||||
settings
|
||||
.pointer_mut("/permissions/allow")
|
||||
.unwrap()
|
||||
.as_array_mut()
|
||||
.unwrap()
|
||||
}
|
||||
};
|
||||
|
||||
let rule_value = Value::String(rule.to_string());
|
||||
|
||||
// Exact duplicate check.
|
||||
if allow.contains(&rule_value) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Wildcard-coverage check: if "mcp__huskies__*" exists, skip more-specific rules.
|
||||
if super::permission::is_dominated_by_wildcard(rule, allow) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
allow.push(rule_value);
|
||||
|
||||
let pretty =
|
||||
serde_json::to_string_pretty(&settings).map_err(|e| format!("Failed to serialize: {e}"))?;
|
||||
fs::write(&settings_path, pretty).map_err(|e| format!("Failed to write settings.json: {e}"))?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -0,0 +1,89 @@
|
||||
//! Diagnostics service — server logs, CRDT dump, permission management, and story movement.
|
||||
//!
|
||||
//! Extracted from `http/mcp/diagnostics.rs` following the conventions in
|
||||
//! `docs/architecture/service-modules.md`:
|
||||
//! - `mod.rs` (this file) — public API, typed [`Error`], orchestration
|
||||
//! - `io.rs` — the ONLY place that performs side effects (filesystem reads/writes)
|
||||
//! - `permission.rs` — pure permission-rule generation and wildcard checks
|
||||
|
||||
pub mod io;
|
||||
pub mod permission;
|
||||
|
||||
pub use io::add_permission_rule;
|
||||
pub use permission::generate_permission_rule;
|
||||
#[allow(unused_imports)]
|
||||
pub use permission::is_dominated_by_wildcard;
|
||||
|
||||
// ── Error type ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Typed errors returned by `service::diagnostics` functions.
///
/// HTTP handlers map these to status codes:
/// - [`Error::NotFound`] → 404 Not Found
/// - [`Error::Validation`] → 400 Bad Request
/// - [`Error::Conflict`] → 409 Conflict
/// - [`Error::Io`] → 500 Internal Server Error
/// - [`Error::UpstreamFailure`] → 500 Internal Server Error
// NOTE(review): `dead_code` is presumably allowed because not every variant is
// constructed yet after the extraction from `http/mcp/diagnostics.rs` —
// confirm, and drop the allow once all variants are in use.
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The requested resource was not found.
    NotFound(String),
    /// A required argument is missing or has an invalid value.
    Validation(String),
    /// The operation cannot proceed due to a conflicting state.
    Conflict(String),
    /// A filesystem read or write operation failed.
    Io(String),
    /// An upstream dependency returned an unexpected error.
    UpstreamFailure(String),
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::NotFound(msg) => write!(f, "Not found: {msg}"),
|
||||
Self::Validation(msg) => write!(f, "Validation error: {msg}"),
|
||||
Self::Conflict(msg) => write!(f, "Conflict: {msg}"),
|
||||
Self::Io(msg) => write!(f, "I/O error: {msg}"),
|
||||
Self::UpstreamFailure(msg) => write!(f, "Upstream failure: {msg}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Every variant's Display output must contain its category label; the
    /// HTTP layer and log scrapers rely on these prefixes.
    fn assert_display_contains(err: Error, needle: &str) {
        assert!(err.to_string().contains(needle));
    }

    #[test]
    fn error_display_not_found() {
        assert_display_contains(Error::NotFound("log file missing".into()), "Not found");
    }

    #[test]
    fn error_display_validation() {
        assert_display_contains(Error::Validation("invalid filter".into()), "Validation error");
    }

    #[test]
    fn error_display_conflict() {
        assert_display_contains(Error::Conflict("story in wrong stage".into()), "Conflict");
    }

    #[test]
    fn error_display_io() {
        assert_display_contains(Error::Io("settings.json write failed".into()), "I/O error");
    }

    #[test]
    fn error_display_upstream_failure() {
        assert_display_contains(
            Error::UpstreamFailure("rebuild failed".into()),
            "Upstream failure",
        );
    }
}
|
||||
@@ -0,0 +1,105 @@
|
||||
//! Pure permission-rule generation for `service::diagnostics`.
|
||||
//!
|
||||
//! These functions produce Claude Code permission-rule strings from tool call
|
||||
//! metadata. No I/O: they take `&str` / `&Value` and return `String`.
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
/// Generate a Claude Code permission rule string for the given tool name and input.
|
||||
///
|
||||
/// - `Bash` tools → `Bash(first_word *)` derived from the `command` field.
|
||||
/// - All other tools → the tool name verbatim (e.g. `Edit`, `mcp__huskies__create_story`).
|
||||
pub fn generate_permission_rule(tool_name: &str, tool_input: &Value) -> String {
|
||||
if tool_name == "Bash" {
|
||||
let command_str = tool_input
|
||||
.get("command")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("");
|
||||
let first_word = command_str.split_whitespace().next().unwrap_or("unknown");
|
||||
format!("Bash({first_word} *)")
|
||||
} else {
|
||||
tool_name.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Return `true` if `rule` is already covered by an existing wildcard in `allow_list`.
|
||||
///
|
||||
/// For example, if `allow_list` contains `"mcp__huskies__*"`, then the more
|
||||
/// specific rule `"mcp__huskies__create_story"` is already covered.
|
||||
pub fn is_dominated_by_wildcard(rule: &str, allow_list: &[Value]) -> bool {
|
||||
allow_list.iter().any(|existing| {
|
||||
if let Some(pat) = existing.as_str()
|
||||
&& let Some(prefix) = pat.strip_suffix('*')
|
||||
{
|
||||
return rule.starts_with(prefix);
|
||||
}
|
||||
false
|
||||
})
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn generate_rule_for_edit_tool() {
        assert_eq!(generate_permission_rule("Edit", &json!({})), "Edit");
    }

    #[test]
    fn generate_rule_for_write_tool() {
        assert_eq!(generate_permission_rule("Write", &json!({})), "Write");
    }

    #[test]
    fn generate_rule_for_bash_git() {
        let input = json!({"command": "git status"});
        assert_eq!(generate_permission_rule("Bash", &input), "Bash(git *)");
    }

    #[test]
    fn generate_rule_for_bash_cargo() {
        let input = json!({"command": "cargo test --all"});
        assert_eq!(generate_permission_rule("Bash", &input), "Bash(cargo *)");
    }

    #[test]
    fn generate_rule_for_bash_empty_command() {
        // No `command` key at all falls back to the "unknown" placeholder.
        assert_eq!(generate_permission_rule("Bash", &json!({})), "Bash(unknown *)");
    }

    #[test]
    fn generate_rule_for_mcp_tool() {
        let input = json!({"name": "foo"});
        assert_eq!(
            generate_permission_rule("mcp__huskies__create_story", &input),
            "mcp__huskies__create_story"
        );
    }

    #[test]
    fn is_dominated_by_exact_wildcard() {
        let allow = [json!("mcp__huskies__*")];
        assert!(is_dominated_by_wildcard("mcp__huskies__create_story", &allow));
    }

    #[test]
    fn is_not_dominated_by_different_prefix() {
        let allow = [json!("mcp__other__*")];
        assert!(!is_dominated_by_wildcard("mcp__huskies__create_story", &allow));
    }

    #[test]
    fn is_not_dominated_when_list_is_empty() {
        assert!(!is_dominated_by_wildcard("Edit", &[]));
    }
}
|
||||
@@ -0,0 +1,90 @@
|
||||
//! Git I/O — the ONLY place in `service::git_ops/` that may perform side effects.
|
||||
//!
|
||||
//! Side effects here include: spawning git processes via `std::process::Command`
|
||||
//! (wrapped in `tokio::task::spawn_blocking`), and filesystem existence and
|
||||
//! canonicalization checks for path validation.
|
||||
//! All pure logic (path-prefix checks, porcelain parsing) lives in `path_guard.rs`
|
||||
//! and `porcelain.rs`.
|
||||
|
||||
use super::Error;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Output;
|
||||
|
||||
/// Validate that `worktree_path` is an absolute path that exists on disk and
|
||||
/// lies inside the project's `.huskies/worktrees/` directory. Returns the
|
||||
/// canonicalized path on success.
|
||||
///
|
||||
/// # Errors
|
||||
/// - [`Error::Validation`] if the path is relative or does not exist.
|
||||
/// - [`Error::PathNotAllowed`] if the path is outside `.huskies/worktrees/`.
|
||||
/// - [`Error::Io`] if canonicalization fails.
|
||||
pub fn validate_worktree_path(worktree_path: &str, project_root: &Path) -> Result<PathBuf, Error> {
|
||||
let wd = PathBuf::from(worktree_path);
|
||||
|
||||
if !wd.is_absolute() {
|
||||
return Err(Error::Validation(
|
||||
"worktree_path must be an absolute path".to_string(),
|
||||
));
|
||||
}
|
||||
if !wd.exists() {
|
||||
return Err(Error::Validation(format!(
|
||||
"worktree_path does not exist: {worktree_path}"
|
||||
)));
|
||||
}
|
||||
|
||||
let worktrees_root = project_root.join(".huskies").join("worktrees");
|
||||
|
||||
let canonical_wd = wd
|
||||
.canonicalize()
|
||||
.map_err(|e| Error::Io(format!("Cannot canonicalize worktree_path: {e}")))?;
|
||||
|
||||
let canonical_wt = if worktrees_root.exists() {
|
||||
worktrees_root
|
||||
.canonicalize()
|
||||
.map_err(|e| Error::Io(format!("Cannot canonicalize worktrees root: {e}")))?
|
||||
} else {
|
||||
return Err(Error::PathNotAllowed(
|
||||
"No worktrees directory found in project".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
if !super::path_guard::is_under_root(&canonical_wd, &canonical_wt) {
|
||||
return Err(Error::PathNotAllowed(format!(
|
||||
"worktree_path must be inside .huskies/worktrees/. Got: {worktree_path}"
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(canonical_wd)
|
||||
}
|
||||
|
||||
/// Run a git command with static arg slices in `dir` and return the process output.
|
||||
///
|
||||
/// # Errors
|
||||
/// - [`Error::UpstreamFailure`] if the task panics or git cannot be spawned.
|
||||
pub async fn run_git(args: Vec<&'static str>, dir: PathBuf) -> Result<Output, Error> {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
std::process::Command::new("git")
|
||||
.args(&args)
|
||||
.current_dir(&dir)
|
||||
.output()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::UpstreamFailure(format!("Task join error: {e}")))?
|
||||
.map_err(|e| Error::Io(format!("Failed to run git: {e}")))
|
||||
}
|
||||
|
||||
/// Run a git command with owned `String` args in `dir` and return the process output.
|
||||
///
|
||||
/// # Errors
|
||||
/// - [`Error::UpstreamFailure`] if the task panics or git cannot be spawned.
|
||||
pub async fn run_git_owned(args: Vec<String>, dir: PathBuf) -> Result<Output, Error> {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
std::process::Command::new("git")
|
||||
.args(&args)
|
||||
.current_dir(&dir)
|
||||
.output()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::UpstreamFailure(format!("Task join error: {e}")))?
|
||||
.map_err(|e| Error::Io(format!("Failed to run git: {e}")))
|
||||
}
|
||||
@@ -0,0 +1,100 @@
|
||||
//! Git operations service — worktree path validation and git command execution.
|
||||
//!
|
||||
//! Extracted from `http/mcp/git_tools.rs` following the conventions in
|
||||
//! `docs/architecture/service-modules.md`:
|
||||
//! - `mod.rs` (this file) — public API, typed [`Error`], orchestration
|
||||
//! - `io.rs` — the ONLY place that performs side effects (git processes, filesystem)
|
||||
//! - `path_guard.rs` — pure path-prefix safety checks
|
||||
//! - `porcelain.rs` — pure git porcelain output parsers
|
||||
|
||||
pub mod io;
|
||||
pub mod path_guard;
|
||||
pub mod porcelain;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
pub use path_guard::is_under_root;
|
||||
pub use porcelain::parse_git_status_porcelain;
|
||||
|
||||
// ── Error type ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Typed errors returned by `service::git_ops` functions.
///
/// HTTP handlers map these to status codes:
/// - [`Error::NotFound`] → 404 Not Found
/// - [`Error::Validation`] → 400 Bad Request
/// - [`Error::Conflict`] → 409 Conflict
/// - [`Error::PathNotAllowed`] → 400 Bad Request (sandbox violation)
/// - [`Error::Io`] → 500 Internal Server Error
/// - [`Error::UpstreamFailure`] → 500 Internal Server Error
// NOTE(review): `dead_code` is presumably allowed because not every variant is
// constructed yet after the extraction from `http/mcp/git_tools.rs` —
// confirm, and drop the allow once all variants are in use.
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The requested worktree or path does not exist.
    NotFound(String),
    /// A required argument is missing or has an invalid value.
    Validation(String),
    /// The git operation cannot proceed due to a conflicting state.
    Conflict(String),
    /// The path is outside the allowed sandbox.
    PathNotAllowed(String),
    /// A filesystem or git I/O operation failed.
    Io(String),
    /// An upstream git command returned an unexpected error.
    UpstreamFailure(String),
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::NotFound(msg) => write!(f, "Not found: {msg}"),
|
||||
Self::Validation(msg) => write!(f, "Validation error: {msg}"),
|
||||
Self::Conflict(msg) => write!(f, "Conflict: {msg}"),
|
||||
Self::PathNotAllowed(msg) => write!(f, "Path not allowed: {msg}"),
|
||||
Self::Io(msg) => write!(f, "I/O error: {msg}"),
|
||||
Self::UpstreamFailure(msg) => write!(f, "Upstream failure: {msg}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Every variant's Display output must contain its category label; the
    /// HTTP layer and log scrapers rely on these prefixes.
    fn assert_display_contains(err: Error, needle: &str) {
        assert!(err.to_string().contains(needle));
    }

    #[test]
    fn error_display_not_found() {
        assert_display_contains(Error::NotFound("worktree missing".into()), "Not found");
    }

    #[test]
    fn error_display_validation() {
        assert_display_contains(Error::Validation("relative path".into()), "Validation error");
    }

    #[test]
    fn error_display_conflict() {
        assert_display_contains(Error::Conflict("uncommitted changes".into()), "Conflict");
    }

    #[test]
    fn error_display_path_not_allowed() {
        assert_display_contains(
            Error::PathNotAllowed("outside sandbox".into()),
            "Path not allowed",
        );
    }

    #[test]
    fn error_display_io() {
        assert_display_contains(Error::Io("permission denied".into()), "I/O error");
    }

    #[test]
    fn error_display_upstream_failure() {
        assert_display_contains(
            Error::UpstreamFailure("git not found".into()),
            "Upstream failure",
        );
    }
}
|
||||
@@ -0,0 +1,58 @@
|
||||
//! Pure path-guard helpers for `service::git_ops`.
|
||||
//!
|
||||
//! These functions are free of side effects — they operate on already-resolved
|
||||
//! `Path` values and perform no filesystem I/O. Path existence checks and
|
||||
//! canonicalization belong in `io.rs`.
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
/// Return `true` if `canonical_path` starts with (i.e. is under) `root`.
///
/// Both paths must already be canonicalized so that symlinks, `.`, and `..`
/// components do not cause false negatives.
pub fn is_under_root(canonical_path: &Path, root: &Path) -> bool {
    // `strip_prefix` succeeds exactly when `root` is a whole-component prefix
    // (the documented equivalent of `starts_with`): `/foo/bar-extra` is NOT
    // under `/foo/bar`, while a path is considered under itself.
    canonical_path.strip_prefix(root).is_ok()
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    /// Convenience wrapper so each case reads as a (path, root) pair.
    fn under(path: &str, root: &str) -> bool {
        is_under_root(&PathBuf::from(path), &PathBuf::from(root))
    }

    #[test]
    fn is_under_root_returns_true_for_child() {
        assert!(under(
            "/project/.huskies/worktrees/42_story_foo",
            "/project/.huskies/worktrees"
        ));
    }

    #[test]
    fn is_under_root_returns_false_for_sibling() {
        assert!(!under("/project/.huskies/other", "/project/.huskies/worktrees"));
    }

    #[test]
    fn is_under_root_returns_false_for_parent() {
        assert!(!under("/project/.huskies", "/project/.huskies/worktrees"));
    }

    #[test]
    fn is_under_root_returns_true_for_exact_match() {
        assert!(under("/project/.huskies/worktrees", "/project/.huskies/worktrees"));
    }

    #[test]
    fn is_under_root_returns_false_for_path_with_shared_prefix_but_not_child() {
        // Component-wise comparison: /foo/bar-extra is NOT under /foo/bar.
        assert!(!under("/foo/bar-extra", "/foo/bar"));
    }
}
|
||||
@@ -0,0 +1,107 @@
|
||||
//! Pure git porcelain output parsers for `service::git_ops`.
|
||||
//!
|
||||
//! These functions parse the text output of `git status --porcelain=v1`
|
||||
//! and similar commands. No I/O: they take `&str` and return structured data.
|
||||
|
||||
/// Parse `git status --porcelain=v1 -u` output into three file lists.
///
/// Returns `(staged, unstaged, untracked)` where each entry is the file path
/// string from the porcelain line — the text after the two status columns
/// (index status, worktree status) and the separating space.
pub fn parse_git_status_porcelain(stdout: &str) -> (Vec<String>, Vec<String>, Vec<String>) {
    let mut staged: Vec<String> = Vec::new();
    let mut unstaged: Vec<String> = Vec::new();
    let mut untracked: Vec<String> = Vec::new();

    for line in stdout.lines() {
        // x = index (staged) column, y = worktree (unstaged) column.
        let mut chars = line.chars();
        let x = chars.next().unwrap_or(' ');
        let y = chars.next().unwrap_or(' ');
        // Porcelain v1 lines are "XY <path>" with the path at byte 3.
        // `get` (instead of the previous `line[3..]`) skips short lines and
        // cannot panic on a non-char-boundary byte index.
        let Some(path) = line.get(3..) else { continue };
        let path = path.to_string();

        match (x, y) {
            // "??" — untracked file.
            ('?', '?') => untracked.push(path),
            // Blank index column — change only in the worktree (unstaged).
            (' ', _) => unstaged.push(path),
            // Blank worktree column — change only in the index (staged).
            (_, ' ') => staged.push(path),
            _ => {
                // Both staged and unstaged modifications (e.g. "MM").
                staged.push(path.clone());
                unstaged.push(path);
            }
        }
    }

    (staged, unstaged, untracked)
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_empty_output_returns_empty_vecs() {
        let (s, u, t) = parse_git_status_porcelain("");
        assert!(s.is_empty());
        assert!(u.is_empty());
        assert!(t.is_empty());
    }

    #[test]
    fn parse_untracked_file() {
        // "??" in both status columns marks an untracked file.
        let output = "?? new_file.txt\n";
        let (staged, unstaged, untracked) = parse_git_status_porcelain(output);
        assert!(staged.is_empty());
        assert!(unstaged.is_empty());
        assert_eq!(untracked, vec!["new_file.txt"]);
    }

    #[test]
    fn parse_staged_file() {
        // NOTE(review): porcelain staged lines are "A  file" — status 'A',
        // blank worktree column, then a separator space. If this literal has
        // only ONE space, the parser would take the path from byte 3 and see
        // "taged.txt"; confirm the literal contains the intended double space.
        let output = "A staged.txt\n";
        let (staged, unstaged, untracked) = parse_git_status_porcelain(output);
        assert_eq!(staged, vec!["staged.txt"]);
        assert!(unstaged.is_empty());
        assert!(untracked.is_empty());
    }

    #[test]
    fn parse_unstaged_modified_file() {
        // 'M' in second column = unstaged modification
        let output = " M modified.txt\n";
        let (staged, unstaged, untracked) = parse_git_status_porcelain(output);
        assert!(staged.is_empty());
        assert_eq!(unstaged, vec!["modified.txt"]);
        assert!(untracked.is_empty());
    }

    #[test]
    fn parse_both_staged_and_unstaged() {
        // 'MM' = staged + unstaged in same file
        let output = "MM both.txt\n";
        let (staged, unstaged, untracked) = parse_git_status_porcelain(output);
        assert_eq!(staged, vec!["both.txt"]);
        assert_eq!(unstaged, vec!["both.txt"]);
        assert!(untracked.is_empty());
    }

    #[test]
    fn parse_mixed_output() {
        // NOTE(review): same double-space concern as parse_staged_file for
        // the "A staged.rs" entry — verify against the original source.
        let output = "A staged.rs\n M unstaged.rs\n?? untracked.rs\n";
        let (staged, unstaged, untracked) = parse_git_status_porcelain(output);
        assert_eq!(staged, vec!["staged.rs"]);
        assert_eq!(unstaged, vec!["unstaged.rs"]);
        assert_eq!(untracked, vec!["untracked.rs"]);
    }

    #[test]
    fn parse_skips_short_lines() {
        // Lines shorter than 3 chars should be skipped.
        let output = "A \nMM both.txt\n";
        let (staged, _unstaged, _untracked) = parse_git_status_porcelain(output);
        // Only "both.txt" should appear — the 2-char "A " line is skipped.
        assert_eq!(staged, vec!["both.txt"]);
    }
}
|
||||
@@ -0,0 +1,5 @@
|
||||
//! Merge I/O — the ONLY place in `service::merge/` that may perform side effects.
|
||||
//!
|
||||
//! Currently, the bulk of the merge I/O is handled by `crate::agents::merge`
|
||||
//! and `crate::io::story_metadata`. This file is the designated home for any
|
||||
//! future I/O helpers that are extracted from merge-related MCP handlers.
|
||||
@@ -0,0 +1,87 @@
|
||||
//! Merge service — domain logic for merging agent work to master.
|
||||
//!
|
||||
//! Extracted from `http/mcp/merge_tools.rs` following the conventions in
|
||||
//! `docs/architecture/service-modules.md`:
|
||||
//! - `mod.rs` (this file) — public API, typed [`Error`], orchestration
|
||||
//! - `io.rs` — the ONLY place that performs side effects
|
||||
//! - `status.rs` — pure merge-status message formatting
|
||||
|
||||
pub mod io;
|
||||
pub mod status;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
pub use status::format_merge_status_message;
|
||||
|
||||
// ── Error type ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Typed errors returned by `service::merge` functions.
///
/// HTTP handlers map these to status codes:
/// - [`Error::NotFound`] → 404 Not Found
/// - [`Error::Validation`] → 400 Bad Request
/// - [`Error::Conflict`] → 409 Conflict
/// - [`Error::Io`] → 500 Internal Server Error
/// - [`Error::UpstreamFailure`] → 500 Internal Server Error
// NOTE(review): `dead_code` is presumably allowed because not every variant is
// constructed yet after the extraction from `http/mcp/merge_tools.rs` —
// confirm, and drop the allow once all variants are in use.
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The requested story or merge job was not found.
    NotFound(String),
    /// A required argument is missing or has an invalid value.
    Validation(String),
    /// The merge cannot proceed due to a conflicting state.
    Conflict(String),
    /// A filesystem or process I/O operation failed.
    Io(String),
    /// An upstream dependency (agents, git) returned an unexpected error.
    UpstreamFailure(String),
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::NotFound(msg) => write!(f, "Not found: {msg}"),
|
||||
Self::Validation(msg) => write!(f, "Validation error: {msg}"),
|
||||
Self::Conflict(msg) => write!(f, "Conflict: {msg}"),
|
||||
Self::Io(msg) => write!(f, "I/O error: {msg}"),
|
||||
Self::UpstreamFailure(msg) => write!(f, "Upstream failure: {msg}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Every variant's Display output must contain its category label; the
    /// HTTP layer and log scrapers rely on these prefixes.
    fn assert_display_contains(err: Error, needle: &str) {
        assert!(err.to_string().contains(needle));
    }

    #[test]
    fn error_display_not_found() {
        assert_display_contains(Error::NotFound("merge job missing".into()), "Not found");
    }

    #[test]
    fn error_display_validation() {
        assert_display_contains(Error::Validation("story_id required".into()), "Validation error");
    }

    #[test]
    fn error_display_conflict() {
        assert_display_contains(Error::Conflict("story already merged".into()), "Conflict");
    }

    #[test]
    fn error_display_io() {
        assert_display_contains(Error::Io("write failed".into()), "I/O error");
    }

    #[test]
    fn error_display_upstream_failure() {
        assert_display_contains(
            Error::UpstreamFailure("git crashed".into()),
            "Upstream failure",
        );
    }
}
|
||||
@@ -0,0 +1,89 @@
|
||||
//! Pure merge-status message formatting for `service::merge`.
|
||||
//!
|
||||
//! These functions transform a completed merge report into human-readable
|
||||
//! status messages. No I/O: they are pure functions over plain data.
|
||||
|
||||
use crate::agents::merge::MergeReport;
|
||||
|
||||
#[allow(dead_code)]
|
||||
/// Derive a human-readable status message from a completed [`MergeReport`].
|
||||
///
|
||||
/// The message explains what happened and (on failure) what the caller
|
||||
/// should do next.
|
||||
pub fn format_merge_status_message(report: &MergeReport) -> &'static str {
|
||||
if report.success && report.gates_passed && report.conflicts_resolved {
|
||||
"Merge complete: conflicts were auto-resolved and all quality gates passed. Story moved to done and worktree cleaned up."
|
||||
} else if report.success && report.gates_passed {
|
||||
"Merge complete: all quality gates passed. Story moved to done and worktree cleaned up."
|
||||
} else if report.had_conflicts && !report.conflicts_resolved {
|
||||
"Merge failed: conflicts detected that could not be auto-resolved. Merge was aborted — master is untouched. Call report_merge_failure with the conflict details so the human can resolve them. Do NOT manually move the story file or call accept_story."
|
||||
} else if report.success && !report.gates_passed {
|
||||
"Merge committed but quality gates failed. Review gate_output and fix issues before re-running."
|
||||
} else {
|
||||
"Merge failed. Review gate_output for details. Call report_merge_failure to record the failure. Do NOT manually move the story file or call accept_story."
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a minimal [`MergeReport`] carrying only the four flags the
    /// formatter inspects; all other fields are zeroed/empty.
    fn report(
        success: bool,
        had_conflicts: bool,
        conflicts_resolved: bool,
        gates_passed: bool,
    ) -> MergeReport {
        MergeReport {
            story_id: String::new(),
            success,
            had_conflicts,
            conflicts_resolved,
            conflict_details: None,
            gates_passed,
            gate_output: String::new(),
            worktree_cleaned_up: false,
            story_archived: false,
        }
    }

    #[test]
    fn clean_merge_message() {
        let msg = format_merge_status_message(&report(true, false, false, true));
        assert!(msg.contains("quality gates passed"));
        assert!(msg.contains("done"));
    }

    #[test]
    fn conflicts_resolved_message() {
        let msg = format_merge_status_message(&report(true, true, true, true));
        assert!(msg.contains("auto-resolved"));
    }

    #[test]
    fn unresolved_conflicts_message() {
        let msg = format_merge_status_message(&report(false, true, false, false));
        assert!(msg.contains("could not be auto-resolved"));
        assert!(msg.contains("report_merge_failure"));
    }

    #[test]
    fn gates_failed_message() {
        let msg = format_merge_status_message(&report(true, false, false, false));
        assert!(msg.contains("quality gates failed"));
    }

    #[test]
    fn general_failure_message() {
        let msg = format_merge_status_message(&report(false, false, false, false));
        assert!(msg.contains("Merge failed"));
        assert!(msg.contains("report_merge_failure"));
    }
}
|
||||
@@ -8,16 +8,21 @@
|
||||
pub mod agents;
|
||||
pub mod anthropic;
|
||||
pub mod bot_command;
|
||||
pub mod diagnostics;
|
||||
pub mod events;
|
||||
pub mod file_io;
|
||||
pub mod gateway;
|
||||
pub mod git_ops;
|
||||
pub mod health;
|
||||
pub mod merge;
|
||||
pub mod notifications;
|
||||
pub mod oauth;
|
||||
pub mod pipeline;
|
||||
pub mod project;
|
||||
pub mod qa;
|
||||
pub mod settings;
|
||||
pub mod shell;
|
||||
pub mod story;
|
||||
pub mod timer;
|
||||
pub mod wizard;
|
||||
pub mod ws;
|
||||
|
||||
pub mod gateway;
|
||||
pub mod pipeline;
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
//! QA I/O — the ONLY place in `service::qa/` that may perform side effects.
|
||||
//!
|
||||
//! Side effects here include: spawning git processes via `std::process::Command`,
|
||||
//! binding TCP sockets to discover free ports, and launching the QA app process.
|
||||
|
||||
use super::Error;
|
||||
|
||||
/// Find a free TCP port by attempting to bind starting from `start`.
///
/// Scans up to 100 ports from `start` upward (clamped at `u16::MAX`) and
/// returns the first available one. Falls back to `start` if none are found
/// (unlikely in practice).
pub fn find_free_port(start: u16) -> u16 {
    // `start..start + 100` would overflow `u16` (a panic in debug builds)
    // whenever start > u16::MAX - 100; an inclusive range with `take` probes
    // the same 100 candidates without any arithmetic on the bound.
    for port in (start..=u16::MAX).take(100) {
        // Binding probes availability; the listener is dropped immediately,
        // so a race with another process grabbing the port is possible but
        // acceptable for this best-effort helper.
        if std::net::TcpListener::bind(("127.0.0.1", port)).is_ok() {
            return port;
        }
    }
    start
}
|
||||
|
||||
/// Merge a spike's feature branch into master using a fast-forward or simple merge.
|
||||
///
|
||||
/// Unlike the squash-merge pipeline used for stories, spikes skip quality gates
|
||||
/// and preserve their commit history. Returns `true` if a merge was performed,
|
||||
/// `false` if the branch had no unmerged commits (already up to date).
|
||||
///
|
||||
/// # Errors
|
||||
/// - [`Error::Conflict`] if the merge fails due to conflicts.
|
||||
/// - [`Error::UpstreamFailure`] if a git command cannot be run.
|
||||
pub fn merge_spike_branch_to_master(
|
||||
project_root: &std::path::Path,
|
||||
branch: &str,
|
||||
story_id: &str,
|
||||
) -> Result<bool, Error> {
|
||||
use std::process::Command;
|
||||
|
||||
// Check the branch exists and has unmerged changes.
|
||||
if !crate::agents::lifecycle::feature_branch_has_unmerged_changes(project_root, story_id) {
|
||||
crate::slog!(
|
||||
"[qa] Spike '{story_id}': feature branch has no unmerged changes, skipping merge."
|
||||
);
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// Ensure we are on master.
|
||||
let checkout = Command::new("git")
|
||||
.args(["checkout", "master"])
|
||||
.current_dir(project_root)
|
||||
.output()
|
||||
.map_err(|e| Error::UpstreamFailure(format!("git checkout master failed: {e}")))?;
|
||||
if !checkout.status.success() {
|
||||
return Err(Error::UpstreamFailure(format!(
|
||||
"Failed to checkout master: {}",
|
||||
String::from_utf8_lossy(&checkout.stderr)
|
||||
)));
|
||||
}
|
||||
|
||||
// Try fast-forward first, then fall back to a regular merge.
|
||||
let ff = Command::new("git")
|
||||
.args(["merge", "--ff-only", branch])
|
||||
.current_dir(project_root)
|
||||
.output()
|
||||
.map_err(|e| Error::UpstreamFailure(format!("git merge --ff-only failed: {e}")))?;
|
||||
|
||||
if ff.status.success() {
|
||||
crate::slog!("[qa] Spike '{story_id}': fast-forward merged '{branch}' into master.");
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// Fast-forward failed (diverged history) — fall back to a regular merge.
|
||||
let merge = Command::new("git")
|
||||
.args([
|
||||
"merge",
|
||||
"--no-ff",
|
||||
branch,
|
||||
"-m",
|
||||
&format!("Merge spike branch '{branch}' into master"),
|
||||
])
|
||||
.current_dir(project_root)
|
||||
.output()
|
||||
.map_err(|e| Error::UpstreamFailure(format!("git merge failed: {e}")))?;
|
||||
|
||||
if merge.status.success() {
|
||||
crate::slog!("[qa] Spike '{story_id}': merged '{branch}' into master (no-ff).");
|
||||
Ok(true)
|
||||
} else {
|
||||
Err(Error::Conflict(format!(
|
||||
"Failed to merge spike branch '{branch}' into master: {}",
|
||||
String::from_utf8_lossy(&merge.stderr)
|
||||
)))
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
//! Pure QA lifecycle helpers for `service::qa`.
|
||||
//!
|
||||
//! These functions classify work items and make routing decisions without
|
||||
//! performing any I/O. The spike-vs-story distinction determines whether a
|
||||
//! QA approval goes through the merge pipeline or bypasses it.
|
||||
|
||||
use crate::agents::lifecycle::item_type_from_id;
|
||||
|
||||
/// Return `true` if `story_id` identifies a spike (e.g. `"42_spike_foo"`).
|
||||
///
|
||||
/// Spikes bypass the normal merge pipeline: when approved from QA they are
|
||||
/// merged directly to master and moved straight to done.
|
||||
pub fn is_spike(story_id: &str) -> bool {
|
||||
item_type_from_id(story_id) == "spike"
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // `is_spike` keys off the `{type}` segment of `{number}_{type}_{name}`
    // IDs, so each test exercises one known item type.

    #[test]
    fn is_spike_returns_true_for_spike_id() {
        assert!(is_spike("42_spike_my_research"));
    }

    #[test]
    fn is_spike_returns_false_for_story_id() {
        assert!(!is_spike("42_story_my_feature"));
    }

    #[test]
    fn is_spike_returns_false_for_bug_id() {
        assert!(!is_spike("42_bug_login_crash"));
    }

    #[test]
    fn is_spike_returns_false_for_refactor_id() {
        assert!(!is_spike("42_refactor_cleanup"));
    }
}
|
||||
@@ -0,0 +1,97 @@
|
||||
//! QA service — domain logic for requesting, approving, and rejecting QA reviews.
|
||||
//!
|
||||
//! Extracted from `http/mcp/qa_tools.rs` following the conventions in
|
||||
//! `docs/architecture/service-modules.md`:
|
||||
//! - `mod.rs` (this file) — public API, typed [`Error`], orchestration
|
||||
//! - `io.rs` — the ONLY place that performs side effects (git, TCP, process)
|
||||
//! - `lifecycle.rs` — pure QA routing decisions (spike vs. normal story)
|
||||
|
||||
// Submodules: `io` holds all side-effecting code, `lifecycle` the pure routing.
pub mod io;
pub mod lifecycle;

// Re-export the public surface so callers use `service::qa::…` directly.
pub use io::{find_free_port, merge_spike_branch_to_master};
pub use lifecycle::is_spike;
|
||||
|
||||
// ── Error type ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Typed errors returned by `service::qa` functions.
///
/// HTTP handlers map these to status codes:
/// - [`Error::NotFound`] → 404 Not Found
/// - [`Error::Validation`] → 400 Bad Request
/// - [`Error::Conflict`] → 409 Conflict (merge conflicts)
/// - [`Error::Io`] → 500 Internal Server Error
/// - [`Error::UpstreamFailure`] → 500 Internal Server Error
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The requested story or worktree was not found.
    NotFound(String),
    /// A required argument is missing or has an invalid value.
    Validation(String),
    /// The QA approval cannot proceed due to a git conflict.
    Conflict(String),
    /// A filesystem or process I/O operation failed.
    Io(String),
    /// An upstream dependency (git, agents) returned an unexpected error.
    UpstreamFailure(String),
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::NotFound(msg) => write!(f, "Not found: {msg}"),
            Self::Validation(msg) => write!(f, "Validation error: {msg}"),
            Self::Conflict(msg) => write!(f, "Conflict: {msg}"),
            Self::Io(msg) => write!(f, "I/O error: {msg}"),
            Self::UpstreamFailure(msg) => write!(f, "Upstream failure: {msg}"),
        }
    }
}

// Implementing the std `Error` trait (Debug + Display are already provided)
// lets callers box these as `Box<dyn std::error::Error>` and use `?`-based
// conversions — the conventional contract for library error types.
impl std::error::Error for Error {}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Display formatting: each variant must render with a distinctive prefix
    // so logs and HTTP error bodies can tell the error classes apart.

    #[test]
    fn error_display_not_found() {
        let e = Error::NotFound("story missing".to_string());
        assert!(e.to_string().contains("Not found"));
    }

    #[test]
    fn error_display_validation() {
        let e = Error::Validation("notes required".to_string());
        assert!(e.to_string().contains("Validation error"));
    }

    #[test]
    fn error_display_conflict() {
        let e = Error::Conflict("merge conflict".to_string());
        assert!(e.to_string().contains("Conflict"));
    }

    #[test]
    fn error_display_io() {
        let e = Error::Io("port bind failed".to_string());
        assert!(e.to_string().contains("I/O error"));
    }

    #[test]
    fn error_display_upstream_failure() {
        let e = Error::UpstreamFailure("git not found".to_string());
        assert!(e.to_string().contains("Upstream failure"));
    }

    // Exercises the re-exported `io::find_free_port` end to end.
    #[test]
    fn find_free_port_returns_bindable_port() {
        let port = find_free_port(3100);
        // The returned port must be bindable.
        assert!(
            std::net::TcpListener::bind(("127.0.0.1", port)).is_ok(),
            "port {port} should be bindable"
        );
    }
}
|
||||
@@ -0,0 +1,64 @@
|
||||
//! Shell I/O — the ONLY place in `service::shell/` that may perform side effects.
|
||||
//!
|
||||
//! Side effects here include: filesystem existence and canonicalization checks,
|
||||
//! process spawning via `std::process::Command`, and reading pipe output.
|
||||
//! All pure logic (pattern matching, output truncation, count parsing) lives in
|
||||
//! `path_guard.rs`.
|
||||
|
||||
use super::Error;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Validate that `working_dir` is an absolute path that exists on disk and
|
||||
/// lies inside the project's `.huskies/worktrees/` or `.huskies/merge_workspace/`
|
||||
/// directory. Returns the canonicalized path on success.
|
||||
///
|
||||
/// # Errors
|
||||
/// - [`Error::Validation`] if the path is relative or does not exist.
|
||||
/// - [`Error::PathNotAllowed`] if the path is outside the allowed roots.
|
||||
/// - [`Error::Io`] if canonicalization fails.
|
||||
pub fn validate_working_dir(working_dir: &str, project_root: &Path) -> Result<PathBuf, Error> {
|
||||
let wd = PathBuf::from(working_dir);
|
||||
|
||||
if !wd.is_absolute() {
|
||||
return Err(Error::Validation(
|
||||
"working_dir must be an absolute path".to_string(),
|
||||
));
|
||||
}
|
||||
if !wd.exists() {
|
||||
return Err(Error::Validation(format!(
|
||||
"working_dir does not exist: {working_dir}"
|
||||
)));
|
||||
}
|
||||
|
||||
let worktrees_root = project_root.join(".huskies").join("worktrees");
|
||||
|
||||
let canonical_wd = wd
|
||||
.canonicalize()
|
||||
.map_err(|e| Error::Io(format!("Cannot canonicalize working_dir: {e}")))?;
|
||||
|
||||
let canonical_wt = if worktrees_root.exists() {
|
||||
worktrees_root
|
||||
.canonicalize()
|
||||
.map_err(|e| Error::Io(format!("Cannot canonicalize worktrees root: {e}")))?
|
||||
} else {
|
||||
return Err(Error::PathNotAllowed(
|
||||
"No worktrees directory found in project".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
// Also allow the merge workspace so mergemaster can fix conflicts.
|
||||
let merge_workspace = project_root.join(".huskies").join("merge_workspace");
|
||||
let canonical_mw = merge_workspace.canonicalize().unwrap_or_default();
|
||||
|
||||
let in_worktrees = canonical_wd.starts_with(&canonical_wt);
|
||||
let in_merge_ws =
|
||||
!canonical_mw.as_os_str().is_empty() && canonical_wd.starts_with(&canonical_mw);
|
||||
|
||||
if !in_worktrees && !in_merge_ws {
|
||||
return Err(Error::PathNotAllowed(format!(
|
||||
"working_dir must be inside .huskies/worktrees/ or .huskies/merge_workspace/. Got: {working_dir}"
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(canonical_wd)
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
//! Shell service — command safety, path sandboxing, and output helpers.
|
||||
//!
|
||||
//! Extracted from `http/mcp/shell_tools.rs` following the conventions in
|
||||
//! `docs/architecture/service-modules.md`:
|
||||
//! - `mod.rs` (this file) — public API, typed [`Error`], orchestration
|
||||
//! - `io.rs` — the ONLY place that performs side effects (filesystem checks)
|
||||
//! - `path_guard.rs` — pure command-safety checks and output utilities
|
||||
|
||||
// Submodules: `io` holds all side-effecting code, `path_guard` the pure logic.
pub mod io;
pub mod path_guard;

// Re-export the pure helpers; `#[allow(unused_imports)]` tolerates items that
// are currently consumed only by tests or future call sites.
#[allow(unused_imports)]
pub use path_guard::{
    BLOCKED_BINARIES, BLOCKED_PATTERNS, extract_count, is_dangerous, parse_test_counts,
    truncate_output,
};
|
||||
|
||||
// ── Error type ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Typed errors returned by `service::shell` functions.
///
/// HTTP handlers map these to status codes:
/// - [`Error::DangerousCommand`] → 400 Bad Request
/// - [`Error::PathNotAllowed`] → 400 Bad Request (sandbox violation)
/// - [`Error::Validation`] → 400 Bad Request
/// - [`Error::Io`] → 500 Internal Server Error
/// - [`Error::UpstreamFailure`] → 500 Internal Server Error
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The command matches a blocked pattern or binary.
    DangerousCommand(String),
    /// The working directory is outside the allowed sandbox.
    PathNotAllowed(String),
    /// A required argument is missing or has an invalid value.
    Validation(String),
    /// A filesystem or process I/O operation failed.
    Io(String),
    /// An upstream dependency (e.g. the shell) returned an unexpected error.
    UpstreamFailure(String),
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::DangerousCommand(msg) => write!(f, "Dangerous command: {msg}"),
            Self::PathNotAllowed(msg) => write!(f, "Path not allowed: {msg}"),
            Self::Validation(msg) => write!(f, "Validation error: {msg}"),
            Self::Io(msg) => write!(f, "I/O error: {msg}"),
            Self::UpstreamFailure(msg) => write!(f, "Upstream failure: {msg}"),
        }
    }
}

// Implementing the std `Error` trait (Debug + Display are already provided)
// lets callers box these as `Box<dyn std::error::Error>` and use `?`-based
// conversions — the conventional contract for library error types.
impl std::error::Error for Error {}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Display formatting: each variant must render with a distinctive prefix
    // so logs and HTTP error bodies can tell the error classes apart.

    #[test]
    fn error_display_dangerous_command() {
        let e = Error::DangerousCommand("rm -rf / detected".to_string());
        assert!(e.to_string().contains("Dangerous command"));
    }

    #[test]
    fn error_display_path_not_allowed() {
        let e = Error::PathNotAllowed("outside worktrees".to_string());
        assert!(e.to_string().contains("Path not allowed"));
    }

    #[test]
    fn error_display_validation() {
        let e = Error::Validation("must be absolute".to_string());
        assert!(e.to_string().contains("Validation error"));
    }

    #[test]
    fn error_display_io() {
        let e = Error::Io("disk full".to_string());
        assert!(e.to_string().contains("I/O error"));
    }

    #[test]
    fn error_display_upstream_failure() {
        let e = Error::UpstreamFailure("bash not found".to_string());
        assert!(e.to_string().contains("Upstream failure"));
    }
}
|
||||
@@ -0,0 +1,223 @@
|
||||
//! Pure command safety and output helpers for `service::shell`.
|
||||
//!
|
||||
//! All functions here are free of side effects: no filesystem access,
|
||||
//! no process spawning, no I/O of any kind. They may be tested without
|
||||
//! temporary directories or an async runtime.
|
||||
|
||||
/// Patterns that are unconditionally blocked regardless of context.
pub static BLOCKED_PATTERNS: &[&str] = &[
    "rm -rf /",
    "rm -fr /",
    "rm -rf /*",
    "rm -fr /*",
    "rm --no-preserve-root",
    ":(){ :|:& };:",
    "> /dev/sda",
    "dd if=/dev",
];

/// Binaries that are unconditionally blocked.
pub static BLOCKED_BINARIES: &[&str] = &[
    "sudo", "su", "shutdown", "reboot", "halt", "poweroff", "mkfs",
];

/// Returns an error message if the command matches a blocked pattern or binary,
/// or `None` if the command is safe to run.
///
/// Checks are purely string-based — no I/O or process inspection.
pub fn is_dangerous(command: &str) -> Option<String> {
    let trimmed = command.trim();

    // Substring match against the fixed deny-list; first hit wins.
    if let Some(pattern) = BLOCKED_PATTERNS.iter().find(|p| trimmed.contains(*p)) {
        return Some(format!(
            "Command blocked: dangerous pattern '{pattern}' detected"
        ));
    }

    // Reduce the leading token to a bare binary name (strip any path prefix)
    // so `/usr/bin/sudo` is caught just like `sudo`.
    let binary = trimmed.split_whitespace().next().map(|token| {
        std::path::Path::new(token)
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or(token)
    });

    match binary {
        Some(name) if BLOCKED_BINARIES.contains(&name) => {
            Some(format!("Command blocked: '{name}' is not permitted"))
        }
        _ => None,
    }
}
|
||||
|
||||
/// Truncate `output` to at most `max_lines` lines, keeping the tail.
///
/// If the output fits within `max_lines`, it is returned unchanged.
/// Otherwise a `"[... N lines omitted ...]"` header is prepended to the
/// last `max_lines` lines so callers still see the most recent output.
pub fn truncate_output(output: &str, max_lines: usize) -> String {
    let total = output.lines().count();
    if total <= max_lines {
        return output.to_string();
    }
    // Keep only the newest lines: skip the excess from the front.
    let omitted = total - max_lines;
    let kept: Vec<&str> = output.lines().skip(omitted).collect();
    format!("[... {omitted} lines omitted ...]\n{}", kept.join("\n"))
}
|
||||
|
||||
/// Parse cumulative passed/failed counts from `cargo test` output.
///
/// Scans each line for `"test result:"` summaries and accumulates the
/// `passed` and `failed` counts across all crates in the output.
pub fn parse_test_counts(output: &str) -> (u64, u64) {
    output
        .lines()
        .filter(|line| line.contains("test result:"))
        .fold((0, 0), |(passed, failed), line| {
            (
                passed + extract_count(line, "passed").unwrap_or(0),
                failed + extract_count(line, "failed").unwrap_or(0),
            )
        })
}

/// Extract the integer immediately before `label` in `line`.
///
/// For example, `extract_count("5 passed; 0 failed", "passed")` returns
/// `Some(5)`. Returns `None` if no digit sequence precedes `label`.
pub fn extract_count(line: &str, label: &str) -> Option<u64> {
    let pos = line.find(label)?;
    let before = line[..pos].trim_end();
    // Walk backwards over trailing ASCII digits to find where the number starts.
    let num_start = before
        .char_indices()
        .rev()
        .take_while(|(_, c)| c.is_ascii_digit())
        .last()
        .map(|(i, _)| i)?;
    before[num_start..].parse().ok()
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // ── is_dangerous ──────────────────────────────────────────────────────────
    // The deny-list is substring-based for patterns and first-token-based for
    // binaries, so the cases below cover both mechanisms plus safe commands.

    #[test]
    fn is_dangerous_blocks_rm_rf_root() {
        assert!(is_dangerous("rm -rf /").is_some());
        // Leading/trailing whitespace must not defeat the check.
        assert!(is_dangerous(" rm -rf / ").is_some());
    }

    #[test]
    fn is_dangerous_blocks_rm_fr_root() {
        assert!(is_dangerous("rm -fr /").is_some());
    }

    #[test]
    fn is_dangerous_blocks_rm_rf_star() {
        assert!(is_dangerous("rm -rf /*").is_some());
        assert!(is_dangerous("rm -fr /*").is_some());
    }

    #[test]
    fn is_dangerous_blocks_sudo() {
        assert!(is_dangerous("sudo ls").is_some());
    }

    #[test]
    fn is_dangerous_blocks_shutdown() {
        assert!(is_dangerous("shutdown -h now").is_some());
    }

    #[test]
    fn is_dangerous_blocks_mkfs() {
        assert!(is_dangerous("mkfs /dev/sda1").is_some());
    }

    #[test]
    fn is_dangerous_blocks_fork_bomb() {
        assert!(is_dangerous(":(){ :|:& };:").is_some());
    }

    #[test]
    fn is_dangerous_allows_safe_commands() {
        assert!(is_dangerous("cargo build").is_none());
        assert!(is_dangerous("npm test").is_none());
        assert!(is_dangerous("git status").is_none());
        assert!(is_dangerous("ls -la").is_none());
        // `rm -rf` on a relative path is allowed — only root-targeting forms block.
        assert!(is_dangerous("rm -rf target/").is_none());
    }

    // ── truncate_output ───────────────────────────────────────────────────────

    #[test]
    fn truncate_short_text_unchanged() {
        let text = "line1\nline2\nline3";
        assert_eq!(truncate_output(text, 10), text);
    }

    #[test]
    fn truncate_long_text_keeps_tail() {
        let lines: Vec<String> = (1..=200).map(|i| format!("line {i}")).collect();
        let text = lines.join("\n");
        let result = truncate_output(&text, 50);
        assert!(
            result.contains("line 200"),
            "should keep last line: {result}"
        );
        assert!(
            result.contains("omitted"),
            "should note omitted lines: {result}"
        );
        assert!(
            !result.contains("line 1\n"),
            "should not keep first line: {result}"
        );
    }

    #[test]
    fn truncate_exact_max_unchanged() {
        // Boundary: exactly `max_lines` lines must pass through untouched.
        let text = "a\nb\nc";
        assert_eq!(truncate_output(text, 3), text);
    }

    // ── parse_test_counts ─────────────────────────────────────────────────────

    #[test]
    fn parse_counts_extracts_passed_and_failed() {
        // Counts accumulate across multiple `test result:` summary lines.
        let output = "test result: ok. 5 passed; 0 failed; 0 ignored\ntest result: FAILED. 2 passed; 3 failed;";
        let (passed, failed) = parse_test_counts(output);
        assert_eq!(passed, 7);
        assert_eq!(failed, 3);
    }

    #[test]
    fn parse_counts_no_results_returns_zeros() {
        let (passed, failed) = parse_test_counts("no test output here");
        assert_eq!(passed, 0);
        assert_eq!(failed, 0);
    }

    // ── extract_count ─────────────────────────────────────────────────────────

    #[test]
    fn extract_count_finds_number_before_label() {
        assert_eq!(extract_count("5 passed; 0 failed", "passed"), Some(5));
        assert_eq!(extract_count("0 failed", "failed"), Some(0));
        assert_eq!(extract_count("no number here passed", "passed"), None);
    }

    #[test]
    fn extract_count_returns_none_for_missing_label() {
        assert_eq!(extract_count("5 passed", "failed"), None);
    }
}
|
||||
@@ -0,0 +1,129 @@
|
||||
//! Pure criterion helpers for `service::story`.
|
||||
//!
|
||||
//! These functions parse, validate, and manipulate story acceptance criteria
|
||||
//! without performing any I/O.
|
||||
|
||||
use crate::workflow::{TestCaseResult, TestStatus};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Parse an optional JSON array of test-case objects into a `Vec<TestCaseResult>`.
|
||||
///
|
||||
/// Each object must have `"name"` (string) and `"status"` (`"pass"` or `"fail"`)
|
||||
/// fields. The optional `"details"` field is preserved when present.
|
||||
///
|
||||
/// Returns an empty vector for `None` or `Value::Null` inputs.
|
||||
///
|
||||
/// # Errors
|
||||
/// Returns `Err(String)` if the value is not an array, or if any item is
|
||||
/// missing a required field or has an unrecognised status string.
|
||||
pub fn parse_test_cases(value: Option<&Value>) -> Result<Vec<TestCaseResult>, String> {
|
||||
let arr = match value {
|
||||
Some(Value::Array(a)) => a,
|
||||
Some(Value::Null) | None => return Ok(Vec::new()),
|
||||
_ => return Err("Expected array for test cases".to_string()),
|
||||
};
|
||||
|
||||
arr.iter()
|
||||
.map(|item| {
|
||||
let name = item
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Test case missing 'name'")?
|
||||
.to_string();
|
||||
let status_str = item
|
||||
.get("status")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Test case missing 'status'")?;
|
||||
let status = match status_str {
|
||||
"pass" => TestStatus::Pass,
|
||||
"fail" => TestStatus::Fail,
|
||||
other => {
|
||||
return Err(format!(
|
||||
"Invalid test status '{other}'. Use 'pass' or 'fail'."
|
||||
));
|
||||
}
|
||||
};
|
||||
let details = item
|
||||
.get("details")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(String::from);
|
||||
Ok(TestCaseResult {
|
||||
name,
|
||||
status,
|
||||
details,
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // Empty inputs: both "no value" and JSON null yield an empty vector.

    #[test]
    fn parse_none_returns_empty() {
        let result = parse_test_cases(None).unwrap();
        assert!(result.is_empty());
    }

    #[test]
    fn parse_null_value_returns_empty() {
        let null_val = json!(null);
        let result = parse_test_cases(Some(&null_val)).unwrap();
        assert!(result.is_empty());
    }

    // Happy path: names, statuses, and the optional details field round-trip.

    #[test]
    fn parse_valid_cases() {
        let input = json!([
            {"name": "test1", "status": "pass"},
            {"name": "test2", "status": "fail", "details": "assertion failed"}
        ]);
        let result = parse_test_cases(Some(&input)).unwrap();
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].name, "test1");
        assert_eq!(result[0].status, TestStatus::Pass);
        assert_eq!(result[1].status, TestStatus::Fail);
        assert_eq!(result[1].details, Some("assertion failed".to_string()));
    }

    // Malformed inputs: each violation produces a descriptive error string.

    #[test]
    fn parse_invalid_status_returns_error() {
        let input = json!([{"name": "t", "status": "maybe"}]);
        assert!(parse_test_cases(Some(&input)).is_err());
    }

    #[test]
    fn parse_non_array_returns_error() {
        let obj = json!({"invalid": "input"});
        let result = parse_test_cases(Some(&obj));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("Expected array"));
    }

    #[test]
    fn parse_missing_name_returns_error() {
        let input = json!([{"status": "pass"}]);
        let result = parse_test_cases(Some(&input));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("name"));
    }

    #[test]
    fn parse_missing_status_returns_error() {
        let input = json!([{"name": "test1"}]);
        let result = parse_test_cases(Some(&input));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("status"));
    }

    #[test]
    fn parse_details_is_optional() {
        let input = json!([{"name": "no_details", "status": "pass"}]);
        let result = parse_test_cases(Some(&input)).unwrap();
        assert_eq!(result[0].details, None);
    }
}
|
||||
@@ -0,0 +1,77 @@
|
||||
//! Pure front-matter helpers for `service::story`.
|
||||
//!
|
||||
//! These functions validate and inspect story front-matter field values
|
||||
//! without performing any I/O. Parsing is delegated to `crate::io::story_metadata`.
|
||||
|
||||
#[allow(dead_code)]
/// Return `true` if `stage` is a recognised pipeline stage directory name.
///
/// Valid stage names match the `.huskies/work/N_name/` directory scheme.
pub fn is_valid_stage(stage: &str) -> bool {
    const STAGES: [&str; 6] = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    STAGES.contains(&stage)
}
|
||||
|
||||
#[allow(dead_code)]
/// Map a human-readable stage alias (e.g. `"backlog"`) to its directory name
/// (e.g. `"1_backlog"`). Returns `None` for unrecognised aliases.
pub fn stage_alias_to_dir(alias: &str) -> Option<&'static str> {
    const DIRS: [&str; 6] = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    // Accept either the full directory name or its short alias, which is the
    // directory name with the `N_` ordinal prefix stripped.
    DIRS.into_iter().find(|dir| {
        *dir == alias || dir.split_once('_').map_or(false, |(_, short)| short == alias)
    })
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // `is_valid_stage` accepts only the exact `N_name` directory names.

    #[test]
    fn is_valid_stage_accepts_all_known_stages() {
        assert!(is_valid_stage("1_backlog"));
        assert!(is_valid_stage("2_current"));
        assert!(is_valid_stage("3_qa"));
        assert!(is_valid_stage("4_merge"));
        assert!(is_valid_stage("5_done"));
        assert!(is_valid_stage("6_archived"));
    }

    #[test]
    fn is_valid_stage_rejects_unknown() {
        // Short aliases are NOT valid directory names — only the aliased
        // lookup in `stage_alias_to_dir` accepts them.
        assert!(!is_valid_stage("current"));
        assert!(!is_valid_stage("backlog"));
        assert!(!is_valid_stage("7_future"));
        assert!(!is_valid_stage(""));
    }

    // `stage_alias_to_dir` accepts both short aliases and full directory names.

    #[test]
    fn stage_alias_maps_short_names() {
        assert_eq!(stage_alias_to_dir("backlog"), Some("1_backlog"));
        assert_eq!(stage_alias_to_dir("current"), Some("2_current"));
        assert_eq!(stage_alias_to_dir("qa"), Some("3_qa"));
        assert_eq!(stage_alias_to_dir("merge"), Some("4_merge"));
        assert_eq!(stage_alias_to_dir("done"), Some("5_done"));
        assert_eq!(stage_alias_to_dir("archived"), Some("6_archived"));
    }

    #[test]
    fn stage_alias_maps_full_dir_names() {
        assert_eq!(stage_alias_to_dir("1_backlog"), Some("1_backlog"));
        assert_eq!(stage_alias_to_dir("6_archived"), Some("6_archived"));
    }

    #[test]
    fn stage_alias_returns_none_for_unknown() {
        assert_eq!(stage_alias_to_dir("unknown"), None);
        assert_eq!(stage_alias_to_dir(""), None);
    }
}
|
||||
@@ -0,0 +1,7 @@
|
||||
//! Story I/O — the ONLY place in `service::story/` that may perform side effects.
|
||||
//!
|
||||
//! Currently, the bulk of story file I/O is handled by `crate::http::workflow`
|
||||
//! (story file creation, criterion editing, stage moves) and
|
||||
//! `crate::io::story_metadata` (front-matter parsing, merge-failure writes).
|
||||
//! This file is the designated home for any future story-specific I/O helpers
|
||||
//! that are extracted from those modules.
|
||||
@@ -0,0 +1,58 @@
|
||||
//! Pure story-lifecycle helpers for `service::story`.
|
||||
//!
|
||||
//! These functions reason about story IDs and dependencies without performing
|
||||
//! any I/O. They inform routing decisions in `mod.rs` and the MCP adapter.
|
||||
|
||||
#[allow(dead_code)]
/// Extract the numeric prefix from a story ID (e.g. `"42"` from `"42_story_foo"`).
///
/// Returns `None` if the ID has no leading digit sequence.
pub fn story_number(story_id: &str) -> Option<&str> {
    // Everything before the first underscore (or the whole ID if none).
    let prefix = story_id.split('_').next()?;
    // The prefix counts only when it is a non-empty run of ASCII digits.
    (!prefix.is_empty() && prefix.bytes().all(|b| b.is_ascii_digit())).then_some(prefix)
}

#[allow(dead_code)]
/// Return `true` if `story_id` has a valid `{digits}_` prefix format.
///
/// Valid: `"42_story_foo"`, `"1_bug_bar"`.
/// Invalid: `"story_without_number"`, `""`, `"abc_story"`.
pub fn has_valid_id_prefix(story_id: &str) -> bool {
    matches!(story_number(story_id), Some(_))
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // `story_number` pulls the run of digits before the first underscore.

    #[test]
    fn story_number_extracts_prefix() {
        assert_eq!(story_number("42_story_foo"), Some("42"));
        assert_eq!(story_number("1_bug_bar"), Some("1"));
        assert_eq!(story_number("100_refactor_baz"), Some("100"));
    }

    #[test]
    fn story_number_returns_none_for_no_numeric_prefix() {
        assert_eq!(story_number("story_without_number"), None);
        assert_eq!(story_number("abc_story"), None);
        assert_eq!(story_number(""), None);
    }

    // `has_valid_id_prefix` is simply "does `story_number` succeed".

    #[test]
    fn has_valid_id_prefix_returns_true_for_valid_ids() {
        assert!(has_valid_id_prefix("42_story_foo"));
        assert!(has_valid_id_prefix("1_bug_bar"));
    }

    #[test]
    fn has_valid_id_prefix_returns_false_for_invalid_ids() {
        assert!(!has_valid_id_prefix("story_no_number"));
        assert!(!has_valid_id_prefix(""));
    }
}
|
||||
@@ -0,0 +1,120 @@
|
||||
//! Story service — domain logic for creating, updating, and managing pipeline work items.
|
||||
//!
|
||||
//! Extracted from `http/mcp/story_tools.rs` following the conventions in
|
||||
//! `docs/architecture/service-modules.md`:
|
||||
//! - `mod.rs` (this file) — public API, typed [`Error`], orchestration
|
||||
//! - `io.rs` — the ONLY place that performs side effects (story file I/O)
|
||||
//! - `criteria.rs` — pure criterion parsing and validation
|
||||
//! - `lifecycle.rs` — pure story-ID and lifecycle helpers
|
||||
//! - `front_matter.rs` — pure front-matter field validation
|
||||
//! - `validation.rs` — pure story content validation
|
||||
//!
|
||||
//! # State-Mutation Inventory (Axis 3)
|
||||
//!
|
||||
//! The story service orchestrates the following state writes across all
|
||||
//! lifecycle operations:
|
||||
//!
|
||||
//! ## CRDT Writes
|
||||
//! - `crdt_state::write_item` — on story/bug/spike/refactor creation (via `http::workflow`)
|
||||
//! - `crdt_state::evict_item` — on story deletion or purge (tombstone op)
|
||||
//! - `crdt_state::move_item` — on stage transitions (backlog → current → qa → merge → done → archived)
|
||||
//!
|
||||
//! ## Filesystem Shadow Writes
|
||||
//! - `.huskies/work/1_backlog/<id>.md` — written on story/bug/spike/refactor creation
|
||||
//! - `.huskies/work/<stage>/<id>.md` — moved between stages on lifecycle transitions
|
||||
//! - `.huskies/work/<stage>/<id>.md` front matter — updated by `update_story`, `check_criterion`,
|
||||
//! `edit_criterion`, `add_criterion`, `remove_criterion`, `accept_story`, `reject_qa`
|
||||
//! - `.huskies/bugs/archive/<id>.md` — written on `close_bug`
|
||||
//! - `.huskies/work/4_merge/<id>.md` merge_failure field — written by `report_merge_failure`
|
||||
//!
|
||||
//! ## Database Shadow Table
|
||||
//! - `pipeline_items` row — updated on stage transitions and item creation/deletion
|
||||
//! - `content_store` entry — updated on story content changes, deleted on purge/delete
|
||||
|
||||
pub mod criteria;
pub mod front_matter;
pub mod io;
pub mod lifecycle;
pub mod validation;

// Re-exports forming the service's public surface.
// NOTE(review): the `#[allow(unused_imports)]` markers suggest these helpers
// are not yet consumed by callers — presumably pending the handler migration
// described in the module docs. TODO: confirm, and drop each `allow` once a
// call site exists so dead re-exports surface again.
pub use criteria::parse_test_cases;
#[allow(unused_imports)]
pub use front_matter::{is_valid_stage, stage_alias_to_dir};
#[allow(unused_imports)]
pub use lifecycle::{has_valid_id_prefix, story_number};
#[allow(unused_imports)]
pub use validation::{is_valid_story_name, validate_criterion_index};
|
||||
|
||||
// ── Error type ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Typed errors returned by `service::story` functions.
///
/// HTTP handlers map these to status codes:
/// - [`Error::NotFound`] → 404 Not Found
/// - [`Error::Validation`] → 400 Bad Request
/// - [`Error::Conflict`] → 409 Conflict
/// - [`Error::Io`] → 500 Internal Server Error
/// - [`Error::UpstreamFailure`] → 500 Internal Server Error
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The requested story or work item was not found.
    NotFound(String),
    /// A required argument is missing or has an invalid value.
    Validation(String),
    /// The operation cannot proceed due to a conflicting state (e.g. unmerged branch).
    Conflict(String),
    /// A filesystem read or write operation failed.
    Io(String),
    /// An upstream dependency (agents, CRDT, git) returned an unexpected error.
    UpstreamFailure(String),
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::NotFound(msg) => write!(f, "Not found: {msg}"),
            Self::Validation(msg) => write!(f, "Validation error: {msg}"),
            Self::Conflict(msg) => write!(f, "Conflict: {msg}"),
            Self::Io(msg) => write!(f, "I/O error: {msg}"),
            Self::UpstreamFailure(msg) => write!(f, "Upstream failure: {msg}"),
        }
    }
}

// Implementing `std::error::Error` (Display + Debug already satisfied above)
// lets callers box these as `Box<dyn Error>`, chain them with `?`, or wrap
// them in `anyhow::Error` without bespoke glue.
impl std::error::Error for Error {}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Assert that `err` renders via `Display` with `needle` somewhere in it.
    fn assert_displays(err: Error, needle: &str) {
        let rendered = err.to_string();
        assert!(
            rendered.contains(needle),
            "expected {rendered:?} to contain {needle:?}"
        );
    }

    #[test]
    fn error_display_not_found() {
        assert_displays(Error::NotFound("story 42 not found".to_string()), "Not found");
    }

    #[test]
    fn error_display_validation() {
        assert_displays(Error::Validation("name is required".to_string()), "Validation error");
    }

    #[test]
    fn error_display_conflict() {
        assert_displays(Error::Conflict("unmerged changes".to_string()), "Conflict");
    }

    #[test]
    fn error_display_io() {
        assert_displays(Error::Io("story file write failed".to_string()), "I/O error");
    }

    #[test]
    fn error_display_upstream_failure() {
        assert_displays(
            Error::UpstreamFailure("CRDT eviction failed".to_string()),
            "Upstream failure",
        );
    }
}
|
||||
@@ -0,0 +1,70 @@
|
||||
//! Pure story validation helpers for `service::story`.
|
||||
//!
|
||||
//! These functions validate story content and metadata without performing
|
||||
//! any I/O.
|
||||
|
||||
/// Whether `name` qualifies as a story name: it must contain at least one
/// letter or digit (Unicode-aware). Names made purely of punctuation,
/// symbols, or whitespace — and the empty string — are rejected.
#[allow(dead_code)]
pub fn is_valid_story_name(name: &str) -> bool {
    name.chars().any(char::is_alphanumeric)
}
|
||||
|
||||
/// Validate that `criterion_index` addresses an existing criterion when the
/// story has `total_criteria` criteria (indices are 0-based).
///
/// Returns `Ok(())` when in range, or `Err(message)` describing the bounds
/// violation otherwise.
#[allow(dead_code)]
pub fn validate_criterion_index(
    criterion_index: usize,
    total_criteria: usize,
) -> Result<(), String> {
    // Guard clause: the happy path exits immediately.
    if criterion_index < total_criteria {
        return Ok(());
    }
    Err(format!(
        "criterion_index {criterion_index} is out of range — story has {total_criteria} criteria (0-based)"
    ))
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn is_valid_story_name_accepts_alphanumeric() {
        for name in ["My Story", "story123", "Test"] {
            assert!(is_valid_story_name(name), "expected {name:?} to be valid");
        }
    }

    #[test]
    fn is_valid_story_name_rejects_symbols_only() {
        for name in ["!!!", "---", ""] {
            assert!(!is_valid_story_name(name), "expected {name:?} to be invalid");
        }
    }

    #[test]
    fn validate_criterion_index_in_range() {
        for idx in [0, 2] {
            assert!(validate_criterion_index(idx, 3).is_ok(), "index {idx}");
        }
    }

    #[test]
    fn validate_criterion_index_out_of_range() {
        let msg = validate_criterion_index(3, 3).unwrap_err();
        assert!(msg.contains("out of range"), "got {msg:?}");
        assert!(msg.contains("3"), "got {msg:?}");
    }

    #[test]
    fn validate_criterion_index_zero_criteria() {
        let msg = validate_criterion_index(0, 0).unwrap_err();
        assert!(msg.contains("out of range"), "got {msg:?}");
    }
}
|
||||
Reference in New Issue
Block a user