story-kit: merge 318_refactor_split_mcp_rs_into_domain_specific_tool_modules
This commit is contained in:
File diff suppressed because it is too large
Load Diff
731
server/src/http/mcp/agent_tools.rs
Normal file
731
server/src/http/mcp/agent_tools.rs
Normal file
@@ -0,0 +1,731 @@
|
||||
use crate::agents::PipelineStage;
|
||||
use crate::config::ProjectConfig;
|
||||
use crate::http::context::AppContext;
|
||||
use crate::http::settings::get_editor_command_from_store;
|
||||
use crate::slog_warn;
|
||||
use crate::worktree;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
pub(super) async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let agent_name = args.get("agent_name").and_then(|v| v.as_str());
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
let info = ctx
|
||||
.agents
|
||||
.start_agent(&project_root, story_id, agent_name, None)
|
||||
.await?;
|
||||
|
||||
// Snapshot coverage baseline from the most recent coverage report (best-effort).
|
||||
if let Some(pct) = read_coverage_percent_from_json(&project_root)
|
||||
&& let Err(e) = crate::http::workflow::write_coverage_baseline_to_story_file(
|
||||
&project_root,
|
||||
story_id,
|
||||
pct,
|
||||
)
|
||||
{
|
||||
slog_warn!("[start_agent] Could not write coverage baseline to story file: {e}");
|
||||
}
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": info.story_id,
|
||||
"agent_name": info.agent_name,
|
||||
"status": info.status.to_string(),
|
||||
"session_id": info.session_id,
|
||||
"worktree_path": info.worktree_path,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
/// Try to read the overall line coverage percentage from the llvm-cov JSON report.
|
||||
///
|
||||
/// Expects the file at `{project_root}/.story_kit/coverage/server.json`.
|
||||
/// Returns `None` if the file is absent, unreadable, or cannot be parsed.
|
||||
pub(super) fn read_coverage_percent_from_json(project_root: &std::path::Path) -> Option<f64> {
|
||||
let path = project_root
|
||||
.join(".story_kit")
|
||||
.join("coverage")
|
||||
.join("server.json");
|
||||
let contents = std::fs::read_to_string(&path).ok()?;
|
||||
let json: Value = serde_json::from_str(&contents).ok()?;
|
||||
// cargo llvm-cov --json format: data[0].totals.lines.percent
|
||||
json.pointer("/data/0/totals/lines/percent")
|
||||
.and_then(|v| v.as_f64())
|
||||
}
|
||||
|
||||
/// MCP tool: stop a running agent.
///
/// Both `story_id` and `agent_name` are required arguments.
pub(super) async fn tool_stop_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
    // Small helper so the two required string arguments read identically.
    let get_str = |key: &str| args.get(key).and_then(|v| v.as_str());
    let story_id = get_str("story_id").ok_or("Missing required argument: story_id")?;
    let agent_name = get_str("agent_name").ok_or("Missing required argument: agent_name")?;

    let root = ctx.agents.get_project_root(&ctx.state)?;
    ctx.agents.stop_agent(&root, story_id, agent_name).await?;

    Ok(format!("Agent '{agent_name}' for story '{story_id}' stopped."))
}
|
||||
|
||||
pub(super) fn tool_list_agents(ctx: &AppContext) -> Result<String, String> {
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state).ok();
|
||||
let agents = ctx.agents.list_agents()?;
|
||||
serde_json::to_string_pretty(&json!(agents
|
||||
.iter()
|
||||
.filter(|a| {
|
||||
project_root
|
||||
.as_deref()
|
||||
.map(|root| !crate::http::agents::story_is_archived(root, &a.story_id))
|
||||
.unwrap_or(true)
|
||||
})
|
||||
.map(|a| json!({
|
||||
"story_id": a.story_id,
|
||||
"agent_name": a.agent_name,
|
||||
"status": a.status.to_string(),
|
||||
"session_id": a.session_id,
|
||||
"worktree_path": a.worktree_path,
|
||||
}))
|
||||
.collect::<Vec<_>>()))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) async fn tool_get_agent_output_poll(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let agent_name = args
|
||||
.get("agent_name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: agent_name")?;
|
||||
|
||||
// Try draining in-memory events first.
|
||||
match ctx.agents.drain_events(story_id, agent_name) {
|
||||
Ok(drained) => {
|
||||
let done = drained.iter().any(|e| {
|
||||
matches!(
|
||||
e,
|
||||
crate::agents::AgentEvent::Done { .. }
|
||||
| crate::agents::AgentEvent::Error { .. }
|
||||
)
|
||||
});
|
||||
|
||||
let events: Vec<serde_json::Value> = drained
|
||||
.into_iter()
|
||||
.filter_map(|e| serde_json::to_value(&e).ok())
|
||||
.collect();
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"events": events,
|
||||
"done": done,
|
||||
"event_count": events.len(),
|
||||
"message": if done { "Agent stream ended." } else if events.is_empty() { "No new events. Call again to continue." } else { "Events returned. Call again to continue." }
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
Err(_) => {
|
||||
// Agent not in memory — fall back to persistent log file.
|
||||
get_agent_output_from_log(story_id, agent_name, ctx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fall back to reading agent output from the persistent log file on disk.
///
/// Tries to find the log file via the agent's stored log_session_id first,
/// then falls back to `find_latest_log` scanning the log directory.
///
/// Returns the same JSON envelope shape as the in-memory poll path
/// (`events` / `done` / `event_count` / `message`), plus a `source` field
/// ("none" or "log_file") so callers can tell where the data came from.
/// Only an unreadable log file is reported as `Err`; a missing log is a
/// successful, empty, terminal response.
pub(super) fn get_agent_output_from_log(
    story_id: &str,
    agent_name: &str,
    ctx: &AppContext,
) -> Result<String, String> {
    use crate::agent_log;

    let project_root = ctx.agents.get_project_root(&ctx.state)?;

    // Try to find the log file: first from in-memory agent info, then by scanning.
    // `.filter(|p| p.exists())` discards a stale session path so the scan
    // fallback still gets a chance.
    let log_path = ctx
        .agents
        .get_log_info(story_id, agent_name)
        .map(|(session_id, root)| agent_log::log_file_path(&root, story_id, agent_name, &session_id))
        .filter(|p| p.exists())
        .or_else(|| agent_log::find_latest_log(&project_root, story_id, agent_name));

    let log_path = match log_path {
        Some(p) => p,
        None => {
            // No log anywhere: report an empty, terminal response rather than
            // an error, so pollers stop cleanly.
            return serde_json::to_string_pretty(&json!({
                "events": [],
                "done": true,
                "event_count": 0,
                "message": format!("No agent '{agent_name}' for story '{story_id}' and no log file found."),
                "source": "none",
            }))
            .map_err(|e| format!("Serialization error: {e}"));
        }
    };

    match agent_log::read_log(&log_path) {
        Ok(entries) => {
            // Fold each entry's timestamp into its event payload; non-object
            // events are passed through unchanged (there is nowhere to put
            // the timestamp).
            let events: Vec<serde_json::Value> = entries
                .into_iter()
                .map(|e| {
                    let mut val = e.event;
                    if let serde_json::Value::Object(ref mut map) = val {
                        map.insert(
                            "timestamp".to_string(),
                            serde_json::Value::String(e.timestamp),
                        );
                    }
                    val
                })
                .collect();

            let count = events.len();
            serde_json::to_string_pretty(&json!({
                "events": events,
                "done": true,
                "event_count": count,
                "message": "Events loaded from persistent log file.",
                "source": "log_file",
                "log_file": log_path.to_string_lossy(),
            }))
            .map_err(|e| format!("Serialization error: {e}"))
        }
        Err(e) => Err(format!("Failed to read log file: {e}")),
    }
}
|
||||
|
||||
pub(super) fn tool_get_agent_config(ctx: &AppContext) -> Result<String, String> {
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
let config = ProjectConfig::load(&project_root)?;
|
||||
|
||||
// Collect available (idle) agent names across all stages so the caller can
|
||||
// see at a glance which agents are free to start (story 190).
|
||||
let mut available_names: std::collections::HashSet<String> =
|
||||
std::collections::HashSet::new();
|
||||
for stage in &[
|
||||
PipelineStage::Coder,
|
||||
PipelineStage::Qa,
|
||||
PipelineStage::Mergemaster,
|
||||
PipelineStage::Other,
|
||||
] {
|
||||
if let Ok(names) = ctx.agents.available_agents_for_stage(&config, stage) {
|
||||
available_names.extend(names);
|
||||
}
|
||||
}
|
||||
|
||||
serde_json::to_string_pretty(&json!(config
|
||||
.agent
|
||||
.iter()
|
||||
.map(|a| json!({
|
||||
"name": a.name,
|
||||
"role": a.role,
|
||||
"model": a.model,
|
||||
"allowed_tools": a.allowed_tools,
|
||||
"max_turns": a.max_turns,
|
||||
"max_budget_usd": a.max_budget_usd,
|
||||
"available": available_names.contains(&a.name),
|
||||
}))
|
||||
.collect::<Vec<_>>()))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) async fn tool_wait_for_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let agent_name = args
|
||||
.get("agent_name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: agent_name")?;
|
||||
let timeout_ms = args
|
||||
.get("timeout_ms")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(300_000); // default: 5 minutes
|
||||
|
||||
let info = ctx
|
||||
.agents
|
||||
.wait_for_agent(story_id, agent_name, timeout_ms)
|
||||
.await?;
|
||||
|
||||
let commits = match (&info.worktree_path, &info.base_branch) {
|
||||
(Some(wt_path), Some(base)) => get_worktree_commits(wt_path, base).await,
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let completion = info.completion.as_ref().map(|r| json!({
|
||||
"summary": r.summary,
|
||||
"gates_passed": r.gates_passed,
|
||||
"gate_output": r.gate_output,
|
||||
}));
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": info.story_id,
|
||||
"agent_name": info.agent_name,
|
||||
"status": info.status.to_string(),
|
||||
"session_id": info.session_id,
|
||||
"worktree_path": info.worktree_path,
|
||||
"base_branch": info.base_branch,
|
||||
"commits": commits,
|
||||
"completion": completion,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) async fn tool_create_worktree(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
let info = ctx.agents.create_worktree(&project_root, story_id).await?;
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": story_id,
|
||||
"worktree_path": info.path.to_string_lossy(),
|
||||
"branch": info.branch,
|
||||
"base_branch": info.base_branch,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_list_worktrees(ctx: &AppContext) -> Result<String, String> {
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
let entries = worktree::list_worktrees(&project_root)?;
|
||||
|
||||
serde_json::to_string_pretty(&json!(entries
|
||||
.iter()
|
||||
.map(|e| json!({
|
||||
"story_id": e.story_id,
|
||||
"path": e.path.to_string_lossy(),
|
||||
}))
|
||||
.collect::<Vec<_>>()))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) async fn tool_remove_worktree(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
let config = ProjectConfig::load(&project_root)?;
|
||||
worktree::remove_worktree_by_story_id(&project_root, story_id, &config).await?;
|
||||
|
||||
Ok(format!("Worktree for story '{story_id}' removed."))
|
||||
}
|
||||
|
||||
pub(super) fn tool_get_editor_command(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let worktree_path = args
|
||||
.get("worktree_path")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: worktree_path")?;
|
||||
|
||||
let editor = get_editor_command_from_store(ctx)
|
||||
.ok_or_else(|| "No editor configured. Set one via PUT /api/settings/editor.".to_string())?;
|
||||
|
||||
Ok(format!("{editor} {worktree_path}"))
|
||||
}
|
||||
|
||||
/// Run `git log <base>..HEAD --oneline` in the worktree and return the commit
|
||||
/// summaries, or `None` if git is unavailable or there are no new commits.
|
||||
pub(super) async fn get_worktree_commits(worktree_path: &str, base_branch: &str) -> Option<Vec<String>> {
|
||||
let wt = worktree_path.to_string();
|
||||
let base = base_branch.to_string();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let output = std::process::Command::new("git")
|
||||
.args(["log", &format!("{base}..HEAD"), "--oneline"])
|
||||
.current_dir(&wt)
|
||||
.output()
|
||||
.ok()?;
|
||||
|
||||
if output.status.success() {
|
||||
let lines: Vec<String> = String::from_utf8(output.stdout)
|
||||
.ok()?
|
||||
.lines()
|
||||
.filter(|l| !l.is_empty())
|
||||
.map(|l| l.to_string())
|
||||
.collect();
|
||||
Some(lines)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for the agent MCP tools. Each test builds a throwaway
    // AppContext over a tempdir (no git repo, no project.toml unless the
    // test writes one), so most tests exercise argument validation and the
    // "nothing configured yet" paths.
    use super::*;
    use crate::http::context::AppContext;
    use crate::store::StoreOps;

    // Build a test AppContext rooted at `dir`.
    fn test_ctx(dir: &std::path::Path) -> AppContext {
        AppContext::new_test(dir.to_path_buf())
    }

    #[test]
    fn tool_list_agents_empty() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_list_agents(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
        assert!(parsed.is_empty());
    }

    #[test]
    fn tool_get_agent_config_no_project_toml_returns_default_agent() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        // No project.toml → default config with one fallback agent
        let result = tool_get_agent_config(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
        // Default config contains one agent entry with default values
        assert_eq!(parsed.len(), 1, "default config should have one fallback agent");
        assert!(parsed[0].get("name").is_some());
        assert!(parsed[0].get("role").is_some());
    }

    #[tokio::test]
    async fn tool_get_agent_output_poll_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_get_agent_output_poll(&json!({"agent_name": "bot"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_get_agent_output_poll_missing_agent_name() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result =
            tool_get_agent_output_poll(&json!({"story_id": "1_test"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("agent_name"));
    }

    #[tokio::test]
    async fn tool_get_agent_output_poll_no_agent_falls_back_to_empty_log() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        // No agent registered, no log file → returns empty response from log fallback
        let result = tool_get_agent_output_poll(
            &json!({"story_id": "99_nope", "agent_name": "bot"}),
            &ctx,
        )
        .await
        .unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["done"], true);
        assert_eq!(parsed["event_count"], 0);
        assert!(
            parsed["message"].as_str().unwrap_or("").contains("No agent"),
            "expected 'No agent' message: {parsed}"
        );
    }

    #[tokio::test]
    async fn tool_get_agent_output_poll_with_running_agent_returns_empty_events() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        // Inject a running agent — no events broadcast yet
        ctx.agents
            .inject_test_agent("10_story", "worker", crate::agents::AgentStatus::Running);
        let result = tool_get_agent_output_poll(
            &json!({"story_id": "10_story", "agent_name": "worker"}),
            &ctx,
        )
        .await
        .unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // Running agent with no events: not done, empty (but present) events array.
        assert_eq!(parsed["done"], false);
        assert_eq!(parsed["event_count"], 0);
        assert!(parsed["events"].is_array());
    }

    #[tokio::test]
    async fn tool_stop_agent_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_stop_agent(&json!({"agent_name": "bot"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_stop_agent_missing_agent_name() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_stop_agent(&json!({"story_id": "1_test"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("agent_name"));
    }

    #[tokio::test]
    async fn tool_start_agent_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_start_agent(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_start_agent_no_agent_name_no_coder_returns_clear_error() {
        // Config has only a supervisor — start_agent without agent_name should
        // refuse rather than silently assigning supervisor.
        let tmp = tempfile::tempdir().unwrap();
        let sk = tmp.path().join(".story_kit");
        std::fs::create_dir_all(&sk).unwrap();
        std::fs::write(
            sk.join("project.toml"),
            r#"
[[agent]]
name = "supervisor"
stage = "other"
"#,
        )
        .unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_start_agent(&json!({"story_id": "42_my_story"}), &ctx).await;
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(
            err.contains("coder"),
            "error should mention 'coder', got: {err}"
        );
    }

    #[tokio::test]
    async fn tool_start_agent_no_agent_name_picks_coder_not_supervisor() {
        // Config has supervisor first, then coder-1. Without agent_name the
        // coder should be selected, not supervisor. The call will fail due to
        // missing git repo / worktree, but the error must NOT be about
        // "No coder agent configured".
        let tmp = tempfile::tempdir().unwrap();
        let sk = tmp.path().join(".story_kit");
        std::fs::create_dir_all(&sk).unwrap();
        std::fs::write(
            sk.join("project.toml"),
            r#"
[[agent]]
name = "supervisor"
stage = "other"

[[agent]]
name = "coder-1"
stage = "coder"
"#,
        )
        .unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_start_agent(&json!({"story_id": "42_my_story"}), &ctx).await;
        // May succeed or fail for infrastructure reasons (no git repo), but
        // must NOT fail with "No coder agent configured".
        if let Err(err) = result {
            assert!(
                !err.contains("No coder agent configured"),
                "should not fail on agent selection, got: {err}"
            );
            // Should also not complain about supervisor being absent.
            assert!(
                !err.contains("supervisor"),
                "should not select supervisor, got: {err}"
            );
        }
    }

    #[tokio::test]
    async fn tool_create_worktree_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_create_worktree(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_remove_worktree_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_remove_worktree(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn tool_list_worktrees_empty_dir() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_list_worktrees(&ctx).unwrap();
        let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
        assert!(parsed.is_empty());
    }

    // ── Editor command tool tests ─────────────────────────────────

    #[test]
    fn tool_get_editor_command_missing_worktree_path() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_get_editor_command(&json!({}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("worktree_path"));
    }

    #[test]
    fn tool_get_editor_command_no_editor_configured() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_get_editor_command(
            &json!({"worktree_path": "/some/path"}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("No editor configured"));
    }

    #[test]
    fn tool_get_editor_command_formats_correctly() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        // Configure an editor in the settings store, then expect "<editor> <path>".
        ctx.store.set("editor_command", json!("zed"));

        let result = tool_get_editor_command(
            &json!({"worktree_path": "/home/user/worktrees/37_my_story"}),
            &ctx,
        )
        .unwrap();
        assert_eq!(result, "zed /home/user/worktrees/37_my_story");
    }

    #[test]
    fn tool_get_editor_command_works_with_vscode() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        ctx.store.set("editor_command", json!("code"));

        let result = tool_get_editor_command(
            &json!({"worktree_path": "/path/to/worktree"}),
            &ctx,
        )
        .unwrap();
        assert_eq!(result, "code /path/to/worktree");
    }

    #[test]
    fn get_editor_command_in_tools_list() {
        // Schema check: the tool is registered and requires worktree_path.
        use super::super::{handle_tools_list};
        let resp = handle_tools_list(Some(json!(1)));
        let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
        let tool = tools.iter().find(|t| t["name"] == "get_editor_command");
        assert!(tool.is_some(), "get_editor_command missing from tools list");
        let t = tool.unwrap();
        assert!(t["description"].is_string());
        let required = t["inputSchema"]["required"].as_array().unwrap();
        let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(req_names.contains(&"worktree_path"));
    }

    #[tokio::test]
    async fn wait_for_agent_tool_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_wait_for_agent(&json!({"agent_name": "bot"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn wait_for_agent_tool_missing_agent_name() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_wait_for_agent(&json!({"story_id": "1_test"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("agent_name"));
    }

    #[tokio::test]
    async fn wait_for_agent_tool_nonexistent_agent_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result =
            tool_wait_for_agent(&json!({"story_id": "99_nope", "agent_name": "bot", "timeout_ms": 50}), &ctx)
                .await;
        // No agent registered — should error
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn wait_for_agent_tool_returns_completed_agent() {
        use crate::agents::AgentStatus;
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        ctx.agents
            .inject_test_agent("41_story", "worker", AgentStatus::Completed);

        let result = tool_wait_for_agent(
            &json!({"story_id": "41_story", "agent_name": "worker"}),
            &ctx,
        )
        .await
        .unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["status"], "completed");
        assert_eq!(parsed["story_id"], "41_story");
        assert_eq!(parsed["agent_name"], "worker");
        // commits key present (may be null since no real worktree)
        assert!(parsed.get("commits").is_some());
        // completion key present (null for agents that didn't call report_completion)
        assert!(parsed.get("completion").is_some());
    }

    #[test]
    fn wait_for_agent_tool_in_list() {
        // Schema check: wait_for_agent is registered with both required args.
        use super::super::{handle_tools_list};
        let resp = handle_tools_list(Some(json!(1)));
        let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
        let wait_tool = tools.iter().find(|t| t["name"] == "wait_for_agent");
        assert!(wait_tool.is_some(), "wait_for_agent missing from tools list");
        let t = wait_tool.unwrap();
        assert!(t["description"].as_str().unwrap().contains("block") || t["description"].as_str().unwrap().contains("Block"));
        let required = t["inputSchema"]["required"].as_array().unwrap();
        let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(req_names.contains(&"story_id"));
        assert!(req_names.contains(&"agent_name"));
    }

    #[test]
    fn read_coverage_percent_from_json_parses_llvm_cov_format() {
        use std::fs;
        let tmp = tempfile::tempdir().unwrap();
        let cov_dir = tmp.path().join(".story_kit/coverage");
        fs::create_dir_all(&cov_dir).unwrap();
        let json_content = r#"{"data":[{"totals":{"lines":{"count":100,"covered":78,"percent":78.0}}}]}"#;
        fs::write(cov_dir.join("server.json"), json_content).unwrap();

        let pct = read_coverage_percent_from_json(tmp.path());
        assert_eq!(pct, Some(78.0));
    }

    #[test]
    fn read_coverage_percent_from_json_returns_none_when_absent() {
        let tmp = tempfile::tempdir().unwrap();
        let pct = read_coverage_percent_from_json(tmp.path());
        assert!(pct.is_none());
    }
}
|
||||
819
server/src/http/mcp/diagnostics.rs
Normal file
819
server/src/http/mcp/diagnostics.rs
Normal file
@@ -0,0 +1,819 @@
|
||||
use crate::agents::{AgentStatus, move_story_to_stage};
|
||||
use crate::http::context::AppContext;
|
||||
use crate::log_buffer;
|
||||
use crate::slog;
|
||||
use crate::slog_warn;
|
||||
use serde_json::{json, Value};
|
||||
use std::fs;
|
||||
|
||||
pub(super) fn tool_get_server_logs(args: &Value) -> Result<String, String> {
|
||||
let lines_count = args
|
||||
.get("lines")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n.min(1000) as usize)
|
||||
.unwrap_or(100);
|
||||
let filter = args.get("filter").and_then(|v| v.as_str());
|
||||
let severity = args
|
||||
.get("severity")
|
||||
.and_then(|v| v.as_str())
|
||||
.and_then(log_buffer::LogLevel::from_str_ci);
|
||||
|
||||
let recent = log_buffer::global().get_recent(lines_count, filter, severity.as_ref());
|
||||
let joined = recent.join("\n");
|
||||
// Clamp to lines_count actual lines in case any entry contains embedded newlines.
|
||||
let all_lines: Vec<&str> = joined.lines().collect();
|
||||
let start = all_lines.len().saturating_sub(lines_count);
|
||||
Ok(all_lines[start..].join("\n"))
|
||||
}
|
||||
|
||||
/// Rebuild the server binary and re-exec.
|
||||
///
|
||||
/// 1. Gracefully stops all running agents (kills PTY children).
|
||||
/// 2. Runs `cargo build [-p story-kit]` from the workspace root, matching
|
||||
/// the current build profile (debug or release).
|
||||
/// 3. If the build fails, returns the build error (server stays up).
|
||||
/// 4. If the build succeeds, re-execs the process with the new binary via
|
||||
/// `std::os::unix::process::CommandExt::exec()`.
|
||||
pub(super) async fn tool_rebuild_and_restart(ctx: &AppContext) -> Result<String, String> {
|
||||
slog!("[rebuild] Rebuild and restart requested via MCP tool");
|
||||
|
||||
// 1. Gracefully stop all running agents.
|
||||
let running_agents = ctx.agents.list_agents().unwrap_or_default();
|
||||
let running_count = running_agents
|
||||
.iter()
|
||||
.filter(|a| a.status == AgentStatus::Running)
|
||||
.count();
|
||||
if running_count > 0 {
|
||||
slog!("[rebuild] Stopping {running_count} running agent(s) before rebuild");
|
||||
}
|
||||
ctx.agents.kill_all_children();
|
||||
|
||||
// 2. Find the workspace root (parent of the server binary's source).
|
||||
// CARGO_MANIFEST_DIR at compile time points to the `server/` crate;
|
||||
// the workspace root is its parent.
|
||||
let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
|
||||
let workspace_root = manifest_dir
|
||||
.parent()
|
||||
.ok_or_else(|| "Cannot determine workspace root from CARGO_MANIFEST_DIR".to_string())?;
|
||||
|
||||
slog!(
|
||||
"[rebuild] Building server from workspace root: {}",
|
||||
workspace_root.display()
|
||||
);
|
||||
|
||||
// 3. Build the server binary, matching the current build profile so the
|
||||
// re-exec via current_exe() picks up the new binary.
|
||||
let build_args: Vec<&str> = if cfg!(debug_assertions) {
|
||||
vec!["build", "-p", "story-kit"]
|
||||
} else {
|
||||
vec!["build", "--release", "-p", "story-kit"]
|
||||
};
|
||||
slog!("[rebuild] cargo {}", build_args.join(" "));
|
||||
let output = tokio::task::spawn_blocking({
|
||||
let workspace_root = workspace_root.to_path_buf();
|
||||
move || {
|
||||
std::process::Command::new("cargo")
|
||||
.args(&build_args)
|
||||
.current_dir(&workspace_root)
|
||||
.output()
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Build task panicked: {e}"))?
|
||||
.map_err(|e| format!("Failed to run cargo build: {e}"))?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
slog!("[rebuild] Build failed:\n{stderr}");
|
||||
return Err(format!("Build failed:\n{stderr}"));
|
||||
}
|
||||
|
||||
slog!("[rebuild] Build succeeded, re-execing with new binary");
|
||||
|
||||
// 4. Re-exec with the new binary.
|
||||
// Collect current argv so we preserve any CLI arguments (e.g. project path).
|
||||
let current_exe = std::env::current_exe()
|
||||
.map_err(|e| format!("Cannot determine current executable: {e}"))?;
|
||||
let args: Vec<String> = std::env::args().collect();
|
||||
|
||||
// Remove the port file before re-exec so the new process can write its own.
|
||||
if let Ok(root) = ctx.state.get_project_root() {
|
||||
let port_file = root.join(".story_kit_port");
|
||||
if port_file.exists() {
|
||||
let _ = std::fs::remove_file(&port_file);
|
||||
}
|
||||
}
|
||||
// Also check cwd for port file.
|
||||
let cwd_port_file = std::path::Path::new(".story_kit_port");
|
||||
if cwd_port_file.exists() {
|
||||
let _ = std::fs::remove_file(cwd_port_file);
|
||||
}
|
||||
|
||||
// Use exec() to replace the current process.
|
||||
// This never returns on success.
|
||||
use std::os::unix::process::CommandExt;
|
||||
let err = std::process::Command::new(¤t_exe)
|
||||
.args(&args[1..])
|
||||
.exec();
|
||||
|
||||
// If we get here, exec() failed.
|
||||
Err(format!("Failed to exec new binary: {err}"))
|
||||
}
|
||||
|
||||
/// Generate a Claude Code permission rule string for the given tool name and input.
|
||||
///
|
||||
/// - `Edit` / `Write` / `Read` / `Grep` / `Glob` etc. → just the tool name
|
||||
/// - `Bash` → `Bash(first_word *)` derived from the `command` field in `tool_input`
|
||||
/// - `mcp__*` → the full tool name (e.g. `mcp__story-kit__create_story`)
|
||||
fn generate_permission_rule(tool_name: &str, tool_input: &Value) -> String {
|
||||
if tool_name == "Bash" {
|
||||
// Extract command from tool_input.command and use first word as prefix
|
||||
let command_str = tool_input
|
||||
.get("command")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("");
|
||||
let first_word = command_str.split_whitespace().next().unwrap_or("unknown");
|
||||
format!("Bash({first_word} *)")
|
||||
} else {
|
||||
// For Edit, Write, Read, Glob, Grep, MCP tools, etc. — use the tool name directly
|
||||
tool_name.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a permission rule to `.claude/settings.json` in the project root.
|
||||
/// Does nothing if the rule already exists. Creates the file if missing.
|
||||
pub(super) fn add_permission_rule(project_root: &std::path::Path, rule: &str) -> Result<(), String> {
|
||||
let claude_dir = project_root.join(".claude");
|
||||
fs::create_dir_all(&claude_dir)
|
||||
.map_err(|e| format!("Failed to create .claude/ directory: {e}"))?;
|
||||
|
||||
let settings_path = claude_dir.join("settings.json");
|
||||
let mut settings: Value = if settings_path.exists() {
|
||||
let content = fs::read_to_string(&settings_path)
|
||||
.map_err(|e| format!("Failed to read settings.json: {e}"))?;
|
||||
serde_json::from_str(&content)
|
||||
.map_err(|e| format!("Failed to parse settings.json: {e}"))?
|
||||
} else {
|
||||
json!({ "permissions": { "allow": [] } })
|
||||
};
|
||||
|
||||
let allow_arr = settings
|
||||
.pointer_mut("/permissions/allow")
|
||||
.and_then(|v| v.as_array_mut());
|
||||
|
||||
let allow = match allow_arr {
|
||||
Some(arr) => arr,
|
||||
None => {
|
||||
// Ensure the structure exists
|
||||
settings
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.entry("permissions")
|
||||
.or_insert(json!({ "allow": [] }));
|
||||
settings
|
||||
.pointer_mut("/permissions/allow")
|
||||
.unwrap()
|
||||
.as_array_mut()
|
||||
.unwrap()
|
||||
}
|
||||
};
|
||||
|
||||
// Check for duplicates — exact string match
|
||||
let rule_value = Value::String(rule.to_string());
|
||||
if allow.contains(&rule_value) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Also check for wildcard coverage: if "mcp__story-kit__*" exists, don't add
|
||||
// a more specific "mcp__story-kit__create_story".
|
||||
let dominated = allow.iter().any(|existing| {
|
||||
if let Some(pat) = existing.as_str()
|
||||
&& let Some(prefix) = pat.strip_suffix('*')
|
||||
{
|
||||
return rule.starts_with(prefix);
|
||||
}
|
||||
false
|
||||
});
|
||||
if dominated {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
allow.push(rule_value);
|
||||
|
||||
let pretty =
|
||||
serde_json::to_string_pretty(&settings).map_err(|e| format!("Failed to serialize: {e}"))?;
|
||||
fs::write(&settings_path, pretty)
|
||||
.map_err(|e| format!("Failed to write settings.json: {e}"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// MCP tool called by Claude Code via `--permission-prompt-tool`.
|
||||
///
|
||||
/// Forwards the permission request through the shared channel to the active
|
||||
/// WebSocket session, which presents a dialog to the user. Blocks until the
|
||||
/// user approves or denies (with a 5-minute timeout).
|
||||
pub(super) async fn tool_prompt_permission(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let tool_name = args
|
||||
.get("tool_name")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
let tool_input = args
|
||||
.get("input")
|
||||
.cloned()
|
||||
.unwrap_or(json!({}));
|
||||
|
||||
let request_id = uuid::Uuid::new_v4().to_string();
|
||||
let (response_tx, response_rx) = tokio::sync::oneshot::channel();
|
||||
|
||||
ctx.perm_tx
|
||||
.send(crate::http::context::PermissionForward {
|
||||
request_id: request_id.clone(),
|
||||
tool_name: tool_name.clone(),
|
||||
tool_input: tool_input.clone(),
|
||||
response_tx,
|
||||
})
|
||||
.map_err(|_| "No active WebSocket session to receive permission request".to_string())?;
|
||||
|
||||
use crate::http::context::PermissionDecision;
|
||||
|
||||
let decision = tokio::time::timeout(
|
||||
std::time::Duration::from_secs(300),
|
||||
response_rx,
|
||||
)
|
||||
.await
|
||||
.map_err(|_| {
|
||||
let msg = format!("Permission request for '{tool_name}' timed out after 5 minutes");
|
||||
slog_warn!("[permission] {msg}");
|
||||
msg
|
||||
})?
|
||||
.map_err(|_| "Permission response channel closed unexpectedly".to_string())?;
|
||||
|
||||
if decision == PermissionDecision::AlwaysAllow {
|
||||
// Persist the rule so Claude Code won't prompt again for this tool.
|
||||
if let Some(root) = ctx.state.project_root.lock().unwrap().clone() {
|
||||
let rule = generate_permission_rule(&tool_name, &tool_input);
|
||||
if let Err(e) = add_permission_rule(&root, &rule) {
|
||||
slog_warn!("[permission] Failed to write always-allow rule: {e}");
|
||||
} else {
|
||||
slog!("[permission] Added always-allow rule: {rule}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if decision == PermissionDecision::Approve || decision == PermissionDecision::AlwaysAllow {
|
||||
// Claude Code SDK expects:
|
||||
// Allow: { behavior: "allow", updatedInput: <record> }
|
||||
// Deny: { behavior: "deny", message: string }
|
||||
Ok(json!({"behavior": "allow", "updatedInput": tool_input}).to_string())
|
||||
} else {
|
||||
slog_warn!("[permission] User denied permission for '{tool_name}'");
|
||||
Ok(json!({
|
||||
"behavior": "deny",
|
||||
"message": format!("User denied permission for '{tool_name}'")
|
||||
})
|
||||
.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn tool_get_token_usage(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let filter_story = args.get("story_id").and_then(|v| v.as_str());
|
||||
|
||||
let all_records = crate::agents::token_usage::read_all(&root)?;
|
||||
let records: Vec<_> = all_records
|
||||
.into_iter()
|
||||
.filter(|r| filter_story.is_none_or(|s| r.story_id == s))
|
||||
.collect();
|
||||
|
||||
let total_cost: f64 = records.iter().map(|r| r.usage.total_cost_usd).sum();
|
||||
let total_input: u64 = records.iter().map(|r| r.usage.input_tokens).sum();
|
||||
let total_output: u64 = records.iter().map(|r| r.usage.output_tokens).sum();
|
||||
let total_cache_create: u64 = records
|
||||
.iter()
|
||||
.map(|r| r.usage.cache_creation_input_tokens)
|
||||
.sum();
|
||||
let total_cache_read: u64 = records
|
||||
.iter()
|
||||
.map(|r| r.usage.cache_read_input_tokens)
|
||||
.sum();
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"records": records.iter().map(|r| json!({
|
||||
"story_id": r.story_id,
|
||||
"agent_name": r.agent_name,
|
||||
"timestamp": r.timestamp,
|
||||
"input_tokens": r.usage.input_tokens,
|
||||
"output_tokens": r.usage.output_tokens,
|
||||
"cache_creation_input_tokens": r.usage.cache_creation_input_tokens,
|
||||
"cache_read_input_tokens": r.usage.cache_read_input_tokens,
|
||||
"total_cost_usd": r.usage.total_cost_usd,
|
||||
})).collect::<Vec<_>>(),
|
||||
"totals": {
|
||||
"records": records.len(),
|
||||
"input_tokens": total_input,
|
||||
"output_tokens": total_output,
|
||||
"cache_creation_input_tokens": total_cache_create,
|
||||
"cache_read_input_tokens": total_cache_read,
|
||||
"total_cost_usd": total_cost,
|
||||
}
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_move_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let target_stage = args
|
||||
.get("target_stage")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: target_stage")?;
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
let (from_stage, to_stage) = move_story_to_stage(&project_root, story_id, target_stage)?;
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": story_id,
|
||||
"from_stage": from_stage,
|
||||
"to_stage": to_stage,
|
||||
"message": format!("Work item '{story_id}' moved from '{from_stage}' to '{to_stage}'.")
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
// Unit tests for the MCP tool handlers in this module: server-log retrieval,
// token-usage reporting, permission prompting and rule persistence,
// rebuild-and-restart plumbing, and story movement between pipeline stages.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::http::context::AppContext;

    // Build a test AppContext rooted at the given directory.
    fn test_ctx(dir: &std::path::Path) -> AppContext {
        AppContext::new_test(dir.to_path_buf())
    }

    #[test]
    fn tool_get_server_logs_no_args_returns_string() {
        let result = tool_get_server_logs(&json!({})).unwrap();
        // Returns recent log lines (possibly empty in tests) — just verify no panic
        let _ = result;
    }

    #[test]
    fn tool_get_server_logs_with_filter_returns_matching_lines() {
        let result =
            tool_get_server_logs(&json!({"filter": "xyz_unlikely_match_999"})).unwrap();
        assert_eq!(result, "", "filter with no matches should return empty string");
    }

    #[test]
    fn tool_get_server_logs_with_line_limit() {
        let result = tool_get_server_logs(&json!({"lines": 5})).unwrap();
        assert!(result.lines().count() <= 5);
    }

    #[test]
    fn tool_get_server_logs_max_cap_is_1000() {
        // Lines > 1000 are capped — just verify it returns without error
        let result = tool_get_server_logs(&json!({"lines": 9999})).unwrap();
        let _ = result;
    }

    #[test]
    fn tool_get_token_usage_empty_returns_zero_totals() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_get_token_usage(&json!({}), &ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // With no records on disk, both the list and every total must be zero.
        assert_eq!(parsed["records"].as_array().unwrap().len(), 0);
        assert_eq!(parsed["totals"]["records"], 0);
        assert_eq!(parsed["totals"]["total_cost_usd"], 0.0);
    }

    #[test]
    fn tool_get_token_usage_returns_written_records() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        let ctx = test_ctx(root);

        // Seed one usage record, then verify it round-trips through the tool.
        let usage = crate::agents::TokenUsage {
            input_tokens: 100,
            output_tokens: 200,
            cache_creation_input_tokens: 5000,
            cache_read_input_tokens: 10000,
            total_cost_usd: 1.57,
        };
        let record =
            crate::agents::token_usage::build_record("42_story_foo", "coder-1", None, usage);
        crate::agents::token_usage::append_record(root, &record).unwrap();

        let result = tool_get_token_usage(&json!({}), &ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["records"].as_array().unwrap().len(), 1);
        assert_eq!(parsed["records"][0]["story_id"], "42_story_foo");
        assert_eq!(parsed["records"][0]["agent_name"], "coder-1");
        assert_eq!(parsed["records"][0]["input_tokens"], 100);
        assert_eq!(parsed["totals"]["records"], 1);
        assert!((parsed["totals"]["total_cost_usd"].as_f64().unwrap() - 1.57).abs() < f64::EPSILON);
    }

    #[test]
    fn tool_get_token_usage_filters_by_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        let ctx = test_ctx(root);

        // Two records for different stories; the filter must keep exactly one.
        let usage = crate::agents::TokenUsage {
            input_tokens: 50,
            output_tokens: 60,
            cache_creation_input_tokens: 0,
            cache_read_input_tokens: 0,
            total_cost_usd: 0.5,
        };
        let r1 = crate::agents::token_usage::build_record("10_story_a", "coder-1", None, usage.clone());
        let r2 = crate::agents::token_usage::build_record("20_story_b", "coder-2", None, usage);
        crate::agents::token_usage::append_record(root, &r1).unwrap();
        crate::agents::token_usage::append_record(root, &r2).unwrap();

        let result =
            tool_get_token_usage(&json!({"story_id": "10_story_a"}), &ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["records"].as_array().unwrap().len(), 1);
        assert_eq!(parsed["records"][0]["story_id"], "10_story_a");
        assert_eq!(parsed["totals"]["records"], 1);
    }

    #[tokio::test]
    async fn tool_prompt_permission_approved_returns_updated_input() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());

        // Spawn a task that immediately sends approval through the channel.
        let perm_rx = ctx.perm_rx.clone();
        tokio::spawn(async move {
            let mut rx = perm_rx.lock().await;
            if let Some(forward) = rx.recv().await {
                let _ = forward.response_tx.send(crate::http::context::PermissionDecision::Approve);
            }
        });

        let result = tool_prompt_permission(
            &json!({"tool_name": "Bash", "input": {"command": "echo hello"}}),
            &ctx,
        )
        .await
        .expect("should succeed on approval");

        let parsed: Value = serde_json::from_str(&result).expect("result should be valid JSON");
        assert_eq!(
            parsed["behavior"], "allow",
            "approved must return behavior:allow"
        );
        assert_eq!(
            parsed["updatedInput"]["command"], "echo hello",
            "approved must return updatedInput with original tool input for Claude Code SDK compatibility"
        );
    }

    #[tokio::test]
    async fn tool_prompt_permission_denied_returns_deny_json() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());

        // Spawn a task that immediately sends denial through the channel.
        let perm_rx = ctx.perm_rx.clone();
        tokio::spawn(async move {
            let mut rx = perm_rx.lock().await;
            if let Some(forward) = rx.recv().await {
                let _ = forward.response_tx.send(crate::http::context::PermissionDecision::Deny);
            }
        });

        let result = tool_prompt_permission(
            &json!({"tool_name": "Write", "input": {}}),
            &ctx,
        )
        .await
        .expect("denial must return Ok, not Err");

        let parsed: Value = serde_json::from_str(&result).expect("result should be valid JSON");
        assert_eq!(parsed["behavior"], "deny", "denied must return behavior:deny");
        assert!(parsed["message"].is_string(), "deny must include a message");
    }

    // ── Permission rule generation tests ─────────────────────────

    #[test]
    fn generate_rule_for_edit_tool() {
        let rule = generate_permission_rule("Edit", &json!({}));
        assert_eq!(rule, "Edit");
    }

    #[test]
    fn generate_rule_for_write_tool() {
        let rule = generate_permission_rule("Write", &json!({}));
        assert_eq!(rule, "Write");
    }

    #[test]
    fn generate_rule_for_bash_git() {
        let rule =
            generate_permission_rule("Bash", &json!({"command": "git status"}));
        assert_eq!(rule, "Bash(git *)");
    }

    #[test]
    fn generate_rule_for_bash_cargo() {
        let rule =
            generate_permission_rule("Bash", &json!({"command": "cargo test --all"}));
        assert_eq!(rule, "Bash(cargo *)");
    }

    #[test]
    fn generate_rule_for_bash_empty_command() {
        // No "command" key at all falls back to the "unknown" prefix.
        let rule = generate_permission_rule("Bash", &json!({}));
        assert_eq!(rule, "Bash(unknown *)");
    }

    #[test]
    fn generate_rule_for_mcp_tool() {
        let rule = generate_permission_rule(
            "mcp__story-kit__create_story",
            &json!({"name": "foo"}),
        );
        assert_eq!(rule, "mcp__story-kit__create_story");
    }

    // ── Settings.json writing tests ──────────────────────────────

    #[test]
    fn add_rule_creates_settings_file_when_missing() {
        let tmp = tempfile::tempdir().unwrap();
        add_permission_rule(tmp.path(), "Edit").unwrap();

        let content = fs::read_to_string(tmp.path().join(".claude/settings.json")).unwrap();
        let settings: Value = serde_json::from_str(&content).unwrap();
        let allow = settings["permissions"]["allow"].as_array().unwrap();
        assert!(allow.contains(&json!("Edit")));
    }

    #[test]
    fn add_rule_does_not_duplicate_existing() {
        let tmp = tempfile::tempdir().unwrap();
        // Adding the same rule twice must leave exactly one copy.
        add_permission_rule(tmp.path(), "Edit").unwrap();
        add_permission_rule(tmp.path(), "Edit").unwrap();

        let content = fs::read_to_string(tmp.path().join(".claude/settings.json")).unwrap();
        let settings: Value = serde_json::from_str(&content).unwrap();
        let allow = settings["permissions"]["allow"].as_array().unwrap();
        let count = allow.iter().filter(|v| v == &&json!("Edit")).count();
        assert_eq!(count, 1);
    }

    #[test]
    fn add_rule_skips_when_wildcard_already_covers() {
        let tmp = tempfile::tempdir().unwrap();
        let claude_dir = tmp.path().join(".claude");
        fs::create_dir_all(&claude_dir).unwrap();
        fs::write(
            claude_dir.join("settings.json"),
            r#"{"permissions":{"allow":["mcp__story-kit__*"]}}"#,
        )
        .unwrap();

        // The specific rule is dominated by the existing wildcard.
        add_permission_rule(tmp.path(), "mcp__story-kit__create_story").unwrap();

        let content = fs::read_to_string(claude_dir.join("settings.json")).unwrap();
        let settings: Value = serde_json::from_str(&content).unwrap();
        let allow = settings["permissions"]["allow"].as_array().unwrap();
        assert_eq!(allow.len(), 1);
        assert_eq!(allow[0], "mcp__story-kit__*");
    }

    #[test]
    fn add_rule_appends_to_existing_rules() {
        let tmp = tempfile::tempdir().unwrap();
        let claude_dir = tmp.path().join(".claude");
        fs::create_dir_all(&claude_dir).unwrap();
        fs::write(
            claude_dir.join("settings.json"),
            r#"{"permissions":{"allow":["Edit"]}}"#,
        )
        .unwrap();

        add_permission_rule(tmp.path(), "Write").unwrap();

        let content = fs::read_to_string(claude_dir.join("settings.json")).unwrap();
        let settings: Value = serde_json::from_str(&content).unwrap();
        let allow = settings["permissions"]["allow"].as_array().unwrap();
        assert_eq!(allow.len(), 2);
        assert!(allow.contains(&json!("Edit")));
        assert!(allow.contains(&json!("Write")));
    }

    #[test]
    fn add_rule_preserves_other_settings_fields() {
        let tmp = tempfile::tempdir().unwrap();
        let claude_dir = tmp.path().join(".claude");
        fs::create_dir_all(&claude_dir).unwrap();
        // settings.json carries unrelated top-level keys; they must survive.
        fs::write(
            claude_dir.join("settings.json"),
            r#"{"permissions":{"allow":["Edit"]},"enabledMcpjsonServers":["story-kit"]}"#,
        )
        .unwrap();

        add_permission_rule(tmp.path(), "Write").unwrap();

        let content = fs::read_to_string(claude_dir.join("settings.json")).unwrap();
        let settings: Value = serde_json::from_str(&content).unwrap();
        let servers = settings["enabledMcpjsonServers"].as_array().unwrap();
        assert_eq!(servers.len(), 1);
        assert_eq!(servers[0], "story-kit");
    }

    #[test]
    fn rebuild_and_restart_in_tools_list() {
        use super::super::{handle_tools_list};
        let resp = handle_tools_list(Some(json!(1)));
        let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
        let tool = tools.iter().find(|t| t["name"] == "rebuild_and_restart");
        assert!(
            tool.is_some(),
            "rebuild_and_restart missing from tools list"
        );
        let t = tool.unwrap();
        assert!(t["description"].as_str().unwrap().contains("Rebuild"));
        assert!(t["inputSchema"].is_object());
    }

    #[tokio::test]
    async fn rebuild_and_restart_kills_agents_before_build() {
        // Verify that calling rebuild_and_restart on an empty pool doesn't
        // panic and proceeds to the build step. We can't test exec() in a
        // unit test, but we can verify the build attempt happens.
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());

        // The build will succeed (we're running in the real workspace) and
        // then exec() will be called — which would replace our test process.
        // So we only test that the function *runs* without panicking up to
        // the agent-kill step. We do this by checking the pool is empty.
        assert_eq!(ctx.agents.list_agents().unwrap().len(), 0);
        ctx.agents.kill_all_children(); // should not panic on empty pool
    }

    #[test]
    fn rebuild_uses_matching_build_profile() {
        // The build must use the same profile (debug/release) as the running
        // binary, otherwise cargo build outputs to a different target dir and
        // current_exe() still points at the old binary.
        let build_args: Vec<&str> = if cfg!(debug_assertions) {
            vec!["build", "-p", "story-kit"]
        } else {
            vec!["build", "--release", "-p", "story-kit"]
        };

        // Tests always run in debug mode, so --release must NOT be present.
        assert!(
            !build_args.contains(&"--release"),
            "In debug builds, rebuild must not pass --release (would put \
the binary in target/release/ while current_exe() points to \
target/debug/)"
        );
    }

    // ── move_story tool tests ─────────────────────────────────────

    #[test]
    fn move_story_in_tools_list() {
        use super::super::{handle_tools_list};
        let resp = handle_tools_list(Some(json!(1)));
        let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
        let tool = tools.iter().find(|t| t["name"] == "move_story");
        assert!(tool.is_some(), "move_story missing from tools list");
        let t = tool.unwrap();
        assert!(t["description"].is_string());
        let required = t["inputSchema"]["required"].as_array().unwrap();
        let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(req_names.contains(&"story_id"));
        assert!(req_names.contains(&"target_stage"));
    }

    #[test]
    fn tool_move_story_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_move_story(&json!({"target_stage": "current"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn tool_move_story_missing_target_stage() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_move_story(&json!({"story_id": "1_story_test"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("target_stage"));
    }

    #[test]
    fn tool_move_story_invalid_target_stage() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        // Seed project root in state so get_project_root works
        let backlog = root.join(".story_kit/work/1_backlog");
        fs::create_dir_all(&backlog).unwrap();
        fs::write(backlog.join("1_story_test.md"), "---\nname: Test\n---\n").unwrap();
        let ctx = test_ctx(root);
        let result = tool_move_story(
            &json!({"story_id": "1_story_test", "target_stage": "invalid"}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("Invalid target_stage"));
    }

    #[test]
    fn tool_move_story_moves_from_backlog_to_current() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        let backlog = root.join(".story_kit/work/1_backlog");
        let current = root.join(".story_kit/work/2_current");
        fs::create_dir_all(&backlog).unwrap();
        fs::create_dir_all(&current).unwrap();
        fs::write(backlog.join("5_story_test.md"), "---\nname: Test\n---\n").unwrap();

        let ctx = test_ctx(root);
        let result = tool_move_story(
            &json!({"story_id": "5_story_test", "target_stage": "current"}),
            &ctx,
        )
        .unwrap();

        // File physically moved and the JSON reply reports the transition.
        assert!(!backlog.join("5_story_test.md").exists());
        assert!(current.join("5_story_test.md").exists());
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["story_id"], "5_story_test");
        assert_eq!(parsed["from_stage"], "backlog");
        assert_eq!(parsed["to_stage"], "current");
    }

    #[test]
    fn tool_move_story_moves_from_current_to_backlog() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        let current = root.join(".story_kit/work/2_current");
        let backlog = root.join(".story_kit/work/1_backlog");
        fs::create_dir_all(&current).unwrap();
        fs::create_dir_all(&backlog).unwrap();
        fs::write(current.join("6_story_back.md"), "---\nname: Back\n---\n").unwrap();

        let ctx = test_ctx(root);
        let result = tool_move_story(
            &json!({"story_id": "6_story_back", "target_stage": "backlog"}),
            &ctx,
        )
        .unwrap();

        assert!(!current.join("6_story_back.md").exists());
        assert!(backlog.join("6_story_back.md").exists());
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["from_stage"], "current");
        assert_eq!(parsed["to_stage"], "backlog");
    }

    #[test]
    fn tool_move_story_idempotent_when_already_in_target() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        let current = root.join(".story_kit/work/2_current");
        fs::create_dir_all(&current).unwrap();
        fs::write(current.join("7_story_idem.md"), "---\nname: Idem\n---\n").unwrap();

        let ctx = test_ctx(root);
        let result = tool_move_story(
            &json!({"story_id": "7_story_idem", "target_stage": "current"}),
            &ctx,
        )
        .unwrap();

        // Moving a story to the stage it is already in succeeds and keeps it.
        assert!(current.join("7_story_idem.md").exists());
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["from_stage"], "current");
        assert_eq!(parsed["to_stage"], "current");
    }

    #[test]
    fn tool_move_story_error_when_not_found() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_move_story(
            &json!({"story_id": "99_story_ghost", "target_stage": "current"}),
            &ctx,
        );
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("not found in any pipeline stage"));
    }
}
|
||||
380
server/src/http/mcp/merge_tools.rs
Normal file
380
server/src/http/mcp/merge_tools.rs
Normal file
@@ -0,0 +1,380 @@
|
||||
use crate::agents::move_story_to_merge;
|
||||
use crate::http::context::AppContext;
|
||||
use crate::io::story_metadata::write_merge_failure;
|
||||
use crate::slog;
|
||||
use crate::slog_warn;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
pub(super) fn tool_merge_agent_work(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
ctx.agents.start_merge_agent_work(&project_root, story_id)?;
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": story_id,
|
||||
"status": "started",
|
||||
"message": "Merge pipeline started. Poll get_merge_status(story_id) every 10-15 seconds until status is 'completed' or 'failed'."
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
/// MCP tool: report the current state of the background merge job for a story.
///
/// Requires `story_id`. Errors if no merge job exists for that story (i.e.
/// `merge_agent_work` was never called). Otherwise returns a JSON payload
/// whose `status` field is "running", "completed", or "failed", plus a
/// `message` telling the calling agent what to do next.
pub(super) fn tool_get_merge_status(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;

    let job = ctx.agents.get_merge_status(story_id)
        .ok_or_else(|| format!("No merge job found for story '{story_id}'. Call merge_agent_work first."))?;

    match &job.status {
        crate::agents::merge::MergeJobStatus::Running => {
            // Still in flight — tell the caller to poll again.
            serde_json::to_string_pretty(&json!({
                "story_id": story_id,
                "status": "running",
                "message": "Merge pipeline is still running. Poll again in 10-15 seconds."
            }))
            .map_err(|e| format!("Serialization error: {e}"))
        }
        crate::agents::merge::MergeJobStatus::Completed(report) => {
            // Pick the summary from the report flags. Branch order matters:
            // auto-resolved-conflict success is called out before plain
            // success, and unresolved conflicts before gate failures.
            let status_msg = if report.success && report.gates_passed && report.conflicts_resolved {
                "Merge complete: conflicts were auto-resolved and all quality gates passed. Story moved to done and worktree cleaned up."
            } else if report.success && report.gates_passed {
                "Merge complete: all quality gates passed. Story moved to done and worktree cleaned up."
            } else if report.had_conflicts && !report.conflicts_resolved {
                "Merge failed: conflicts detected that could not be auto-resolved. Merge was aborted — master is untouched. Call report_merge_failure with the conflict details so the human can resolve them. Do NOT manually move the story file or call accept_story."
            } else if report.success && !report.gates_passed {
                "Merge committed but quality gates failed. Review gate_output and fix issues before re-running."
            } else {
                "Merge failed. Review gate_output for details. Call report_merge_failure to record the failure. Do NOT manually move the story file or call accept_story."
            };

            serde_json::to_string_pretty(&json!({
                "story_id": story_id,
                "status": "completed",
                "success": report.success,
                "had_conflicts": report.had_conflicts,
                "conflicts_resolved": report.conflicts_resolved,
                "conflict_details": report.conflict_details,
                "gates_passed": report.gates_passed,
                "gate_output": report.gate_output,
                "worktree_cleaned_up": report.worktree_cleaned_up,
                "story_archived": report.story_archived,
                "message": status_msg,
            }))
            .map_err(|e| format!("Serialization error: {e}"))
        }
        crate::agents::merge::MergeJobStatus::Failed(err) => {
            // The pipeline itself errored out (as opposed to a merge/gate
            // result); surface the error string directly.
            serde_json::to_string_pretty(&json!({
                "story_id": story_id,
                "status": "failed",
                "error": err,
                "message": format!("Merge pipeline failed: {err}. Call report_merge_failure to record the failure.")
            }))
            .map_err(|e| format!("Serialization error: {e}"))
        }
    }
}
|
||||
|
||||
pub(super) async fn tool_move_story_to_merge(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let agent_name = args
|
||||
.get("agent_name")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("mergemaster");
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
// Move story from work/2_current/ to work/4_merge/
|
||||
move_story_to_merge(&project_root, story_id)?;
|
||||
|
||||
// Start the mergemaster agent on the story worktree
|
||||
let info = ctx
|
||||
.agents
|
||||
.start_agent(&project_root, story_id, Some(agent_name), None)
|
||||
.await?;
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": info.story_id,
|
||||
"agent_name": info.agent_name,
|
||||
"status": info.status.to_string(),
|
||||
"worktree_path": info.worktree_path,
|
||||
"message": format!(
|
||||
"Story '{story_id}' moved to work/4_merge/ and mergemaster agent '{}' started.",
|
||||
info.agent_name
|
||||
),
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_report_merge_failure(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let reason = args
|
||||
.get("reason")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: reason")?;
|
||||
|
||||
slog!("[mergemaster] Merge failure reported for '{story_id}': {reason}");
|
||||
ctx.agents.set_merge_failure_reported(story_id);
|
||||
|
||||
// Broadcast the failure so the Matrix notification listener can post an
|
||||
// error message to configured rooms without coupling this tool to the bot.
|
||||
let _ = ctx.watcher_tx.send(crate::io::watcher::WatcherEvent::MergeFailure {
|
||||
story_id: story_id.to_string(),
|
||||
reason: reason.to_string(),
|
||||
});
|
||||
|
||||
// Persist the failure reason to the story file's front matter so it
|
||||
// survives server restarts and is visible in the web UI.
|
||||
if let Ok(project_root) = ctx.state.get_project_root() {
|
||||
let story_file = project_root
|
||||
.join(".story_kit")
|
||||
.join("work")
|
||||
.join("4_merge")
|
||||
.join(format!("{story_id}.md"));
|
||||
if story_file.exists() {
|
||||
if let Err(e) = write_merge_failure(&story_file, reason) {
|
||||
slog_warn!(
|
||||
"[mergemaster] Failed to persist merge_failure to story file for '{story_id}': {e}"
|
||||
);
|
||||
}
|
||||
} else {
|
||||
slog_warn!(
|
||||
"[mergemaster] Story file not found in 4_merge/ for '{story_id}'; \
|
||||
merge_failure not persisted to front matter"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(format!(
|
||||
"Merge failure for '{story_id}' recorded. Story remains in work/4_merge/. Reason: {reason}"
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    // Import once at module level instead of `use super::super::{handle_tools_list};`
    // (needless braces around a single item) repeated inside every test.
    use super::super::handle_tools_list;
    use crate::http::context::AppContext;

    /// Build a test `AppContext` rooted at `dir`.
    fn test_ctx(dir: &std::path::Path) -> AppContext {
        AppContext::new_test(dir.to_path_buf())
    }

    /// Run a git subcommand in `dir`; panics only if git cannot be spawned.
    fn git(dir: &std::path::Path, args: &[&str]) {
        std::process::Command::new("git")
            .args(args)
            .current_dir(dir)
            .output()
            .unwrap();
    }

    /// Initialize a git repository with a test identity and an initial empty
    /// commit so merge/worktree operations have a valid HEAD to work from.
    fn setup_git_repo_in(dir: &std::path::Path) {
        git(dir, &["init"]);
        git(dir, &["config", "user.email", "test@test.com"]);
        git(dir, &["config", "user.name", "Test"]);
        git(dir, &["commit", "--allow-empty", "-m", "init"]);
    }

    /// Look up a tool entry in the MCP `tools/list` response by name,
    /// panicking with a descriptive message when it is absent.
    fn tool_from_list(name: &str) -> Value {
        let resp = handle_tools_list(Some(json!(1)));
        let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
        tools
            .iter()
            .find(|t| t["name"] == name)
            .unwrap_or_else(|| panic!("{name} missing from tools list"))
            .clone()
    }

    /// Extract the argument names listed under `inputSchema.required`.
    fn required_args(tool: &Value) -> Vec<String> {
        tool["inputSchema"]["required"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_str().unwrap().to_string())
            .collect()
    }

    #[test]
    fn merge_agent_work_in_tools_list() {
        let t = tool_from_list("merge_agent_work");
        assert!(t["description"].is_string());
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
        // agent_name is optional
        assert!(!req.iter().any(|n| n == "agent_name"));
    }

    #[test]
    fn move_story_to_merge_in_tools_list() {
        let t = tool_from_list("move_story_to_merge");
        assert!(t["description"].is_string());
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
        // agent_name is optional
        assert!(!req.iter().any(|n| n == "agent_name"));
    }

    #[test]
    fn tool_merge_agent_work_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_merge_agent_work(&json!({}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_move_story_to_merge_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_move_story_to_merge(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_move_story_to_merge_moves_file() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        let current_dir = tmp.path().join(".story_kit/work/2_current");
        std::fs::create_dir_all(&current_dir).unwrap();
        let story_file = current_dir.join("24_story_test.md");
        std::fs::write(&story_file, "---\nname: Test\n---\n").unwrap();
        git(tmp.path(), &["add", "."]);
        git(tmp.path(), &["commit", "-m", "add story"]);

        let ctx = test_ctx(tmp.path());
        // The agent start will fail in test (no worktree/config), but the file move should succeed
        let result = tool_move_story_to_merge(&json!({"story_id": "24_story_test"}), &ctx).await;
        // File should have been moved regardless of agent start outcome
        assert!(!story_file.exists(), "2_current file should be gone");
        assert!(
            tmp.path().join(".story_kit/work/4_merge/24_story_test.md").exists(),
            "4_merge file should exist"
        );
        // Result is either Ok (agent started) or Err (agent failed - acceptable in tests)
        let _ = result;
    }

    #[tokio::test]
    async fn tool_merge_agent_work_returns_started() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        let ctx = test_ctx(tmp.path());

        let result = tool_merge_agent_work(
            &json!({"story_id": "99_nonexistent", "agent_name": "coder-1"}),
            &ctx,
        )
        .unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["story_id"], "99_nonexistent");
        assert_eq!(parsed["status"], "started");
        assert!(parsed.get("message").is_some());
    }

    #[test]
    fn tool_get_merge_status_no_job() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_get_merge_status(&json!({"story_id": "99_nonexistent"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("No merge job"));
    }

    #[tokio::test]
    async fn tool_get_merge_status_returns_running() {
        let tmp = tempfile::tempdir().unwrap();
        setup_git_repo_in(tmp.path());
        let ctx = test_ctx(tmp.path());

        // Start a merge (it will run in background)
        tool_merge_agent_work(&json!({"story_id": "99_nonexistent"}), &ctx).unwrap();

        // Immediately check — should be running (or already finished if very fast)
        let result = tool_get_merge_status(&json!({"story_id": "99_nonexistent"}), &ctx).unwrap();
        let parsed: Value = serde_json::from_str(&result).unwrap();
        let status = parsed["status"].as_str().unwrap();
        assert!(
            status == "running" || status == "completed" || status == "failed",
            "unexpected status: {status}"
        );
    }

    #[test]
    fn report_merge_failure_in_tools_list() {
        let t = tool_from_list("report_merge_failure");
        assert!(t["description"].is_string());
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
        assert!(req.iter().any(|n| n == "reason"));
    }

    #[test]
    fn tool_report_merge_failure_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_report_merge_failure(&json!({"reason": "conflicts"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[test]
    fn tool_report_merge_failure_missing_reason() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_report_merge_failure(&json!({"story_id": "42_story_foo"}), &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("reason"));
    }

    #[test]
    fn tool_report_merge_failure_returns_confirmation() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_report_merge_failure(
            &json!({
                "story_id": "42_story_foo",
                "reason": "Unresolvable merge conflicts in src/main.rs"
            }),
            &ctx,
        );
        assert!(result.is_ok());
        let msg = result.unwrap();
        assert!(msg.contains("42_story_foo"));
        assert!(msg.contains("work/4_merge/"));
        assert!(msg.contains("Unresolvable merge conflicts"));
    }
}
|
||||
1479
server/src/http/mcp/mod.rs
Normal file
1479
server/src/http/mcp/mod.rs
Normal file
File diff suppressed because it is too large
Load Diff
293
server/src/http/mcp/qa_tools.rs
Normal file
293
server/src/http/mcp/qa_tools.rs
Normal file
@@ -0,0 +1,293 @@
|
||||
use crate::agents::{move_story_to_merge, move_story_to_qa, reject_story_from_qa};
|
||||
use crate::http::context::AppContext;
|
||||
use crate::slog;
|
||||
use crate::slog_warn;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
pub(super) async fn tool_request_qa(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let agent_name = args
|
||||
.get("agent_name")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("qa");
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
// Move story from work/2_current/ to work/3_qa/
|
||||
move_story_to_qa(&project_root, story_id)?;
|
||||
|
||||
// Start the QA agent on the story worktree
|
||||
let info = ctx
|
||||
.agents
|
||||
.start_agent(&project_root, story_id, Some(agent_name), None)
|
||||
.await?;
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": info.story_id,
|
||||
"agent_name": info.agent_name,
|
||||
"status": info.status.to_string(),
|
||||
"worktree_path": info.worktree_path,
|
||||
"message": format!(
|
||||
"Story '{story_id}' moved to work/3_qa/ and QA agent '{}' started.",
|
||||
info.agent_name
|
||||
),
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) async fn tool_approve_qa(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
// Clear review_hold before moving
|
||||
let qa_path = project_root
|
||||
.join(".story_kit/work/3_qa")
|
||||
.join(format!("{story_id}.md"));
|
||||
if qa_path.exists() {
|
||||
let _ = crate::io::story_metadata::clear_front_matter_field(&qa_path, "review_hold");
|
||||
}
|
||||
|
||||
// Move story from work/3_qa/ to work/4_merge/
|
||||
move_story_to_merge(&project_root, story_id)?;
|
||||
|
||||
// Start the mergemaster agent
|
||||
let info = ctx
|
||||
.agents
|
||||
.start_agent(&project_root, story_id, Some("mergemaster"), None)
|
||||
.await?;
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": info.story_id,
|
||||
"agent_name": info.agent_name,
|
||||
"status": info.status.to_string(),
|
||||
"message": format!(
|
||||
"Story '{story_id}' approved. Moved to work/4_merge/ and mergemaster agent '{}' started.",
|
||||
info.agent_name
|
||||
),
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) async fn tool_reject_qa(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let notes = args
|
||||
.get("notes")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: notes")?;
|
||||
|
||||
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
// Move story from work/3_qa/ back to work/2_current/ with rejection notes
|
||||
reject_story_from_qa(&project_root, story_id, notes)?;
|
||||
|
||||
// Restart the coder agent with rejection context
|
||||
let story_path = project_root
|
||||
.join(".story_kit/work/2_current")
|
||||
.join(format!("{story_id}.md"));
|
||||
let agent_name = if story_path.exists() {
|
||||
let contents = std::fs::read_to_string(&story_path).unwrap_or_default();
|
||||
crate::io::story_metadata::parse_front_matter(&contents)
|
||||
.ok()
|
||||
.and_then(|meta| meta.agent)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let agent_name = agent_name.as_deref().unwrap_or("coder-opus");
|
||||
|
||||
let context = format!(
|
||||
"\n\n---\n## QA Rejection\n\
|
||||
Your previous implementation was rejected during human QA review.\n\
|
||||
Rejection notes:\n{notes}\n\n\
|
||||
Please fix the issues described above and try again."
|
||||
);
|
||||
if let Err(e) = ctx
|
||||
.agents
|
||||
.start_agent(&project_root, story_id, Some(agent_name), Some(&context))
|
||||
.await
|
||||
{
|
||||
slog_warn!("[qa] Failed to restart coder for '{story_id}' after rejection: {e}");
|
||||
}
|
||||
|
||||
Ok(format!(
|
||||
"Story '{story_id}' rejected and moved back to work/2_current/. Coder agent '{agent_name}' restarted with rejection notes."
|
||||
))
|
||||
}
|
||||
|
||||
/// MCP tool: launch the app under QA from a story's worktree.
///
/// Looks up the worktree registered for `story_id`, kills any previously
/// launched QA app instance, picks a free TCP port (probing from 3100),
/// records it in a `.story_kit_port` file inside the worktree, then spawns
/// `cargo run` there with `STORYKIT_PORT` set. The child handle is stored in
/// `ctx.qa_app_process` so a subsequent launch can stop it.
///
/// Required argument: `story_id`. Returns a pretty-printed JSON summary
/// (`story_id`, `port`, `worktree_path`, `message`) or an error string.
pub(super) async fn tool_launch_qa_app(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;

    let project_root = ctx.agents.get_project_root(&ctx.state)?;

    // Find the worktree path for this story
    let worktrees = crate::worktree::list_worktrees(&project_root)?;
    let wt = worktrees
        .iter()
        .find(|w| w.story_id == story_id)
        .ok_or_else(|| format!("No worktree found for story '{story_id}'"))?;
    let wt_path = wt.path.clone();

    // Stop any existing QA app instance
    // Scoped block so the mutex guard is dropped before the port probe and
    // spawn below.
    {
        let mut guard = ctx.qa_app_process.lock().unwrap();
        if let Some(mut child) = guard.take() {
            // Best-effort kill + reap; errors ignored (the process may have
            // already exited on its own).
            let _ = child.kill();
            let _ = child.wait();
            slog!("[qa-app] Stopped previous QA app instance.");
        }
    }

    // Find a free port starting from 3100
    // NOTE(review): inherently racy (TOCTOU) — the port could be taken between
    // this probe and the child binding it; treated as best-effort.
    let port = find_free_port(3100);

    // Write .story_kit_port so the frontend dev server knows where to connect
    let port_file = wt_path.join(".story_kit_port");
    std::fs::write(&port_file, port.to_string())
        .map_err(|e| format!("Failed to write .story_kit_port: {e}"))?;

    // Launch the server from the worktree
    // stdout/stderr are discarded; the child is only tracked via its handle.
    let child = std::process::Command::new("cargo")
        .args(["run"])
        .env("STORYKIT_PORT", port.to_string())
        .current_dir(&wt_path)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .spawn()
        .map_err(|e| format!("Failed to launch QA app: {e}"))?;

    // Record the new child so the next launch (or shutdown) can stop it.
    {
        let mut guard = ctx.qa_app_process.lock().unwrap();
        *guard = Some(child);
    }

    serde_json::to_string_pretty(&json!({
        "story_id": story_id,
        "port": port,
        "worktree_path": wt_path.to_string_lossy(),
        "message": format!("QA app launched on port {port} from worktree at {}", wt_path.display()),
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
|
||||
|
||||
/// Find a free TCP port starting from `start`.
|
||||
pub(super) fn find_free_port(start: u16) -> u16 {
|
||||
for port in start..start + 100 {
|
||||
if std::net::TcpListener::bind(("127.0.0.1", port)).is_ok() {
|
||||
return port;
|
||||
}
|
||||
}
|
||||
start // fallback
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    // Import once at module level instead of `use super::super::{handle_tools_list};`
    // (needless braces around a single item) repeated inside every test.
    use super::super::handle_tools_list;
    use crate::http::context::AppContext;

    /// Build a test `AppContext` rooted at `dir`.
    fn test_ctx(dir: &std::path::Path) -> AppContext {
        AppContext::new_test(dir.to_path_buf())
    }

    /// Look up a tool entry in the MCP `tools/list` response by name,
    /// panicking with a descriptive message when it is absent.
    fn tool_from_list(name: &str) -> Value {
        let resp = handle_tools_list(Some(json!(1)));
        let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
        tools
            .iter()
            .find(|t| t["name"] == name)
            .unwrap_or_else(|| panic!("{name} missing from tools list"))
            .clone()
    }

    /// Extract the argument names listed under `inputSchema.required`.
    fn required_args(tool: &Value) -> Vec<String> {
        tool["inputSchema"]["required"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_str().unwrap().to_string())
            .collect()
    }

    #[test]
    fn request_qa_in_tools_list() {
        let t = tool_from_list("request_qa");
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
        // agent_name is optional
        assert!(!req.iter().any(|n| n == "agent_name"));
    }

    #[test]
    fn approve_qa_in_tools_list() {
        let t = tool_from_list("approve_qa");
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
    }

    #[test]
    fn reject_qa_in_tools_list() {
        let t = tool_from_list("reject_qa");
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
        assert!(req.iter().any(|n| n == "notes"));
    }

    #[test]
    fn launch_qa_app_in_tools_list() {
        let t = tool_from_list("launch_qa_app");
        let req = required_args(&t);
        assert!(req.iter().any(|n| n == "story_id"));
    }

    #[tokio::test]
    async fn tool_approve_qa_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_approve_qa(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_reject_qa_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_reject_qa(&json!({"notes": "broken"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }

    #[tokio::test]
    async fn tool_reject_qa_missing_notes() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_reject_qa(&json!({"story_id": "1_story_test"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("notes"));
    }

    #[tokio::test]
    async fn tool_request_qa_missing_story_id() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_request_qa(&json!({}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("story_id"));
    }
}
|
||||
1269
server/src/http/mcp/story_tools.rs
Normal file
1269
server/src/http/mcp/story_tools.rs
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user