810608d3d8
Add notify-based filesystem watcher for .story_kit/work/ that auto-commits changes with deterministic messages and broadcasts events over WebSocket. Push full pipeline state (Upcoming, Current, QA, To Merge) to frontend on connect and after every watcher event. Strip dead UI: remove ReviewPanel, GatePanel, TodoPanel, UpcomingPanel and all associated REST polling. Replace with 4 generic StagePanel components driven by WebSocket. Simplify AgentPanel to roster-only. Delete all 11 workflow HTTP endpoints and 16 request/response types from the server. Clean dead code from workflow module. MCP tools call Rust functions directly and need none of the HTTP layer. Net: ~4,100 lines deleted, ~400 added. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2250 lines
86 KiB
Rust
2250 lines
86 KiB
Rust
use crate::agents::{close_bug_to_archive, move_story_to_archived, move_story_to_merge, move_story_to_qa};
|
|
use crate::config::ProjectConfig;
|
|
use crate::http::context::AppContext;
|
|
use crate::http::settings::get_editor_command_from_store;
|
|
use crate::http::workflow::{
|
|
check_criterion_in_file, create_bug_file, create_story_file, list_bug_files,
|
|
load_upcoming_stories, set_test_plan_in_file, validate_story_dirs,
|
|
};
|
|
use crate::worktree;
|
|
use crate::io::story_metadata::{parse_front_matter, parse_unchecked_todos};
|
|
use crate::workflow::{evaluate_acceptance_with_coverage, TestCaseResult, TestStatus};
|
|
use poem::handler;
|
|
use poem::http::StatusCode;
|
|
use poem::web::Data;
|
|
use poem::{Body, Request, Response};
|
|
use serde::{Deserialize, Serialize};
|
|
use serde_json::{json, Value};
|
|
use std::fs;
|
|
use std::sync::Arc;
|
|
|
|
/// Returns true when the Accept header includes text/event-stream.
|
|
fn wants_sse(req: &Request) -> bool {
|
|
req.header("accept")
|
|
.unwrap_or("")
|
|
.contains("text/event-stream")
|
|
}
|
|
|
|
// ── JSON-RPC structs ──────────────────────────────────────────────
|
|
|
|
/// Incoming JSON-RPC 2.0 request envelope, deserialized from the POST body.
#[derive(Deserialize)]
struct JsonRpcRequest {
    // Protocol version string; must be exactly "2.0" (checked in the handler).
    jsonrpc: String,
    // Request id. Absent (or null) for notifications, which get no response.
    id: Option<Value>,
    // Method name, e.g. "initialize", "tools/list", "tools/call".
    method: String,
    // Method parameters; defaults to Value::Null when omitted by the client.
    #[serde(default)]
    params: Value,
}
|
|
|
|
/// Outgoing JSON-RPC 2.0 response envelope. Exactly one of `result` or
/// `error` is populated (enforced by the `success`/`error` constructors);
/// the unused side is skipped during serialization.
#[derive(Serialize)]
struct JsonRpcResponse {
    // Always "2.0"; static since the server only speaks JSON-RPC 2.0.
    jsonrpc: &'static str,
    // Echo of the request id; omitted entirely when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    id: Option<Value>,
    // Success payload; present only on success.
    #[serde(skip_serializing_if = "Option::is_none")]
    result: Option<Value>,
    // Error payload; present only on failure.
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<JsonRpcError>,
}
|
|
|
|
/// JSON-RPC 2.0 error object (the `error` member of a failed response).
#[derive(Serialize)]
struct JsonRpcError {
    // Numeric error code (standard codes used here: -32700 parse error,
    // -32600 invalid request, -32601 method not found, -32602 invalid params).
    code: i64,
    // Human-readable error description.
    message: String,
    // Optional structured detail; never set by this module, so it is
    // always skipped during serialization.
    #[serde(skip_serializing_if = "Option::is_none")]
    data: Option<Value>,
}
|
|
|
|
impl JsonRpcResponse {
|
|
fn success(id: Option<Value>, result: Value) -> Self {
|
|
Self {
|
|
jsonrpc: "2.0",
|
|
id,
|
|
result: Some(result),
|
|
error: None,
|
|
}
|
|
}
|
|
|
|
fn error(id: Option<Value>, code: i64, message: String) -> Self {
|
|
Self {
|
|
jsonrpc: "2.0",
|
|
id,
|
|
result: None,
|
|
error: Some(JsonRpcError {
|
|
code,
|
|
message,
|
|
data: None,
|
|
}),
|
|
}
|
|
}
|
|
}
|
|
|
|
// ── Poem handlers ─────────────────────────────────────────────────
|
|
|
|
#[handler]
|
|
pub async fn mcp_get_handler() -> Response {
|
|
Response::builder()
|
|
.status(StatusCode::METHOD_NOT_ALLOWED)
|
|
.body(Body::empty())
|
|
}
|
|
|
|
/// POST /mcp — the single JSON-RPC 2.0 endpoint for all MCP traffic.
///
/// Pipeline: validate Content-Type → read body → parse JSON-RPC envelope →
/// reject wrong protocol versions → swallow notifications (no id) → dispatch
/// `initialize` / `tools/list` / `tools/call`. The response is encoded as SSE
/// when the client's Accept header asks for `text/event-stream`, otherwise as
/// plain JSON. `get_agent_output` over SSE gets a dedicated streaming path.
#[handler]
pub async fn mcp_post_handler(req: &Request, body: Body, ctx: Data<&Arc<AppContext>>) -> Response {
    // Validate Content-Type. An absent header is tolerated; a present header
    // that is not application/json is rejected with -32700 (parse error).
    let content_type = req.header("content-type").unwrap_or("");
    if !content_type.is_empty() && !content_type.contains("application/json") {
        return json_rpc_error_response(
            None,
            -32700,
            "Unsupported Content-Type; expected application/json".into(),
        );
    }

    // Read the full request body; any transport failure is a parse error.
    let bytes = match body.into_bytes().await {
        Ok(b) => b,
        Err(_) => return json_rpc_error_response(None, -32700, "Parse error".into()),
    };

    let rpc: JsonRpcRequest = match serde_json::from_slice(&bytes) {
        Ok(r) => r,
        Err(_) => return json_rpc_error_response(None, -32700, "Parse error".into()),
    };

    if rpc.jsonrpc != "2.0" {
        return json_rpc_error_response(rpc.id, -32600, "Invalid JSON-RPC version".into());
    }

    // Notifications (no id) — accept silently. Only methods under the
    // "notifications/" namespace are accepted id-less (202, empty body);
    // anything else without an id is an invalid request.
    if rpc.id.is_none() || rpc.id.as_ref() == Some(&Value::Null) {
        if rpc.method.starts_with("notifications/") {
            return Response::builder()
                .status(StatusCode::ACCEPTED)
                .body(Body::empty());
        }
        return json_rpc_error_response(None, -32600, "Missing id".into());
    }

    let sse = wants_sse(req);

    // Streaming agent output over SSE: get_agent_output bypasses the normal
    // request/response dispatch and streams events as they arrive.
    if sse && rpc.method == "tools/call" {
        let tool_name = rpc
            .params
            .get("name")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        if tool_name == "get_agent_output" {
            return handle_agent_output_sse(rpc.id, &rpc.params, &ctx);
        }
    }

    let resp = match rpc.method.as_str() {
        "initialize" => handle_initialize(rpc.id, &rpc.params),
        "tools/list" => handle_tools_list(rpc.id),
        "tools/call" => handle_tools_call(rpc.id, &rpc.params, &ctx).await,
        _ => JsonRpcResponse::error(rpc.id, -32601, format!("Unknown method: {}", rpc.method)),
    };

    // Encode the single response per the client's negotiated transport.
    if sse {
        to_sse_response(resp)
    } else {
        to_json_response(resp)
    }
}
|
|
|
|
fn json_rpc_error_response(id: Option<Value>, code: i64, message: String) -> Response {
|
|
to_json_response(JsonRpcResponse::error(id, code, message))
|
|
}
|
|
|
|
fn to_json_response(resp: JsonRpcResponse) -> Response {
|
|
let body = serde_json::to_vec(&resp).unwrap_or_default();
|
|
Response::builder()
|
|
.status(StatusCode::OK)
|
|
.header("Content-Type", "application/json")
|
|
.body(Body::from(body))
|
|
}
|
|
|
|
fn to_sse_response(resp: JsonRpcResponse) -> Response {
|
|
let json = serde_json::to_string(&resp).unwrap_or_default();
|
|
let sse_body = format!("data: {json}\n\n");
|
|
Response::builder()
|
|
.status(StatusCode::OK)
|
|
.header("Content-Type", "text/event-stream")
|
|
.header("Cache-Control", "no-cache")
|
|
.body(Body::from_string(sse_body))
|
|
}
|
|
|
|
/// Stream agent events as SSE — each event is a separate JSON-RPC notification,
/// followed by a final JSON-RPC response with the matching request id.
///
/// Validates `story_id`/`agent_name` arguments (errors are still delivered as
/// SSE frames since the client negotiated that transport), subscribes to the
/// agent's broadcast channel, and forwards events until a terminal event,
/// channel close, or the timeout deadline.
fn handle_agent_output_sse(
    id: Option<Value>,
    params: &Value,
    ctx: &AppContext,
) -> Response {
    let args = params.get("arguments").cloned().unwrap_or(json!({}));
    let story_id = match args.get("story_id").and_then(|v| v.as_str()) {
        Some(s) => s.to_string(),
        None => return to_sse_response(JsonRpcResponse::error(
            id,
            -32602,
            "Missing required argument: story_id".into(),
        )),
    };
    let agent_name = match args.get("agent_name").and_then(|v| v.as_str()) {
        Some(s) => s.to_string(),
        None => return to_sse_response(JsonRpcResponse::error(
            id,
            -32602,
            "Missing required argument: agent_name".into(),
        )),
    };
    // NOTE(review): default 10s / cap 30s here, but the tools/list schema for
    // get_agent_output advertises "default: 2000, max: 10000" — one of the two
    // should be aligned; confirm which is intended.
    let timeout_ms = args
        .get("timeout_ms")
        .and_then(|v| v.as_u64())
        .unwrap_or(10000)
        .min(30000);

    // Subscription failure (e.g. unknown agent) is reported as a tool-level
    // error result, not a JSON-RPC protocol error.
    let mut rx = match ctx.agents.subscribe(&story_id, &agent_name) {
        Ok(rx) => rx,
        Err(e) => return to_sse_response(JsonRpcResponse::success(
            id,
            json!({ "content": [{"type": "text", "text": e}], "isError": true }),
        )),
    };

    let final_id = id;
    let stream = async_stream::stream! {
        let deadline = tokio::time::Instant::now()
            + std::time::Duration::from_millis(timeout_ms);
        let mut done = false;

        loop {
            // Recompute the remaining budget each iteration so the overall
            // wall-clock bound holds regardless of how many events arrive.
            let remaining = deadline.saturating_duration_since(tokio::time::Instant::now());
            if remaining.is_zero() {
                break;
            }

            match tokio::time::timeout(remaining, rx.recv()).await {
                Ok(Ok(event)) => {
                    // Done/Error mark the end of the agent's event stream.
                    let is_terminal = matches!(
                        &event,
                        crate::agents::AgentEvent::Done { .. }
                            | crate::agents::AgentEvent::Error { .. }
                    );
                    // Send each event as a JSON-RPC notification (no id)
                    if let Ok(event_json) = serde_json::to_value(&event) {
                        let notification = json!({
                            "jsonrpc": "2.0",
                            "method": "notifications/tools/progress",
                            "params": { "event": event_json }
                        });
                        if let Ok(s) = serde_json::to_string(&notification) {
                            yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                        }
                    }
                    // Emit the terminal event first, then stop streaming.
                    if is_terminal {
                        done = true;
                        break;
                    }
                }
                Ok(Err(tokio::sync::broadcast::error::RecvError::Lagged(n))) => {
                    // The broadcast buffer overflowed; tell the client how
                    // many events were dropped and keep streaming.
                    let notification = json!({
                        "jsonrpc": "2.0",
                        "method": "notifications/tools/progress",
                        "params": { "event": {"type": "warning", "message": format!("Skipped {n} events")} }
                    });
                    if let Ok(s) = serde_json::to_string(&notification) {
                        yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                    }
                }
                Ok(Err(tokio::sync::broadcast::error::RecvError::Closed)) => {
                    // Sender dropped — the agent is gone; treat as finished.
                    done = true;
                    break;
                }
                Err(_) => break, // timeout
            }
        }

        // Final response with the request id
        let final_resp = JsonRpcResponse::success(
            final_id,
            json!({
                "content": [{
                    "type": "text",
                    "text": if done { "Agent stream ended." } else { "Stream timed out; call again to continue." }
                }]
            }),
        );
        if let Ok(s) = serde_json::to_string(&final_resp) {
            yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
        }
    };

    Response::builder()
        .status(StatusCode::OK)
        .header("Content-Type", "text/event-stream")
        .header("Cache-Control", "no-cache")
        .body(Body::from_bytes_stream(
            futures::StreamExt::map(stream, |r| r.map(bytes::Bytes::from)),
        ))
}
|
|
|
|
// ── MCP protocol handlers ─────────────────────────────────────────
|
|
|
|
fn handle_initialize(id: Option<Value>, params: &Value) -> JsonRpcResponse {
|
|
let _protocol_version = params
|
|
.get("protocolVersion")
|
|
.and_then(|v| v.as_str())
|
|
.unwrap_or("2025-03-26");
|
|
|
|
JsonRpcResponse::success(
|
|
id,
|
|
json!({
|
|
"protocolVersion": "2025-03-26",
|
|
"capabilities": {
|
|
"tools": {}
|
|
},
|
|
"serverInfo": {
|
|
"name": "story-kit",
|
|
"version": "1.0.0"
|
|
}
|
|
}),
|
|
)
|
|
}
|
|
|
|
/// Handle MCP `tools/list`: return the static catalog of every tool this
/// server exposes, each with a JSON-Schema `inputSchema` describing its
/// arguments. The catalog is a literal — keep it in sync with the dispatch
/// table in `handle_tools_call`.
fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
    JsonRpcResponse::success(
        id,
        json!({
            "tools": [
                // ── Story workflow tools ──
                {
                    "name": "create_story",
                    "description": "Create a new story file with front matter in upcoming/. Returns the story_id.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "name": {
                                "type": "string",
                                "description": "Human-readable story name"
                            },
                            "user_story": {
                                "type": "string",
                                "description": "Optional user story text (As a..., I want..., so that...)"
                            },
                            "acceptance_criteria": {
                                "type": "array",
                                "items": { "type": "string" },
                                "description": "Optional list of acceptance criteria"
                            },
                            "commit": {
                                "type": "boolean",
                                "description": "If true, git-add and git-commit the new story file to the current branch"
                            }
                        },
                        "required": ["name"]
                    }
                },
                {
                    "name": "validate_stories",
                    "description": "Validate front matter on all current and upcoming story files.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                {
                    "name": "list_upcoming",
                    "description": "List all upcoming stories with their names and any parsing errors.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                {
                    "name": "get_story_todos",
                    "description": "Get unchecked acceptance criteria (todos) for a story file in current/.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (filename stem, e.g. '28_my_story')"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "record_tests",
                    "description": "Record test results for a story. Only one failing test at a time is allowed.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier"
                            },
                            "unit": {
                                "type": "array",
                                "items": {
                                    "type": "object",
                                    "properties": {
                                        "name": { "type": "string" },
                                        "status": { "type": "string", "enum": ["pass", "fail"] },
                                        "details": { "type": "string" }
                                    },
                                    "required": ["name", "status"]
                                },
                                "description": "Unit test results"
                            },
                            "integration": {
                                "type": "array",
                                "items": {
                                    "type": "object",
                                    "properties": {
                                        "name": { "type": "string" },
                                        "status": { "type": "string", "enum": ["pass", "fail"] },
                                        "details": { "type": "string" }
                                    },
                                    "required": ["name", "status"]
                                },
                                "description": "Integration test results"
                            }
                        },
                        "required": ["story_id", "unit", "integration"]
                    }
                },
                {
                    "name": "ensure_acceptance",
                    "description": "Check whether a story can be accepted. Returns acceptance status with reasons if blocked.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                // ── Agent lifecycle tools ──
                {
                    "name": "start_agent",
                    "description": "Start an agent for a story. Creates a worktree, runs setup, and spawns the agent process.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (e.g. '28_my_story')"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Agent name from project.toml config. If omitted, uses the first configured agent."
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "stop_agent",
                    "description": "Stop a running agent. Worktree is preserved for inspection.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Agent name to stop"
                            }
                        },
                        "required": ["story_id", "agent_name"]
                    }
                },
                {
                    "name": "list_agents",
                    "description": "List all agents with their current status, story assignment, and worktree path.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                {
                    "name": "get_agent_config",
                    "description": "Get the configured agent roster from project.toml (names, roles, models, allowed tools, limits).",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                {
                    "name": "reload_agent_config",
                    "description": "Reload project.toml and return the updated agent roster.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                // NOTE(review): the timing here ("up to 2 seconds", default 2000,
                // max 10000) does not match the SSE implementation's
                // unwrap_or(10000).min(30000), and the non-SSE poll path ignores
                // timeout_ms entirely — confirm which contract is intended.
                {
                    "name": "get_agent_output",
                    "description": "Poll recent output from a running agent. Subscribes to the agent's event stream and collects events for up to 2 seconds. Returns text output and status events. Call repeatedly to follow progress.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Agent name"
                            },
                            "timeout_ms": {
                                "type": "integer",
                                "description": "How long to wait for events in milliseconds (default: 2000, max: 10000)"
                            }
                        },
                        "required": ["story_id", "agent_name"]
                    }
                },
                {
                    "name": "wait_for_agent",
                    "description": "Block until the agent reaches a terminal state (completed, failed, stopped). Returns final status and summary including session_id, worktree_path, and any commits made. Use this instead of polling get_agent_output when you want to fire-and-forget and be notified on completion.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Agent name to wait for"
                            },
                            "timeout_ms": {
                                "type": "integer",
                                "description": "Maximum time to wait in milliseconds (default: 300000 = 5 minutes)"
                            }
                        },
                        "required": ["story_id", "agent_name"]
                    }
                },
                // ── Worktree tools ──
                {
                    "name": "create_worktree",
                    "description": "Create a git worktree for a story under .story_kit/worktrees/{story_id} with deterministic naming. Writes .mcp.json and runs component setup. Returns the worktree path.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (e.g. '42_my_story')"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "list_worktrees",
                    "description": "List all worktrees under .story_kit/worktrees/ for the current project.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                {
                    "name": "remove_worktree",
                    "description": "Remove a git worktree and its feature branch for a story.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "get_editor_command",
                    "description": "Get the open-in-editor command for a worktree. Returns a ready-to-paste shell command like 'zed /path/to/worktree'. Requires the editor preference to be configured via PUT /api/settings/editor.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "worktree_path": {
                                "type": "string",
                                "description": "Absolute path to the worktree directory"
                            }
                        },
                        "required": ["worktree_path"]
                    }
                },
                {
                    "name": "report_completion",
                    "description": "Report that the agent has finished work on a story. Rejects if the worktree has uncommitted changes. Runs acceptance gates (cargo clippy + tests) automatically. Stores the completion status and gate results on the agent record for retrieval by wait_for_agent or the supervisor. Call this as your final action after committing all changes.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (e.g. '44_my_story')"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Agent name (as configured in project.toml)"
                            },
                            "summary": {
                                "type": "string",
                                "description": "Brief summary of the work completed"
                            }
                        },
                        "required": ["story_id", "agent_name", "summary"]
                    }
                },
                // ── Story mutation / lifecycle tools ──
                {
                    "name": "accept_story",
                    "description": "Accept a story: moves it from current/ to archived/ and auto-commits to master.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (filename stem, e.g. '28_my_story')"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "check_criterion",
                    "description": "Check off an acceptance criterion (- [ ] → - [x]) by 0-based index among unchecked items, then auto-commit to master. Use get_story_todos to see the current list of unchecked criteria.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (filename stem, e.g. '28_my_story')"
                            },
                            "criterion_index": {
                                "type": "integer",
                                "description": "0-based index of the unchecked criterion to check off"
                            }
                        },
                        "required": ["story_id", "criterion_index"]
                    }
                },
                {
                    "name": "set_test_plan",
                    "description": "Update the test_plan front-matter field of a story file and auto-commit to master. Common values: 'pending', 'approved'.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (filename stem, e.g. '28_my_story')"
                            },
                            "status": {
                                "type": "string",
                                "description": "New value for the test_plan field (e.g. 'approved', 'pending')"
                            }
                        },
                        "required": ["story_id", "status"]
                    }
                },
                // ── Bug lifecycle tools ──
                {
                    "name": "create_bug",
                    "description": "Create a bug file in .story_kit/bugs/ with a deterministic filename and auto-commit to master. Returns the bug_id.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "name": {
                                "type": "string",
                                "description": "Short human-readable bug name"
                            },
                            "description": {
                                "type": "string",
                                "description": "Description of the bug"
                            },
                            "steps_to_reproduce": {
                                "type": "string",
                                "description": "Steps to reproduce the bug"
                            },
                            "actual_result": {
                                "type": "string",
                                "description": "What actually happens"
                            },
                            "expected_result": {
                                "type": "string",
                                "description": "What should happen"
                            },
                            "acceptance_criteria": {
                                "type": "array",
                                "items": { "type": "string" },
                                "description": "Optional list of acceptance criteria for the fix"
                            }
                        },
                        "required": ["name", "description", "steps_to_reproduce", "actual_result", "expected_result"]
                    }
                },
                {
                    "name": "list_bugs",
                    "description": "List all open bugs (files in .story_kit/bugs/ excluding archive/).",
                    "inputSchema": {
                        "type": "object",
                        "properties": {}
                    }
                },
                {
                    "name": "close_bug",
                    "description": "Move a bug from .story_kit/bugs/ (or current/) to .story_kit/bugs/archive/ and auto-commit to master.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "bug_id": {
                                "type": "string",
                                "description": "Bug identifier (e.g. 'bug-3-login_crash')"
                            }
                        },
                        "required": ["bug_id"]
                    }
                },
                // ── Mergemaster / QA tools ──
                {
                    "name": "merge_agent_work",
                    "description": "Trigger the mergemaster pipeline for a completed story: squash-merge the feature branch into master, run quality gates (cargo clippy, cargo test, pnpm build, pnpm test), archive the story from work/4_merge/ or work/2_current/ to work/5_archived/, and clean up the worktree and branch. Reports success/failure with details including any conflicts found and gate output.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (e.g. '52_story_mergemaster_agent_role')"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Optional: name of the coder agent whose work is being merged (for logging)"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "move_story_to_merge",
                    "description": "Move a story or bug from work/2_current/ to work/4_merge/ to queue it for the mergemaster pipeline. Auto-commits to master.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (filename stem, e.g. '28_my_story')"
                            }
                        },
                        "required": ["story_id"]
                    }
                },
                {
                    "name": "request_qa",
                    "description": "Trigger QA review of a completed story worktree: moves the item from work/2_current/ to work/3_qa/ and starts the qa agent to run quality gates, tests, and generate a manual testing plan.",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "story_id": {
                                "type": "string",
                                "description": "Story identifier (e.g. '53_story_qa_agent_role')"
                            },
                            "agent_name": {
                                "type": "string",
                                "description": "Agent name to use for QA (defaults to 'qa')"
                            }
                        },
                        "required": ["story_id"]
                    }
                }
            ]
        }),
    )
}
|
|
|
|
// ── Tool dispatch ─────────────────────────────────────────────────
|
|
|
|
/// Handle MCP `tools/call`: dispatch to the tool implementation by name and
/// wrap the `Result<String, String>` in MCP content format. Tool-level
/// failures are reported as a *successful* JSON-RPC response carrying
/// `isError: true`, per the MCP tool-result convention — only protocol-level
/// problems produce a JSON-RPC error.
async fn handle_tools_call(
    id: Option<Value>,
    params: &Value,
    ctx: &AppContext,
) -> JsonRpcResponse {
    let tool_name = params
        .get("name")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    let args = params.get("arguments").cloned().unwrap_or(json!({}));

    let result = match tool_name {
        // Workflow tools
        "create_story" => tool_create_story(&args, ctx),
        "validate_stories" => tool_validate_stories(ctx),
        "list_upcoming" => tool_list_upcoming(ctx),
        "get_story_todos" => tool_get_story_todos(&args, ctx),
        "record_tests" => tool_record_tests(&args, ctx),
        "ensure_acceptance" => tool_ensure_acceptance(&args, ctx),
        // Agent tools (async)
        "start_agent" => tool_start_agent(&args, ctx).await,
        "stop_agent" => tool_stop_agent(&args, ctx).await,
        "list_agents" => tool_list_agents(ctx),
        "get_agent_config" => tool_get_agent_config(ctx),
        // reload_agent_config deliberately aliases get_agent_config:
        // tool_get_agent_config re-reads project.toml from disk on every
        // call, so "get" and "reload" are the same operation.
        "reload_agent_config" => tool_get_agent_config(ctx),
        "get_agent_output" => tool_get_agent_output_poll(&args, ctx).await,
        "wait_for_agent" => tool_wait_for_agent(&args, ctx).await,
        // Worktree tools
        "create_worktree" => tool_create_worktree(&args, ctx).await,
        "list_worktrees" => tool_list_worktrees(ctx),
        "remove_worktree" => tool_remove_worktree(&args, ctx).await,
        // Editor tools
        "get_editor_command" => tool_get_editor_command(&args, ctx),
        // Completion reporting
        "report_completion" => tool_report_completion(&args, ctx).await,
        // Lifecycle tools
        "accept_story" => tool_accept_story(&args, ctx),
        // Story mutation tools (auto-commit to master)
        "check_criterion" => tool_check_criterion(&args, ctx),
        "set_test_plan" => tool_set_test_plan(&args, ctx),
        // Bug lifecycle tools
        "create_bug" => tool_create_bug(&args, ctx),
        "list_bugs" => tool_list_bugs(ctx),
        "close_bug" => tool_close_bug(&args, ctx),
        // Mergemaster tools
        "merge_agent_work" => tool_merge_agent_work(&args, ctx).await,
        "move_story_to_merge" => tool_move_story_to_merge(&args, ctx),
        // QA tools
        "request_qa" => tool_request_qa(&args, ctx).await,
        _ => Err(format!("Unknown tool: {tool_name}")),
    };

    match result {
        Ok(content) => JsonRpcResponse::success(
            id,
            json!({
                "content": [{ "type": "text", "text": content }]
            }),
        ),
        Err(msg) => JsonRpcResponse::success(
            id,
            json!({
                "content": [{ "type": "text", "text": msg }],
                "isError": true
            }),
        ),
    }
}
|
|
|
|
// ── Tool implementations ──────────────────────────────────────────
|
|
|
|
fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let name = args
|
|
.get("name")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: name")?;
|
|
let user_story = args.get("user_story").and_then(|v| v.as_str());
|
|
let acceptance_criteria: Option<Vec<String>> = args
|
|
.get("acceptance_criteria")
|
|
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
|
// Spike 61: write the file only — the filesystem watcher detects the new
|
|
// .md file in work/1_upcoming/ and auto-commits with a deterministic message.
|
|
let commit = false;
|
|
|
|
let root = ctx.state.get_project_root()?;
|
|
let story_id = create_story_file(
|
|
&root,
|
|
name,
|
|
user_story,
|
|
acceptance_criteria.as_deref(),
|
|
commit,
|
|
)?;
|
|
|
|
Ok(format!("Created story: {story_id}"))
|
|
}
|
|
|
|
fn tool_validate_stories(ctx: &AppContext) -> Result<String, String> {
|
|
let root = ctx.state.get_project_root()?;
|
|
let results = validate_story_dirs(&root)?;
|
|
serde_json::to_string_pretty(&json!(results
|
|
.iter()
|
|
.map(|r| json!({
|
|
"story_id": r.story_id,
|
|
"valid": r.valid,
|
|
"error": r.error,
|
|
}))
|
|
.collect::<Vec<_>>()))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_list_upcoming(ctx: &AppContext) -> Result<String, String> {
|
|
let stories = load_upcoming_stories(ctx)?;
|
|
serde_json::to_string_pretty(&json!(stories
|
|
.iter()
|
|
.map(|s| json!({
|
|
"story_id": s.story_id,
|
|
"name": s.name,
|
|
"error": s.error,
|
|
}))
|
|
.collect::<Vec<_>>()))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let root = ctx.state.get_project_root()?;
|
|
let current_dir = root.join(".story_kit").join("work").join("2_current");
|
|
let filepath = current_dir.join(format!("{story_id}.md"));
|
|
|
|
if !filepath.exists() {
|
|
return Err(format!("Story file not found: {story_id}.md"));
|
|
}
|
|
|
|
let contents = fs::read_to_string(&filepath)
|
|
.map_err(|e| format!("Failed to read story file: {e}"))?;
|
|
|
|
let story_name = parse_front_matter(&contents)
|
|
.ok()
|
|
.and_then(|m| m.name);
|
|
let todos = parse_unchecked_todos(&contents);
|
|
|
|
serde_json::to_string_pretty(&json!({
|
|
"story_id": story_id,
|
|
"story_name": story_name,
|
|
"todos": todos,
|
|
}))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let unit = parse_test_cases(args.get("unit"))?;
|
|
let integration = parse_test_cases(args.get("integration"))?;
|
|
|
|
let mut workflow = ctx
|
|
.workflow
|
|
.lock()
|
|
.map_err(|e| format!("Lock error: {e}"))?;
|
|
|
|
workflow.record_test_results_validated(story_id.to_string(), unit, integration)?;
|
|
|
|
Ok("Test results recorded.".to_string())
|
|
}
|
|
|
|
fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let workflow = ctx
|
|
.workflow
|
|
.lock()
|
|
.map_err(|e| format!("Lock error: {e}"))?;
|
|
|
|
let empty_results = Default::default();
|
|
let results = workflow.results.get(story_id).unwrap_or(&empty_results);
|
|
let coverage = workflow.coverage.get(story_id);
|
|
let decision = evaluate_acceptance_with_coverage(results, coverage);
|
|
|
|
if decision.can_accept {
|
|
Ok("Story can be accepted. All gates pass.".to_string())
|
|
} else {
|
|
let mut parts = decision.reasons;
|
|
if let Some(w) = decision.warning {
|
|
parts.push(w);
|
|
}
|
|
Err(format!("Acceptance blocked: {}", parts.join("; ")))
|
|
}
|
|
}
|
|
|
|
// ── Agent tool implementations ────────────────────────────────────
|
|
|
|
async fn tool_start_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
let agent_name = args.get("agent_name").and_then(|v| v.as_str());
|
|
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
let info = ctx
|
|
.agents
|
|
.start_agent(&project_root, story_id, agent_name)
|
|
.await?;
|
|
|
|
serde_json::to_string_pretty(&json!({
|
|
"story_id": info.story_id,
|
|
"agent_name": info.agent_name,
|
|
"status": info.status.to_string(),
|
|
"session_id": info.session_id,
|
|
"worktree_path": info.worktree_path,
|
|
}))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
async fn tool_stop_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
let agent_name = args
|
|
.get("agent_name")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: agent_name")?;
|
|
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
ctx.agents
|
|
.stop_agent(&project_root, story_id, agent_name)
|
|
.await?;
|
|
|
|
Ok(format!("Agent '{agent_name}' for story '{story_id}' stopped."))
|
|
}
|
|
|
|
fn tool_list_agents(ctx: &AppContext) -> Result<String, String> {
|
|
let agents = ctx.agents.list_agents()?;
|
|
serde_json::to_string_pretty(&json!(agents
|
|
.iter()
|
|
.map(|a| json!({
|
|
"story_id": a.story_id,
|
|
"agent_name": a.agent_name,
|
|
"status": a.status.to_string(),
|
|
"session_id": a.session_id,
|
|
"worktree_path": a.worktree_path,
|
|
}))
|
|
.collect::<Vec<_>>()))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
async fn tool_get_agent_output_poll(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
let agent_name = args
|
|
.get("agent_name")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: agent_name")?;
|
|
|
|
// Drain all accumulated events since the last poll.
|
|
let drained = ctx.agents.drain_events(story_id, agent_name)?;
|
|
|
|
let done = drained.iter().any(|e| {
|
|
matches!(
|
|
e,
|
|
crate::agents::AgentEvent::Done { .. } | crate::agents::AgentEvent::Error { .. }
|
|
)
|
|
});
|
|
|
|
let events: Vec<serde_json::Value> = drained
|
|
.into_iter()
|
|
.filter_map(|e| serde_json::to_value(&e).ok())
|
|
.collect();
|
|
|
|
serde_json::to_string_pretty(&json!({
|
|
"events": events,
|
|
"done": done,
|
|
"event_count": events.len(),
|
|
"message": if done { "Agent stream ended." } else if events.is_empty() { "No new events. Call again to continue." } else { "Events returned. Call again to continue." }
|
|
}))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_get_agent_config(ctx: &AppContext) -> Result<String, String> {
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
let config = ProjectConfig::load(&project_root)?;
|
|
serde_json::to_string_pretty(&json!(config
|
|
.agent
|
|
.iter()
|
|
.map(|a| json!({
|
|
"name": a.name,
|
|
"role": a.role,
|
|
"model": a.model,
|
|
"allowed_tools": a.allowed_tools,
|
|
"max_turns": a.max_turns,
|
|
"max_budget_usd": a.max_budget_usd,
|
|
}))
|
|
.collect::<Vec<_>>()))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
/// MCP tool: block until the given agent finishes or the timeout elapses.
///
/// Required: `story_id`, `agent_name`. Optional: `timeout_ms`
/// (default 300_000 = 5 minutes). On return, the agent snapshot is
/// enriched with the worktree's new commits (when both worktree path and
/// base branch are known) and the agent's self-reported completion, if any.
async fn tool_wait_for_agent(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let agent_name = args
        .get("agent_name")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: agent_name")?;
    let timeout_ms = args
        .get("timeout_ms")
        .and_then(|v| v.as_u64())
        .unwrap_or(300_000); // default: 5 minutes

    let info = ctx
        .agents
        .wait_for_agent(story_id, agent_name, timeout_ms)
        .await?;

    // Only query git when both pieces of context are present; `commits`
    // stays null in the response JSON otherwise.
    let commits = match (&info.worktree_path, &info.base_branch) {
        (Some(wt_path), Some(base)) => get_worktree_commits(wt_path, base).await,
        _ => None,
    };

    // Null for agents that never called report_completion.
    let completion = info.completion.as_ref().map(|r| json!({
        "summary": r.summary,
        "gates_passed": r.gates_passed,
        "gate_output": r.gate_output,
    }));

    serde_json::to_string_pretty(&json!({
        "story_id": info.story_id,
        "agent_name": info.agent_name,
        "status": info.status.to_string(),
        "session_id": info.session_id,
        "worktree_path": info.worktree_path,
        "base_branch": info.base_branch,
        "commits": commits,
        "completion": completion,
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
|
|
|
|
// ── Worktree tool implementations ────────────────────────────────
|
|
|
|
async fn tool_create_worktree(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
let info = ctx.agents.create_worktree(&project_root, story_id).await?;
|
|
|
|
serde_json::to_string_pretty(&json!({
|
|
"story_id": story_id,
|
|
"worktree_path": info.path.to_string_lossy(),
|
|
"branch": info.branch,
|
|
"base_branch": info.base_branch,
|
|
}))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_list_worktrees(ctx: &AppContext) -> Result<String, String> {
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
let entries = worktree::list_worktrees(&project_root)?;
|
|
|
|
serde_json::to_string_pretty(&json!(entries
|
|
.iter()
|
|
.map(|e| json!({
|
|
"story_id": e.story_id,
|
|
"path": e.path.to_string_lossy(),
|
|
}))
|
|
.collect::<Vec<_>>()))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
async fn tool_remove_worktree(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
let config = ProjectConfig::load(&project_root)?;
|
|
worktree::remove_worktree_by_story_id(&project_root, story_id, &config).await?;
|
|
|
|
Ok(format!("Worktree for story '{story_id}' removed."))
|
|
}
|
|
|
|
// ── Editor tool implementations ───────────────────────────────────
|
|
|
|
/// MCP tool: build the shell command that opens a worktree in the user's editor.
///
/// Requires `worktree_path`; the editor binary comes from the settings store
/// and must have been configured beforehand (via PUT /api/settings/editor).
fn tool_get_editor_command(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let worktree_path = args
        .get("worktree_path")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: worktree_path")?;

    let editor = get_editor_command_from_store(ctx)
        .ok_or_else(|| "No editor configured. Set one via PUT /api/settings/editor.".to_string())?;

    // NOTE(review): the path is interpolated unquoted, so a path containing
    // spaces would need shell quoting by the consumer — confirm whether
    // callers run this string through a shell.
    Ok(format!("{editor} {worktree_path}"))
}
|
|
|
|
async fn tool_report_completion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
let agent_name = args
|
|
.get("agent_name")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: agent_name")?;
|
|
let summary = args
|
|
.get("summary")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: summary")?;
|
|
|
|
let report = ctx
|
|
.agents
|
|
.report_completion(story_id, agent_name, summary)
|
|
.await?;
|
|
|
|
serde_json::to_string_pretty(&json!({
|
|
"story_id": story_id,
|
|
"agent_name": agent_name,
|
|
"summary": report.summary,
|
|
"gates_passed": report.gates_passed,
|
|
"gate_output": report.gate_output,
|
|
"message": if report.gates_passed {
|
|
"Completion accepted. All acceptance gates passed."
|
|
} else {
|
|
"Completion recorded but acceptance gates failed. Review gate_output for details."
|
|
}
|
|
}))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
move_story_to_archived(&project_root, story_id)?;
|
|
|
|
Ok(format!(
|
|
"Story '{story_id}' accepted, moved to archived/, and committed to master."
|
|
))
|
|
}
|
|
|
|
fn tool_check_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
let criterion_index = args
|
|
.get("criterion_index")
|
|
.and_then(|v| v.as_u64())
|
|
.ok_or("Missing required argument: criterion_index")? as usize;
|
|
|
|
let root = ctx.state.get_project_root()?;
|
|
check_criterion_in_file(&root, story_id, criterion_index)?;
|
|
|
|
Ok(format!(
|
|
"Criterion {criterion_index} checked for story '{story_id}'. Committed to master."
|
|
))
|
|
}
|
|
|
|
fn tool_set_test_plan(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
let status = args
|
|
.get("status")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: status")?;
|
|
|
|
let root = ctx.state.get_project_root()?;
|
|
set_test_plan_in_file(&root, story_id, status)?;
|
|
|
|
Ok(format!(
|
|
"test_plan set to '{status}' for story '{story_id}'. Committed to master."
|
|
))
|
|
}
|
|
|
|
// ── Bug lifecycle tool implementations ───────────────────────────
|
|
|
|
fn tool_create_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let name = args
|
|
.get("name")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: name")?;
|
|
let description = args
|
|
.get("description")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: description")?;
|
|
let steps_to_reproduce = args
|
|
.get("steps_to_reproduce")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: steps_to_reproduce")?;
|
|
let actual_result = args
|
|
.get("actual_result")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: actual_result")?;
|
|
let expected_result = args
|
|
.get("expected_result")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: expected_result")?;
|
|
let acceptance_criteria: Option<Vec<String>> = args
|
|
.get("acceptance_criteria")
|
|
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
|
|
|
let root = ctx.state.get_project_root()?;
|
|
let bug_id = create_bug_file(
|
|
&root,
|
|
name,
|
|
description,
|
|
steps_to_reproduce,
|
|
actual_result,
|
|
expected_result,
|
|
acceptance_criteria.as_deref(),
|
|
)?;
|
|
|
|
Ok(format!("Created bug: {bug_id}"))
|
|
}
|
|
|
|
fn tool_list_bugs(ctx: &AppContext) -> Result<String, String> {
|
|
let root = ctx.state.get_project_root()?;
|
|
let bugs = list_bug_files(&root)?;
|
|
serde_json::to_string_pretty(&json!(bugs
|
|
.iter()
|
|
.map(|(id, name)| json!({ "bug_id": id, "name": name }))
|
|
.collect::<Vec<_>>()))
|
|
.map_err(|e| format!("Serialization error: {e}"))
|
|
}
|
|
|
|
fn tool_close_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let bug_id = args
|
|
.get("bug_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: bug_id")?;
|
|
|
|
let root = ctx.agents.get_project_root(&ctx.state)?;
|
|
close_bug_to_archive(&root, bug_id)?;
|
|
|
|
Ok(format!(
|
|
"Bug '{bug_id}' closed, moved to bugs/archive/, and committed to master."
|
|
))
|
|
}
|
|
|
|
// ── Mergemaster tool implementations ─────────────────────────────
|
|
|
|
/// MCP tool: merge an agent's worktree branch back and report the outcome.
///
/// Required: `story_id`. Optional: `agent_name`.
/// NOTE(review): `agent_name` is only echoed back in the response — the
/// merge itself is keyed purely on `story_id`. Confirm that is intentional.
async fn tool_merge_agent_work(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let agent_name = args.get("agent_name").and_then(|v| v.as_str());

    let project_root = ctx.agents.get_project_root(&ctx.state)?;
    let report = ctx.agents.merge_agent_work(&project_root, story_id).await?;

    // Human-readable verdict derived from the report flags, in priority
    // order: full success, conflicts, merged-but-gates-failed, then a
    // generic failure.
    let status_msg = if report.success && report.gates_passed {
        "Merge complete: all quality gates passed. Story archived and worktree cleaned up."
    } else if report.had_conflicts {
        "Merge failed: conflicts detected. Merge was aborted. Resolve conflicts manually and retry."
    } else if report.success && !report.gates_passed {
        "Merge committed but quality gates failed. Review gate_output and fix issues before re-running."
    } else {
        "Merge failed. Review gate_output for details."
    };

    serde_json::to_string_pretty(&json!({
        "story_id": story_id,
        "agent_name": agent_name,
        "success": report.success,
        "had_conflicts": report.had_conflicts,
        "conflict_details": report.conflict_details,
        "gates_passed": report.gates_passed,
        "gate_output": report.gate_output,
        "worktree_cleaned_up": report.worktree_cleaned_up,
        "story_archived": report.story_archived,
        "message": status_msg,
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
|
|
|
|
fn tool_move_story_to_merge(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
let story_id = args
|
|
.get("story_id")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Missing required argument: story_id")?;
|
|
|
|
let project_root = ctx.agents.get_project_root(&ctx.state)?;
|
|
move_story_to_merge(&project_root, story_id)?;
|
|
|
|
Ok(format!(
|
|
"Story '{story_id}' moved to work/4_merge/ and committed. Ready for mergemaster."
|
|
))
|
|
}
|
|
|
|
// ── QA tool implementations ───────────────────────────────────────
|
|
|
|
/// MCP tool: move a story into QA and start the QA agent on its worktree.
///
/// Required: `story_id`. Optional: `agent_name` (defaults to "qa").
async fn tool_request_qa(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let agent_name = args
        .get("agent_name")
        .and_then(|v| v.as_str())
        .unwrap_or("qa");

    let project_root = ctx.agents.get_project_root(&ctx.state)?;

    // Move story from work/2_current/ to work/3_qa/
    move_story_to_qa(&project_root, story_id)?;

    // Start the QA agent on the story worktree.
    // NOTE(review): if start_agent fails here, the story has already been
    // moved to 3_qa — confirm callers can recover from that partial state.
    let info = ctx
        .agents
        .start_agent(&project_root, story_id, Some(agent_name))
        .await?;

    serde_json::to_string_pretty(&json!({
        "story_id": info.story_id,
        "agent_name": info.agent_name,
        "status": info.status.to_string(),
        "worktree_path": info.worktree_path,
        "message": format!(
            "Story '{story_id}' moved to work/3_qa/ and QA agent '{}' started.",
            info.agent_name
        ),
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
|
|
|
|
/// Run `git log <base>..HEAD --oneline` in the worktree and return the commit
/// summaries.
///
/// Returns `None` when git cannot be spawned, exits non-zero, or emits
/// non-UTF-8 output. A worktree with no new commits yields `Some(vec![])`
/// (an empty list), not `None`.
async fn get_worktree_commits(worktree_path: &str, base_branch: &str) -> Option<Vec<String>> {
    // Own the inputs so they can move into the blocking task below.
    let wt = worktree_path.to_string();
    let base = base_branch.to_string();
    // `git` is a blocking subprocess; keep it off the async executor threads.
    tokio::task::spawn_blocking(move || {
        let output = std::process::Command::new("git")
            .args(["log", &format!("{base}..HEAD"), "--oneline"])
            .current_dir(&wt)
            .output()
            .ok()?;

        if output.status.success() {
            let lines: Vec<String> = String::from_utf8(output.stdout)
                .ok()?
                .lines()
                .filter(|l| !l.is_empty())
                .map(|l| l.to_string())
                .collect();
            Some(lines)
        } else {
            None
        }
    })
    .await
    .ok() // JoinError (panicked/cancelled task) collapses to None
    .flatten()
}
|
|
|
|
// ── Helpers ───────────────────────────────────────────────────────
|
|
|
|
fn parse_test_cases(value: Option<&Value>) -> Result<Vec<TestCaseResult>, String> {
|
|
let arr = match value {
|
|
Some(Value::Array(a)) => a,
|
|
Some(Value::Null) | None => return Ok(Vec::new()),
|
|
_ => return Err("Expected array for test cases".to_string()),
|
|
};
|
|
|
|
arr.iter()
|
|
.map(|item| {
|
|
let name = item
|
|
.get("name")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Test case missing 'name'")?
|
|
.to_string();
|
|
let status_str = item
|
|
.get("status")
|
|
.and_then(|v| v.as_str())
|
|
.ok_or("Test case missing 'status'")?;
|
|
let status = match status_str {
|
|
"pass" => TestStatus::Pass,
|
|
"fail" => TestStatus::Fail,
|
|
other => return Err(format!("Invalid test status '{other}'. Use 'pass' or 'fail'.")),
|
|
};
|
|
let details = item.get("details").and_then(|v| v.as_str()).map(String::from);
|
|
Ok(TestCaseResult {
|
|
name,
|
|
status,
|
|
details,
|
|
})
|
|
})
|
|
.collect()
|
|
}
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
use crate::http::context::AppContext;
|
|
use crate::store::StoreOps;
|
|
|
|
// ── Unit tests ────────────────────────────────────────────────
|
|
|
|
#[test]
|
|
fn parse_test_cases_empty() {
|
|
let result = parse_test_cases(None).unwrap();
|
|
assert!(result.is_empty());
|
|
}
|
|
|
|
#[test]
|
|
fn parse_test_cases_valid() {
|
|
let input = json!([
|
|
{"name": "test1", "status": "pass"},
|
|
{"name": "test2", "status": "fail", "details": "assertion failed"}
|
|
]);
|
|
let result = parse_test_cases(Some(&input)).unwrap();
|
|
assert_eq!(result.len(), 2);
|
|
assert_eq!(result[0].status, TestStatus::Pass);
|
|
assert_eq!(result[1].status, TestStatus::Fail);
|
|
assert_eq!(result[1].details, Some("assertion failed".to_string()));
|
|
}
|
|
|
|
#[test]
|
|
fn parse_test_cases_invalid_status() {
|
|
let input = json!([{"name": "t", "status": "maybe"}]);
|
|
assert!(parse_test_cases(Some(&input)).is_err());
|
|
}
|
|
|
|
#[test]
|
|
fn json_rpc_response_serializes_success() {
|
|
let resp = JsonRpcResponse::success(Some(json!(1)), json!({"ok": true}));
|
|
let s = serde_json::to_string(&resp).unwrap();
|
|
assert!(s.contains("\"result\""));
|
|
assert!(!s.contains("\"error\""));
|
|
}
|
|
|
|
#[test]
|
|
fn json_rpc_response_serializes_error() {
|
|
let resp = JsonRpcResponse::error(Some(json!(1)), -32600, "bad".into());
|
|
let s = serde_json::to_string(&resp).unwrap();
|
|
assert!(s.contains("\"error\""));
|
|
assert!(!s.contains("\"result\""));
|
|
}
|
|
|
|
// ── Protocol handler integration tests ────────────────────────
|
|
|
|
#[test]
|
|
fn initialize_returns_capabilities() {
|
|
let resp = handle_initialize(
|
|
Some(json!(1)),
|
|
&json!({"protocolVersion": "2025-03-26", "capabilities": {}, "clientInfo": {"name": "test", "version": "1.0"}}),
|
|
);
|
|
let result = resp.result.unwrap();
|
|
assert_eq!(result["protocolVersion"], "2025-03-26");
|
|
assert!(result["capabilities"]["tools"].is_object());
|
|
assert_eq!(result["serverInfo"]["name"], "story-kit");
|
|
}
|
|
|
|
#[test]
|
|
fn tools_list_returns_all_tools() {
|
|
let resp = handle_tools_list(Some(json!(2)));
|
|
let result = resp.result.unwrap();
|
|
let tools = result["tools"].as_array().unwrap();
|
|
let names: Vec<&str> = tools.iter().map(|t| t["name"].as_str().unwrap()).collect();
|
|
assert!(names.contains(&"create_story"));
|
|
assert!(names.contains(&"validate_stories"));
|
|
assert!(names.contains(&"list_upcoming"));
|
|
assert!(names.contains(&"get_story_todos"));
|
|
assert!(names.contains(&"record_tests"));
|
|
assert!(names.contains(&"ensure_acceptance"));
|
|
assert!(names.contains(&"start_agent"));
|
|
assert!(names.contains(&"stop_agent"));
|
|
assert!(names.contains(&"list_agents"));
|
|
assert!(names.contains(&"get_agent_config"));
|
|
assert!(names.contains(&"reload_agent_config"));
|
|
assert!(names.contains(&"get_agent_output"));
|
|
assert!(names.contains(&"wait_for_agent"));
|
|
assert!(names.contains(&"create_worktree"));
|
|
assert!(names.contains(&"list_worktrees"));
|
|
assert!(names.contains(&"remove_worktree"));
|
|
assert!(names.contains(&"get_editor_command"));
|
|
assert!(names.contains(&"report_completion"));
|
|
assert!(names.contains(&"accept_story"));
|
|
assert!(names.contains(&"check_criterion"));
|
|
assert!(names.contains(&"set_test_plan"));
|
|
assert!(names.contains(&"create_bug"));
|
|
assert!(names.contains(&"list_bugs"));
|
|
assert!(names.contains(&"close_bug"));
|
|
assert!(names.contains(&"merge_agent_work"));
|
|
assert!(names.contains(&"move_story_to_merge"));
|
|
assert!(names.contains(&"request_qa"));
|
|
assert_eq!(tools.len(), 27);
|
|
}
|
|
|
|
#[test]
|
|
fn tools_list_schemas_have_required_fields() {
|
|
let resp = handle_tools_list(Some(json!(1)));
|
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
|
for tool in &tools {
|
|
assert!(tool["name"].is_string(), "tool missing name");
|
|
assert!(tool["description"].is_string(), "tool missing description");
|
|
assert!(tool["inputSchema"].is_object(), "tool missing inputSchema");
|
|
assert_eq!(tool["inputSchema"]["type"], "object");
|
|
}
|
|
}
|
|
|
|
fn test_ctx(dir: &std::path::Path) -> AppContext {
|
|
AppContext::new_test(dir.to_path_buf())
|
|
}
|
|
|
|
#[test]
|
|
fn tool_validate_stories_empty_project() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_validate_stories(&ctx).unwrap();
|
|
let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
|
|
assert!(parsed.is_empty());
|
|
}
|
|
|
|
#[test]
|
|
fn tool_create_story_and_list_upcoming() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
// No git repo needed: spike 61 — create_story just writes the file;
|
|
// the filesystem watcher handles the commit asynchronously.
|
|
let ctx = test_ctx(tmp.path());
|
|
|
|
let result = tool_create_story(
|
|
&json!({"name": "Test Story", "acceptance_criteria": ["AC1", "AC2"]}),
|
|
&ctx,
|
|
)
|
|
.unwrap();
|
|
assert!(result.contains("Created story:"));
|
|
|
|
// List should return it
|
|
let list = tool_list_upcoming(&ctx).unwrap();
|
|
let parsed: Vec<Value> = serde_json::from_str(&list).unwrap();
|
|
assert_eq!(parsed.len(), 1);
|
|
assert_eq!(parsed[0]["name"], "Test Story");
|
|
}
|
|
|
|
#[test]
|
|
fn tool_create_story_rejects_empty_name() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_create_story(&json!({"name": "!!!"}), &ctx);
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("alphanumeric"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_create_story_missing_name() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_create_story(&json!({}), &ctx);
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("Missing required argument"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_get_story_todos_missing_file() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_get_story_todos(&json!({"story_id": "99_nonexistent"}), &ctx);
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("not found"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_get_story_todos_returns_unchecked() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let current_dir = tmp.path().join(".story_kit").join("work").join("2_current");
|
|
fs::create_dir_all(¤t_dir).unwrap();
|
|
fs::write(
|
|
current_dir.join("1_test.md"),
|
|
"---\nname: Test\ntest_plan: approved\n---\n## AC\n- [ ] First\n- [x] Done\n- [ ] Second\n",
|
|
)
|
|
.unwrap();
|
|
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_get_story_todos(&json!({"story_id": "1_test"}), &ctx).unwrap();
|
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
|
assert_eq!(parsed["todos"].as_array().unwrap().len(), 2);
|
|
assert_eq!(parsed["story_name"], "Test");
|
|
}
|
|
|
|
#[test]
|
|
fn tool_record_tests_and_ensure_acceptance() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
|
|
// Record passing tests
|
|
let result = tool_record_tests(
|
|
&json!({
|
|
"story_id": "1_test",
|
|
"unit": [{"name": "u1", "status": "pass"}],
|
|
"integration": [{"name": "i1", "status": "pass"}]
|
|
}),
|
|
&ctx,
|
|
)
|
|
.unwrap();
|
|
assert!(result.contains("recorded"));
|
|
|
|
// Should be acceptable
|
|
let result = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx).unwrap();
|
|
assert!(result.contains("All gates pass"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_ensure_acceptance_blocks_on_failures() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
|
|
tool_record_tests(
|
|
&json!({
|
|
"story_id": "1_test",
|
|
"unit": [{"name": "u1", "status": "fail"}],
|
|
"integration": []
|
|
}),
|
|
&ctx,
|
|
)
|
|
.unwrap();
|
|
|
|
let result = tool_ensure_acceptance(&json!({"story_id": "1_test"}), &ctx);
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("blocked"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_list_agents_empty() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_list_agents(&ctx).unwrap();
|
|
let parsed: Vec<Value> = serde_json::from_str(&result).unwrap();
|
|
assert!(parsed.is_empty());
|
|
}
|
|
|
|
#[test]
|
|
fn handle_tools_call_unknown_tool() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let rt = tokio::runtime::Runtime::new().unwrap();
|
|
let resp = rt.block_on(handle_tools_call(
|
|
Some(json!(1)),
|
|
&json!({"name": "bogus_tool", "arguments": {}}),
|
|
&ctx,
|
|
));
|
|
let result = resp.result.unwrap();
|
|
assert_eq!(result["isError"], true);
|
|
assert!(result["content"][0]["text"].as_str().unwrap().contains("Unknown tool"));
|
|
}
|
|
|
|
#[test]
|
|
fn to_sse_response_wraps_in_data_prefix() {
|
|
let resp = JsonRpcResponse::success(Some(json!(1)), json!({"ok": true}));
|
|
let http_resp = to_sse_response(resp);
|
|
assert_eq!(
|
|
http_resp.headers().get("content-type").unwrap(),
|
|
"text/event-stream"
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn wants_sse_detects_accept_header() {
|
|
// Can't easily construct a Request in tests without TestClient,
|
|
// so test the logic indirectly via to_sse_response format
|
|
let resp = JsonRpcResponse::success(Some(json!(1)), json!("ok"));
|
|
let json_resp = to_json_response(resp);
|
|
assert_eq!(
|
|
json_resp.headers().get("content-type").unwrap(),
|
|
"application/json"
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn wait_for_agent_tool_in_list() {
|
|
let resp = handle_tools_list(Some(json!(1)));
|
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
|
let wait_tool = tools.iter().find(|t| t["name"] == "wait_for_agent");
|
|
assert!(wait_tool.is_some(), "wait_for_agent missing from tools list");
|
|
let t = wait_tool.unwrap();
|
|
assert!(t["description"].as_str().unwrap().contains("block") || t["description"].as_str().unwrap().contains("Block"));
|
|
let required = t["inputSchema"]["required"].as_array().unwrap();
|
|
let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
|
|
assert!(req_names.contains(&"story_id"));
|
|
assert!(req_names.contains(&"agent_name"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn wait_for_agent_tool_missing_story_id() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_wait_for_agent(&json!({"agent_name": "bot"}), &ctx).await;
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("story_id"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn wait_for_agent_tool_missing_agent_name() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_wait_for_agent(&json!({"story_id": "1_test"}), &ctx).await;
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("agent_name"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn wait_for_agent_tool_nonexistent_agent_returns_error() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result =
|
|
tool_wait_for_agent(&json!({"story_id": "99_nope", "agent_name": "bot", "timeout_ms": 50}), &ctx)
|
|
.await;
|
|
// No agent registered — should error
|
|
assert!(result.is_err());
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn wait_for_agent_tool_returns_completed_agent() {
|
|
use crate::agents::AgentStatus;
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
ctx.agents
|
|
.inject_test_agent("41_story", "worker", AgentStatus::Completed);
|
|
|
|
let result = tool_wait_for_agent(
|
|
&json!({"story_id": "41_story", "agent_name": "worker"}),
|
|
&ctx,
|
|
)
|
|
.await
|
|
.unwrap();
|
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
|
assert_eq!(parsed["status"], "completed");
|
|
assert_eq!(parsed["story_id"], "41_story");
|
|
assert_eq!(parsed["agent_name"], "worker");
|
|
// commits key present (may be null since no real worktree)
|
|
assert!(parsed.get("commits").is_some());
|
|
// completion key present (null for agents that didn't call report_completion)
|
|
assert!(parsed.get("completion").is_some());
|
|
}
|
|
|
|
// ── report_completion tool tests ──────────────────────────────
|
|
|
|
#[test]
|
|
fn report_completion_in_tools_list() {
|
|
let resp = handle_tools_list(Some(json!(1)));
|
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
|
let tool = tools
|
|
.iter()
|
|
.find(|t| t["name"] == "report_completion")
|
|
.expect("report_completion missing from tools list");
|
|
// Schema has required fields
|
|
let required = tool["inputSchema"]["required"].as_array().unwrap();
|
|
let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
|
|
assert!(req_names.contains(&"story_id"));
|
|
assert!(req_names.contains(&"agent_name"));
|
|
assert!(req_names.contains(&"summary"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn report_completion_tool_missing_story_id() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result =
|
|
tool_report_completion(&json!({"agent_name": "bot", "summary": "done"}), &ctx).await;
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("story_id"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn report_completion_tool_missing_agent_name() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result =
|
|
tool_report_completion(&json!({"story_id": "44_test", "summary": "done"}), &ctx).await;
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("agent_name"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn report_completion_tool_missing_summary() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_report_completion(
|
|
&json!({"story_id": "44_test", "agent_name": "bot"}),
|
|
&ctx,
|
|
)
|
|
.await;
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("summary"));
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn report_completion_tool_nonexistent_agent() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_report_completion(
|
|
&json!({"story_id": "99_nope", "agent_name": "bot", "summary": "done"}),
|
|
&ctx,
|
|
)
|
|
.await;
|
|
assert!(result.is_err());
|
|
let msg = result.unwrap_err();
|
|
assert!(msg.contains("No agent"), "unexpected: {msg}");
|
|
}
|
|
|
|
// ── Editor command tool tests ─────────────────────────────────
|
|
|
|
#[test]
|
|
fn tool_get_editor_command_missing_worktree_path() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_get_editor_command(&json!({}), &ctx);
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("worktree_path"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_get_editor_command_no_editor_configured() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_get_editor_command(
|
|
&json!({"worktree_path": "/some/path"}),
|
|
&ctx,
|
|
);
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("No editor configured"));
|
|
}
|
|
|
|
#[test]
|
|
fn tool_get_editor_command_formats_correctly() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
ctx.store.set("editor_command", json!("zed"));
|
|
|
|
let result = tool_get_editor_command(
|
|
&json!({"worktree_path": "/home/user/worktrees/37_my_story"}),
|
|
&ctx,
|
|
)
|
|
.unwrap();
|
|
assert_eq!(result, "zed /home/user/worktrees/37_my_story");
|
|
}
|
|
|
|
#[test]
|
|
fn tool_get_editor_command_works_with_vscode() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
ctx.store.set("editor_command", json!("code"));
|
|
|
|
let result = tool_get_editor_command(
|
|
&json!({"worktree_path": "/path/to/worktree"}),
|
|
&ctx,
|
|
)
|
|
.unwrap();
|
|
assert_eq!(result, "code /path/to/worktree");
|
|
}
|
|
|
|
#[test]
|
|
fn get_editor_command_in_tools_list() {
|
|
let resp = handle_tools_list(Some(json!(1)));
|
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
|
let tool = tools.iter().find(|t| t["name"] == "get_editor_command");
|
|
assert!(tool.is_some(), "get_editor_command missing from tools list");
|
|
let t = tool.unwrap();
|
|
assert!(t["description"].is_string());
|
|
let required = t["inputSchema"]["required"].as_array().unwrap();
|
|
let req_names: Vec<&str> = required.iter().map(|v| v.as_str().unwrap()).collect();
|
|
assert!(req_names.contains(&"worktree_path"));
|
|
}
|
|
|
|
// ── Bug lifecycle tool tests ──────────────────────────────────
|
|
|
|
fn setup_git_repo_in(dir: &std::path::Path) {
|
|
std::process::Command::new("git")
|
|
.args(["init"])
|
|
.current_dir(dir)
|
|
.output()
|
|
.unwrap();
|
|
std::process::Command::new("git")
|
|
.args(["config", "user.email", "test@test.com"])
|
|
.current_dir(dir)
|
|
.output()
|
|
.unwrap();
|
|
std::process::Command::new("git")
|
|
.args(["config", "user.name", "Test"])
|
|
.current_dir(dir)
|
|
.output()
|
|
.unwrap();
|
|
std::process::Command::new("git")
|
|
.args(["commit", "--allow-empty", "-m", "init"])
|
|
.current_dir(dir)
|
|
.output()
|
|
.unwrap();
|
|
}
|
|
|
|
#[test]
fn create_bug_in_tools_list() {
    // create_bug must be advertised with a description and all five
    // required input fields.
    let resp = handle_tools_list(Some(json!(1)));
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let entry = tools
        .iter()
        .find(|t| t["name"] == "create_bug")
        .expect("create_bug missing from tools list");
    assert!(entry["description"].is_string());
    let required: Vec<&str> = entry["inputSchema"]["required"]
        .as_array()
        .unwrap()
        .iter()
        .map(|v| v.as_str().unwrap())
        .collect();
    for field in [
        "name",
        "description",
        "steps_to_reproduce",
        "actual_result",
        "expected_result",
    ] {
        assert!(required.contains(&field));
    }
}
|
|
|
|
#[test]
fn list_bugs_in_tools_list() {
    // list_bugs should appear in the advertised tool roster.
    let resp = handle_tools_list(Some(json!(1)));
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let found = tools.iter().any(|t| t["name"] == "list_bugs");
    assert!(found, "list_bugs missing from tools list");
}
|
|
|
|
#[test]
fn close_bug_in_tools_list() {
    // close_bug must be advertised and must require `bug_id`.
    let resp = handle_tools_list(Some(json!(1)));
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let entry = tools
        .iter()
        .find(|t| t["name"] == "close_bug")
        .expect("close_bug missing from tools list");
    let required: Vec<&str> = entry["inputSchema"]["required"]
        .as_array()
        .unwrap()
        .iter()
        .map(|v| v.as_str().unwrap())
        .collect();
    assert!(required.contains(&"bug_id"));
}
|
|
|
|
#[test]
fn tool_create_bug_missing_name() {
    // Omitting `name` must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());
    let args = json!({
        "description": "d",
        "steps_to_reproduce": "s",
        "actual_result": "a",
        "expected_result": "e"
    });
    let err = tool_create_bug(&args, &ctx).unwrap_err();
    assert!(err.contains("name"));
}
|
|
|
|
#[test]
fn tool_create_bug_missing_description() {
    // Omitting `description` must be rejected with an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());
    let args = json!({
        "name": "Bug",
        "steps_to_reproduce": "s",
        "actual_result": "a",
        "expected_result": "e"
    });
    let err = tool_create_bug(&args, &ctx).unwrap_err();
    assert!(err.contains("description"));
}
|
|
|
|
#[test]
fn tool_create_bug_creates_file_and_commits() {
    // A fully-specified bug report should produce a markdown file in
    // 1_upcoming whose id embeds a slug of the bug name.
    let dir = tempfile::tempdir().unwrap();
    setup_git_repo_in(dir.path());
    let ctx = test_ctx(dir.path());

    let args = json!({
        "name": "Login Crash",
        "description": "The app crashes on login.",
        "steps_to_reproduce": "1. Open app\n2. Click login",
        "actual_result": "500 error",
        "expected_result": "Successful login"
    });
    let result = tool_create_bug(&args, &ctx).unwrap();

    assert!(result.contains("1_bug_login_crash"));
    let bug_path = dir
        .path()
        .join(".story_kit/work/1_upcoming/1_bug_login_crash.md");
    assert!(bug_path.exists());
}
|
|
|
|
#[test]
fn tool_list_bugs_empty() {
    // With no bug files present, the tool returns an empty JSON array.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());
    let raw = tool_list_bugs(&ctx).unwrap();
    let bugs: Vec<Value> = serde_json::from_str(&raw).unwrap();
    assert_eq!(bugs.len(), 0);
}
|
|
|
|
#[test]
fn tool_list_bugs_returns_open_bugs() {
    // Two bug markdown files in 1_upcoming should surface as two entries,
    // with the id taken from the filename and the name from the H1 heading.
    let dir = tempfile::tempdir().unwrap();
    let upcoming = dir.path().join(".story_kit/work/1_upcoming");
    std::fs::create_dir_all(&upcoming).unwrap();
    std::fs::write(upcoming.join("1_bug_crash.md"), "# Bug 1: App Crash\n").unwrap();
    std::fs::write(upcoming.join("2_bug_typo.md"), "# Bug 2: Typo in Header\n").unwrap();

    let ctx = test_ctx(dir.path());
    let raw = tool_list_bugs(&ctx).unwrap();
    let bugs: Vec<Value> = serde_json::from_str(&raw).unwrap();
    assert_eq!(bugs.len(), 2);
    let expected = [("1_bug_crash", "App Crash"), ("2_bug_typo", "Typo in Header")];
    for (bug, (id, name)) in bugs.iter().zip(expected) {
        assert_eq!(bug["bug_id"], id);
        assert_eq!(bug["name"], name);
    }
}
|
|
|
|
#[test]
fn tool_close_bug_missing_bug_id() {
    // Calling close_bug without a bug_id must fail with an error that
    // names the missing field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());
    let err = tool_close_bug(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("bug_id"));
}
|
|
|
|
#[test]
fn tool_close_bug_moves_to_archive() {
    // Closing a tracked bug relocates its file from 1_upcoming into
    // 5_archived and reports the bug id.
    let dir = tempfile::tempdir().unwrap();
    setup_git_repo_in(dir.path());
    let upcoming = dir.path().join(".story_kit/work/1_upcoming");
    std::fs::create_dir_all(&upcoming).unwrap();
    let bug_file = upcoming.join("1_bug_crash.md");
    std::fs::write(&bug_file, "# Bug 1: Crash\n").unwrap();

    // Stage and commit the bug file so it is tracked before the move.
    let git = |args: &[&str]| {
        std::process::Command::new("git")
            .args(args)
            .current_dir(dir.path())
            .output()
            .unwrap();
    };
    git(&["add", "."]);
    git(&["commit", "-m", "add bug"]);

    let ctx = test_ctx(dir.path());
    let result = tool_close_bug(&json!({"bug_id": "1_bug_crash"}), &ctx).unwrap();
    assert!(result.contains("1_bug_crash"));
    assert!(!bug_file.exists());
    assert!(dir
        .path()
        .join(".story_kit/work/5_archived/1_bug_crash.md")
        .exists());
}
|
|
|
|
// ── Mergemaster tool tests ─────────────────────────────────────────────
|
|
|
|
#[test]
fn merge_agent_work_in_tools_list() {
    // merge_agent_work must require only story_id; agent_name stays optional.
    let resp = handle_tools_list(Some(json!(1)));
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let entry = tools
        .iter()
        .find(|t| t["name"] == "merge_agent_work")
        .expect("merge_agent_work missing from tools list");
    assert!(entry["description"].is_string());
    let required: Vec<&str> = entry["inputSchema"]["required"]
        .as_array()
        .unwrap()
        .iter()
        .map(|v| v.as_str().unwrap())
        .collect();
    assert!(required.contains(&"story_id"));
    // agent_name is optional
    assert!(!required.contains(&"agent_name"));
}
|
|
|
|
#[test]
fn move_story_to_merge_in_tools_list() {
    // move_story_to_merge must be advertised with a description and a
    // required `story_id` argument.
    let resp = handle_tools_list(Some(json!(1)));
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let entry = tools
        .iter()
        .find(|t| t["name"] == "move_story_to_merge")
        .expect("move_story_to_merge missing from tools list");
    assert!(entry["description"].is_string());
    let required: Vec<&str> = entry["inputSchema"]["required"]
        .as_array()
        .unwrap()
        .iter()
        .map(|v| v.as_str().unwrap())
        .collect();
    assert!(required.contains(&"story_id"));
}
|
|
|
|
#[tokio::test]
|
|
async fn tool_merge_agent_work_missing_story_id() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
let ctx = test_ctx(tmp.path());
|
|
let result = tool_merge_agent_work(&json!({}), &ctx).await;
|
|
assert!(result.is_err());
|
|
assert!(result.unwrap_err().contains("story_id"));
|
|
}
|
|
|
|
#[test]
fn tool_move_story_to_merge_missing_story_id() {
    // A missing story_id must produce an error naming the field.
    let dir = tempfile::tempdir().unwrap();
    let ctx = test_ctx(dir.path());
    let err = tool_move_story_to_merge(&json!({}), &ctx).unwrap_err();
    assert!(err.contains("story_id"));
}
|
|
|
|
#[test]
fn tool_move_story_to_merge_moves_file() {
    // A story in 2_current with an approved test plan should be relocated
    // to 4_merge by the tool.
    let tmp = tempfile::tempdir().unwrap();
    setup_git_repo_in(tmp.path());
    let current_dir = tmp.path().join(".story_kit/work/2_current");
    // Fix: the argument here had been corrupted to `¤t_dir` (mangled
    // `&current_dir`), which does not compile.
    std::fs::create_dir_all(&current_dir).unwrap();
    let story_file = current_dir.join("24_story_test.md");
    std::fs::write(&story_file, "---\nname: Test\ntest_plan: approved\n---\n").unwrap();
    // Stage and commit the story so the tool operates on a tracked file.
    std::process::Command::new("git")
        .args(["add", "."])
        .current_dir(tmp.path())
        .output()
        .unwrap();
    std::process::Command::new("git")
        .args(["commit", "-m", "add story"])
        .current_dir(tmp.path())
        .output()
        .unwrap();

    let ctx = test_ctx(tmp.path());
    let result = tool_move_story_to_merge(&json!({"story_id": "24_story_test"}), &ctx).unwrap();
    assert!(result.contains("4_merge"));
    assert!(!story_file.exists(), "2_current file should be gone");
    assert!(
        tmp.path().join(".story_kit/work/4_merge/24_story_test.md").exists(),
        "4_merge file should exist"
    );
}
|
|
|
|
#[tokio::test]
|
|
async fn tool_merge_agent_work_returns_coherent_report() {
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
setup_git_repo_in(tmp.path());
|
|
let ctx = test_ctx(tmp.path());
|
|
|
|
// Try to merge a non-existent branch — should return a report (not panic)
|
|
let result = tool_merge_agent_work(
|
|
&json!({"story_id": "99_nonexistent", "agent_name": "coder-1"}),
|
|
&ctx,
|
|
)
|
|
.await
|
|
.unwrap();
|
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
|
assert_eq!(parsed["story_id"], "99_nonexistent");
|
|
assert_eq!(parsed["agent_name"], "coder-1");
|
|
assert!(parsed.get("success").is_some());
|
|
assert!(parsed.get("had_conflicts").is_some());
|
|
assert!(parsed.get("gates_passed").is_some());
|
|
assert!(parsed.get("gate_output").is_some());
|
|
assert!(parsed.get("message").is_some());
|
|
}
|
|
}
|