Revert "refactor: split top-5 largest files into mod.rs + tests.rs"
This reverts commit 65a3767a7a.
This commit is contained in:
+477
-1
@@ -1403,4 +1403,480 @@ async fn handle_tools_call(id: Option<Value>, params: &Value, ctx: &AppContext)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::http::test_helpers::test_ctx;
|
||||
|
||||
#[test]
|
||||
fn json_rpc_response_serializes_success() {
|
||||
let resp = JsonRpcResponse::success(Some(json!(1)), json!({"ok": true}));
|
||||
let s = serde_json::to_string(&resp).unwrap();
|
||||
assert!(s.contains("\"result\""));
|
||||
assert!(!s.contains("\"error\""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_rpc_response_serializes_error() {
|
||||
let resp = JsonRpcResponse::error(Some(json!(1)), -32600, "bad".into());
|
||||
let s = serde_json::to_string(&resp).unwrap();
|
||||
assert!(s.contains("\"error\""));
|
||||
assert!(!s.contains("\"result\""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn initialize_returns_capabilities() {
|
||||
let resp = handle_initialize(
|
||||
Some(json!(1)),
|
||||
&json!({"protocolVersion": "2025-03-26", "capabilities": {}, "clientInfo": {"name": "test", "version": "1.0"}}),
|
||||
);
|
||||
let result = resp.result.unwrap();
|
||||
assert_eq!(result["protocolVersion"], "2025-03-26");
|
||||
assert!(result["capabilities"]["tools"].is_object());
|
||||
assert_eq!(result["serverInfo"]["name"], "huskies");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tools_list_returns_all_tools() {
|
||||
let resp = handle_tools_list(Some(json!(2)));
|
||||
let result = resp.result.unwrap();
|
||||
let tools = result["tools"].as_array().unwrap();
|
||||
let names: Vec<&str> = tools.iter().map(|t| t["name"].as_str().unwrap()).collect();
|
||||
assert!(names.contains(&"create_story"));
|
||||
assert!(names.contains(&"validate_stories"));
|
||||
assert!(names.contains(&"list_upcoming"));
|
||||
assert!(names.contains(&"get_story_todos"));
|
||||
assert!(names.contains(&"record_tests"));
|
||||
assert!(names.contains(&"ensure_acceptance"));
|
||||
assert!(names.contains(&"start_agent"));
|
||||
assert!(names.contains(&"stop_agent"));
|
||||
assert!(names.contains(&"list_agents"));
|
||||
assert!(names.contains(&"get_agent_config"));
|
||||
assert!(names.contains(&"reload_agent_config"));
|
||||
assert!(names.contains(&"get_agent_output"));
|
||||
assert!(names.contains(&"wait_for_agent"));
|
||||
assert!(names.contains(&"get_agent_remaining_turns_and_budget"));
|
||||
assert!(names.contains(&"create_worktree"));
|
||||
assert!(names.contains(&"list_worktrees"));
|
||||
assert!(names.contains(&"remove_worktree"));
|
||||
assert!(names.contains(&"get_editor_command"));
|
||||
assert!(!names.contains(&"report_completion"));
|
||||
assert!(names.contains(&"accept_story"));
|
||||
assert!(names.contains(&"check_criterion"));
|
||||
assert!(names.contains(&"add_criterion"));
|
||||
assert!(names.contains(&"update_story"));
|
||||
assert!(names.contains(&"create_spike"));
|
||||
assert!(names.contains(&"create_bug"));
|
||||
assert!(names.contains(&"list_bugs"));
|
||||
assert!(names.contains(&"close_bug"));
|
||||
assert!(names.contains(&"create_refactor"));
|
||||
assert!(names.contains(&"list_refactors"));
|
||||
assert!(names.contains(&"merge_agent_work"));
|
||||
assert!(names.contains(&"get_merge_status"));
|
||||
assert!(names.contains(&"move_story_to_merge"));
|
||||
assert!(names.contains(&"report_merge_failure"));
|
||||
assert!(names.contains(&"request_qa"));
|
||||
assert!(names.contains(&"approve_qa"));
|
||||
assert!(names.contains(&"reject_qa"));
|
||||
assert!(names.contains(&"launch_qa_app"));
|
||||
assert!(names.contains(&"get_server_logs"));
|
||||
assert!(names.contains(&"prompt_permission"));
|
||||
assert!(names.contains(&"get_pipeline_status"));
|
||||
assert!(names.contains(&"rebuild_and_restart"));
|
||||
assert!(names.contains(&"get_token_usage"));
|
||||
assert!(names.contains(&"move_story"));
|
||||
assert!(names.contains(&"unblock_story"));
|
||||
assert!(names.contains(&"delete_story"));
|
||||
assert!(names.contains(&"run_command"));
|
||||
assert!(names.contains(&"run_tests"));
|
||||
assert!(names.contains(&"get_test_result"));
|
||||
assert!(names.contains(&"run_build"));
|
||||
assert!(names.contains(&"run_lint"));
|
||||
assert!(names.contains(&"git_status"));
|
||||
assert!(names.contains(&"git_diff"));
|
||||
assert!(names.contains(&"git_add"));
|
||||
assert!(names.contains(&"git_commit"));
|
||||
assert!(names.contains(&"git_log"));
|
||||
assert!(names.contains(&"status"));
|
||||
assert!(names.contains(&"loc_file"));
|
||||
assert!(names.contains(&"dump_crdt"));
|
||||
assert!(names.contains(&"get_version"));
|
||||
assert!(names.contains(&"remove_criterion"));
|
||||
assert_eq!(tools.len(), 66);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tools_list_schemas_have_required_fields() {
|
||||
let resp = handle_tools_list(Some(json!(1)));
|
||||
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
||||
for tool in &tools {
|
||||
assert!(tool["name"].is_string(), "tool missing name");
|
||||
assert!(tool["description"].is_string(), "tool missing description");
|
||||
assert!(tool["inputSchema"].is_object(), "tool missing inputSchema");
|
||||
assert_eq!(tool["inputSchema"]["type"], "object");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handle_tools_call_unknown_tool() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let rt = tokio::runtime::Runtime::new().unwrap();
|
||||
let resp = rt.block_on(handle_tools_call(
|
||||
Some(json!(1)),
|
||||
&json!({"name": "bogus_tool", "arguments": {}}),
|
||||
&ctx,
|
||||
));
|
||||
let result = resp.result.unwrap();
|
||||
assert_eq!(result["isError"], true);
|
||||
assert!(
|
||||
result["content"][0]["text"]
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.contains("Unknown tool")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_sse_response_wraps_in_data_prefix() {
|
||||
let resp = JsonRpcResponse::success(Some(json!(1)), json!({"ok": true}));
|
||||
let http_resp = to_sse_response(resp);
|
||||
assert_eq!(
|
||||
http_resp.headers().get("content-type").unwrap(),
|
||||
"text/event-stream"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn wants_sse_detects_accept_header() {
|
||||
// Can't easily construct a Request in tests without TestClient,
|
||||
// so test the logic indirectly via to_sse_response format
|
||||
let resp = JsonRpcResponse::success(Some(json!(1)), json!("ok"));
|
||||
let json_resp = to_json_response(resp);
|
||||
assert_eq!(
|
||||
json_resp.headers().get("content-type").unwrap(),
|
||||
"application/json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_rpc_error_response_builds_json_response() {
|
||||
let resp = json_rpc_error_response(Some(json!(42)), -32600, "test error".into());
|
||||
assert_eq!(resp.status(), poem::http::StatusCode::OK);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/json"
|
||||
);
|
||||
}
|
||||
|
||||
// ── HTTP handler tests (TestClient) ───────────────────────────
|
||||
|
||||
fn test_mcp_app(ctx: std::sync::Arc<AppContext>) -> impl poem::Endpoint {
|
||||
use poem::EndpointExt;
|
||||
poem::Route::new()
|
||||
.at("/mcp", poem::post(mcp_post_handler).get(mcp_get_handler))
|
||||
.data(ctx)
|
||||
}
|
||||
|
||||
async fn read_body_json(resp: poem::test::TestResponse) -> Value {
|
||||
let body = resp.0.into_body().into_string().await.unwrap();
|
||||
serde_json::from_str(&body).unwrap()
|
||||
}
|
||||
|
||||
async fn post_json_mcp<E: poem::Endpoint>(
|
||||
cli: &poem::test::TestClient<E>,
|
||||
payload: &str,
|
||||
) -> Value {
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.body(payload.to_string())
|
||||
.send()
|
||||
.await;
|
||||
read_body_json(resp).await
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_get_handler_returns_405() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli.get("/mcp").send().await;
|
||||
assert_eq!(resp.0.status(), poem::http::StatusCode::METHOD_NOT_ALLOWED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_invalid_content_type_returns_error() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "text/plain")
|
||||
.body("{}")
|
||||
.send()
|
||||
.await;
|
||||
let body = read_body_json(resp).await;
|
||||
assert!(body.get("error").is_some(), "expected error field: {body}");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_invalid_json_returns_parse_error() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.body("not-valid-json")
|
||||
.send()
|
||||
.await;
|
||||
let body = read_body_json(resp).await;
|
||||
assert!(body.get("error").is_some(), "expected error field: {body}");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_wrong_jsonrpc_version_returns_error() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let body = post_json_mcp(
|
||||
&cli,
|
||||
r#"{"jsonrpc":"1.0","id":1,"method":"initialize","params":{}}"#,
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
body["error"]["message"]
|
||||
.as_str()
|
||||
.unwrap_or("")
|
||||
.contains("version"),
|
||||
"expected version error: {body}"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_notification_with_null_id_returns_accepted() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.body(r#"{"jsonrpc":"2.0","method":"notifications/initialized","params":{}}"#)
|
||||
.send()
|
||||
.await;
|
||||
assert_eq!(resp.0.status(), poem::http::StatusCode::ACCEPTED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_notification_with_explicit_null_id_returns_accepted() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.body(r#"{"jsonrpc":"2.0","id":null,"method":"notifications/initialized","params":{}}"#)
|
||||
.send()
|
||||
.await;
|
||||
assert_eq!(resp.0.status(), poem::http::StatusCode::ACCEPTED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_missing_id_non_notification_returns_error() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let body = post_json_mcp(
|
||||
&cli,
|
||||
r#"{"jsonrpc":"2.0","method":"initialize","params":{}}"#,
|
||||
)
|
||||
.await;
|
||||
assert!(body.get("error").is_some(), "expected error: {body}");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_unknown_method_returns_error() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let body = post_json_mcp(
|
||||
&cli,
|
||||
r#"{"jsonrpc":"2.0","id":1,"method":"bogus/method","params":{}}"#,
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
body["error"]["message"]
|
||||
.as_str()
|
||||
.unwrap_or("")
|
||||
.contains("Unknown method"),
|
||||
"expected unknown method error: {body}"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_initialize_returns_capabilities() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let body = post_json_mcp(
|
||||
&cli,
|
||||
r#"{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2025-03-26","capabilities":{},"clientInfo":{"name":"test","version":"1.0"}}}"#,
|
||||
)
|
||||
.await;
|
||||
assert_eq!(body["result"]["protocolVersion"], "2025-03-26");
|
||||
assert_eq!(body["result"]["serverInfo"]["name"], "huskies");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_tools_list_returns_tools() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let body = post_json_mcp(
|
||||
&cli,
|
||||
r#"{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}"#,
|
||||
)
|
||||
.await;
|
||||
assert!(body["result"]["tools"].is_array());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_sse_returns_event_stream_content_type() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.header("accept", "text/event-stream")
|
||||
.body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}"#)
|
||||
.send()
|
||||
.await;
|
||||
assert_eq!(
|
||||
resp.0.headers().get("content-type").unwrap(),
|
||||
"text/event-stream"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_sse_get_agent_output_missing_story_id() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.header("accept", "text/event-stream")
|
||||
.body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{}}}"#)
|
||||
.send()
|
||||
.await;
|
||||
assert_eq!(
|
||||
resp.0.headers().get("content-type").unwrap(),
|
||||
"text/event-stream",
|
||||
"expected SSE content-type"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_sse_get_agent_output_without_agent_name_returns_disk_content() {
|
||||
// Without agent_name the SSE live-streaming intercept is skipped and
|
||||
// the disk-based handler runs. The transport still wraps the result in
|
||||
// SSE format (data: …\n\n) because the client sent Accept: text/event-stream,
|
||||
// but the content should be a valid JSON-RPC result, not a subscribe error.
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.header("accept", "text/event-stream")
|
||||
.body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"1_test"}}}"#)
|
||||
.send()
|
||||
.await;
|
||||
let body = resp.0.into_body().into_string().await.unwrap();
|
||||
// Body is SSE-wrapped: "data: {…}\n\n" — strip the prefix and verify it's
|
||||
// a valid JSON-RPC result (not an error about missing agent_name).
|
||||
let json_part = body
|
||||
.trim_start_matches("data: ")
|
||||
.trim_end_matches("\n\n")
|
||||
.trim();
|
||||
let parsed: serde_json::Value = serde_json::from_str(json_part)
|
||||
.unwrap_or_else(|_| panic!("expected JSON-RPC in SSE body, got: {body}"));
|
||||
assert!(
|
||||
parsed.get("result").is_some(),
|
||||
"expected JSON-RPC result (disk-based handler ran): {parsed}"
|
||||
);
|
||||
// Must NOT be an error about missing agent_name (agent_name is now optional)
|
||||
assert!(
|
||||
parsed.get("error").is_none(),
|
||||
"unexpected error when agent_name omitted: {parsed}"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_sse_get_agent_output_no_agent_no_logs_returns_not_found() {
|
||||
// Agent not in pool and no log files → SSE success with "No log files found" message.
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.header("accept", "text/event-stream")
|
||||
.body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"99_nope","agent_name":"bot"}}}"#)
|
||||
.send()
|
||||
.await;
|
||||
assert_eq!(
|
||||
resp.0.headers().get("content-type").unwrap(),
|
||||
"text/event-stream"
|
||||
);
|
||||
let body = resp.0.into_body().into_string().await.unwrap();
|
||||
assert!(body.contains("data:"), "expected SSE data prefix: {body}");
|
||||
// Must NOT return isError — should be a success result with "No log files found"
|
||||
assert!(
|
||||
!body.contains("isError"),
|
||||
"expected no isError for missing agent: {body}"
|
||||
);
|
||||
assert!(
|
||||
body.contains("No log files found"),
|
||||
"expected not-found message: {body}"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mcp_post_sse_get_agent_output_exited_agent_reads_disk_logs() {
|
||||
use crate::agent_log::AgentLogWriter;
|
||||
use crate::agents::AgentEvent;
|
||||
// Agent has exited (not in pool) but wrote logs to disk.
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
let mut writer = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-sse").unwrap();
|
||||
writer
|
||||
.write_event(&AgentEvent::Output {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
text: "disk output".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
drop(writer);
|
||||
|
||||
let ctx = std::sync::Arc::new(test_ctx(root));
|
||||
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
|
||||
let resp = cli
|
||||
.post("/mcp")
|
||||
.header("content-type", "application/json")
|
||||
.header("accept", "text/event-stream")
|
||||
.body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"42_story_foo","agent_name":"coder-1"}}}"#)
|
||||
.send()
|
||||
.await;
|
||||
let body = resp.0.into_body().into_string().await.unwrap();
|
||||
assert!(
|
||||
body.contains("disk output"),
|
||||
"expected disk log content in SSE response: {body}"
|
||||
);
|
||||
assert!(
|
||||
!body.contains("isError"),
|
||||
"expected no error for exited agent with logs: {body}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,725 +0,0 @@
|
||||
//! MCP story tools — create, update, move, and manage stories, bugs, and refactors via MCP.
|
||||
//!
|
||||
//! This file is a thin adapter: it deserialises MCP payloads, delegates to
|
||||
//! `crate::service::story` and `crate::http::workflow` for business logic,
|
||||
//! and serialises responses.
|
||||
use crate::agents::{
|
||||
close_bug_to_archive, feature_branch_has_unmerged_changes, move_story_to_done,
|
||||
};
|
||||
use crate::http::context::AppContext;
|
||||
use crate::http::workflow::{
|
||||
add_criterion_to_file, check_criterion_in_file, create_bug_file, create_refactor_file,
|
||||
create_spike_file, create_story_file, edit_criterion_in_file, list_bug_files,
|
||||
list_refactor_files, load_pipeline_state, load_upcoming_stories, remove_criterion_from_file,
|
||||
update_story_in_file, validate_story_dirs,
|
||||
};
|
||||
use crate::io::story_metadata::{
|
||||
check_archived_deps, check_archived_deps_from_list, parse_front_matter, parse_unchecked_todos,
|
||||
};
|
||||
use crate::service::story::parse_test_cases;
|
||||
use crate::slog_warn;
|
||||
#[allow(unused_imports)]
|
||||
use crate::workflow::{TestCaseResult, TestStatus, evaluate_acceptance_with_coverage};
|
||||
use serde_json::{Value, json};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
|
||||
pub(super) fn tool_create_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let name = args
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: name")?;
|
||||
let user_story = args.get("user_story").and_then(|v| v.as_str());
|
||||
let description = args.get("description").and_then(|v| v.as_str());
|
||||
let acceptance_criteria: Option<Vec<String>> = args
|
||||
.get("acceptance_criteria")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
let depends_on: Option<Vec<u32>> = args
|
||||
.get("depends_on")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
// Spike 61: write the file only — the filesystem watcher detects the new
|
||||
// .md file in work/1_backlog/ and auto-commits with a deterministic message.
|
||||
let commit = false;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let story_id = create_story_file(
|
||||
&root,
|
||||
name,
|
||||
user_story,
|
||||
description,
|
||||
acceptance_criteria.as_deref(),
|
||||
depends_on.as_deref(),
|
||||
commit,
|
||||
)?;
|
||||
|
||||
// Bug 503: warn at creation time if any depends_on points at an already-archived story.
|
||||
// Archived = satisfied semantics: the dep will resolve immediately on the next promotion
|
||||
// tick, which is surprising if the archived story was abandoned rather than cleanly done.
|
||||
let archived_deps = depends_on
|
||||
.as_deref()
|
||||
.map(|deps| check_archived_deps_from_list(&root, deps))
|
||||
.unwrap_or_default();
|
||||
if !archived_deps.is_empty() {
|
||||
slog_warn!(
|
||||
"[create-story] Story '{story_id}' depends_on {archived_deps:?} which \
|
||||
are already in 6_archived. The dep will be treated as satisfied on the \
|
||||
next promotion tick. If these deps were abandoned (not cleanly completed), \
|
||||
consider removing the depends_on or keeping the story in backlog manually."
|
||||
);
|
||||
return Ok(format!(
|
||||
"Created story: {story_id}\n\n\
|
||||
WARNING: depends_on {archived_deps:?} point at stories already in \
|
||||
6_archived. These deps are treated as satisfied (archived = satisfied \
|
||||
semantics), so this story may be auto-promoted from backlog immediately. \
|
||||
If the archived deps were abandoned rather than completed, remove the \
|
||||
depends_on or move the story back to backlog manually after promotion."
|
||||
));
|
||||
}
|
||||
|
||||
Ok(format!("Created story: {story_id}"))
|
||||
}
|
||||
|
||||
/// Purge a story from the in-memory CRDT by writing a tombstone op (story 521).
|
||||
///
|
||||
/// This is the eviction primitive for the four-state-machine drift problem
|
||||
/// we hit on 2026-04-09 — when a story gets stuck in the running server's
|
||||
/// in-memory CRDT and can't be cleared by sqlite deletes alone (because the
|
||||
/// in-memory state outlives any pipeline_items / crdt_ops manipulation),
|
||||
/// this tool writes a proper CRDT delete op via `crdt_state::evict_item`.
|
||||
///
|
||||
/// The tombstone op:
|
||||
/// - Marks the in-memory CRDT item as `is_deleted = true` immediately
|
||||
/// (so subsequent `read_all_items` / `read_item` calls skip it)
|
||||
/// - Is persisted to `crdt_ops` so the eviction survives a server restart
|
||||
/// - Drops the in-memory `CONTENT_STORE` entry for the story
|
||||
///
|
||||
/// This tool does NOT touch: running agents, worktrees, the `pipeline_items`
|
||||
/// shadow table, `timers.json`, or filesystem shadows. Compose with
|
||||
/// `stop_agent`, `remove_worktree`, etc. as needed for a full purge — or
|
||||
/// see story 514 (delete_story full cleanup) for a future "do it all" tool.
|
||||
pub(super) fn tool_purge_story(args: &Value, _ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
crate::crdt_state::evict_item(story_id)?;
|
||||
|
||||
Ok(format!(
|
||||
"Evicted '{story_id}' from in-memory CRDT (tombstone op persisted to crdt_ops; CONTENT_STORE entry dropped)."
|
||||
))
|
||||
}
|
||||
|
||||
pub(super) fn tool_validate_stories(ctx: &AppContext) -> Result<String, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let results = validate_story_dirs(&root)?;
|
||||
serde_json::to_string_pretty(&json!(
|
||||
results
|
||||
.iter()
|
||||
.map(|r| json!({
|
||||
"story_id": r.story_id,
|
||||
"valid": r.valid,
|
||||
"error": r.error,
|
||||
}))
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_list_upcoming(ctx: &AppContext) -> Result<String, String> {
|
||||
let stories = load_upcoming_stories(ctx)?;
|
||||
serde_json::to_string_pretty(&json!(
|
||||
stories
|
||||
.iter()
|
||||
.map(|s| json!({
|
||||
"story_id": s.story_id,
|
||||
"name": s.name,
|
||||
"error": s.error,
|
||||
}))
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_get_pipeline_status(ctx: &AppContext) -> Result<String, String> {
|
||||
let state = load_pipeline_state(ctx)?;
|
||||
|
||||
fn map_items(items: &[crate::http::workflow::UpcomingStory], stage: &str) -> Vec<Value> {
|
||||
items
|
||||
.iter()
|
||||
.map(|s| {
|
||||
let mut item = json!({
|
||||
"story_id": s.story_id,
|
||||
"name": s.name,
|
||||
"stage": stage,
|
||||
"agent": s.agent.as_ref().map(|a| json!({
|
||||
"agent_name": a.agent_name,
|
||||
"model": a.model,
|
||||
"status": a.status,
|
||||
})),
|
||||
});
|
||||
// Include blocked/retry_count when present so callers can
|
||||
// identify stories stuck in the pipeline.
|
||||
if let Some(true) = s.blocked {
|
||||
item["blocked"] = json!(true);
|
||||
}
|
||||
if let Some(rc) = s.retry_count {
|
||||
item["retry_count"] = json!(rc);
|
||||
}
|
||||
if let Some(ref mf) = s.merge_failure {
|
||||
item["merge_failure"] = json!(mf);
|
||||
}
|
||||
item
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
let mut active: Vec<Value> = Vec::new();
|
||||
active.extend(map_items(&state.current, "current"));
|
||||
active.extend(map_items(&state.qa, "qa"));
|
||||
active.extend(map_items(&state.merge, "merge"));
|
||||
active.extend(map_items(&state.done, "done"));
|
||||
|
||||
let backlog: Vec<Value> = state
|
||||
.backlog
|
||||
.iter()
|
||||
.map(|s| json!({ "story_id": s.story_id, "name": s.name }))
|
||||
.collect();
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"active": active,
|
||||
"backlog": backlog,
|
||||
"backlog_count": backlog.len(),
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
|
||||
// Read from DB content store, falling back to filesystem.
|
||||
let contents = crate::http::workflow::read_story_content(&root, story_id)
|
||||
.map_err(|_| format!("Story file not found: {story_id}.md"))?;
|
||||
|
||||
let story_name = parse_front_matter(&contents).ok().and_then(|m| m.name);
|
||||
let todos = parse_unchecked_todos(&contents);
|
||||
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"story_id": story_id,
|
||||
"story_name": story_name,
|
||||
"todos": todos,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_record_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let unit = parse_test_cases(args.get("unit"))?;
|
||||
let integration = parse_test_cases(args.get("integration"))?;
|
||||
|
||||
let mut workflow = ctx
|
||||
.workflow
|
||||
.lock()
|
||||
.map_err(|e| format!("Lock error: {e}"))?;
|
||||
|
||||
workflow.record_test_results_validated(story_id.to_string(), unit, integration)?;
|
||||
|
||||
// Persist to story file (best-effort — file write errors are warnings, not failures).
|
||||
if let Ok(project_root) = ctx.state.get_project_root()
|
||||
&& let Some(results) = workflow.results.get(story_id)
|
||||
&& let Err(e) = crate::http::workflow::write_test_results_to_story_file(
|
||||
&project_root,
|
||||
story_id,
|
||||
results,
|
||||
)
|
||||
{
|
||||
slog_warn!("[record_tests] Could not persist results to story file: {e}");
|
||||
}
|
||||
|
||||
Ok("Test results recorded.".to_string())
|
||||
}
|
||||
|
||||
pub(super) fn tool_ensure_acceptance(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let workflow = ctx
|
||||
.workflow
|
||||
.lock()
|
||||
.map_err(|e| format!("Lock error: {e}"))?;
|
||||
|
||||
// Use in-memory results if present; otherwise fall back to file-persisted results.
|
||||
let file_results;
|
||||
let results = if let Some(r) = workflow.results.get(story_id) {
|
||||
r
|
||||
} else {
|
||||
let project_root = ctx.state.get_project_root().ok();
|
||||
file_results = project_root.as_deref().and_then(|root| {
|
||||
crate::http::workflow::read_test_results_from_story_file(root, story_id)
|
||||
});
|
||||
file_results.as_ref().map_or_else(
|
||||
|| {
|
||||
// No results anywhere — use empty default for the acceptance check
|
||||
// (it will fail with "No test results recorded")
|
||||
static EMPTY: std::sync::OnceLock<crate::workflow::StoryTestResults> =
|
||||
std::sync::OnceLock::new();
|
||||
EMPTY.get_or_init(Default::default)
|
||||
},
|
||||
|r| r,
|
||||
)
|
||||
};
|
||||
|
||||
let coverage = workflow.coverage.get(story_id);
|
||||
let decision = evaluate_acceptance_with_coverage(results, coverage);
|
||||
|
||||
if decision.can_accept {
|
||||
Ok("Story can be accepted. All gates pass.".to_string())
|
||||
} else {
|
||||
let mut parts = decision.reasons;
|
||||
if let Some(w) = decision.warning {
|
||||
parts.push(w);
|
||||
}
|
||||
Err(format!("Acceptance blocked: {}", parts.join("; ")))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn tool_accept_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
// Bug 226: Refuse to accept if the feature branch has unmerged code.
|
||||
// The code must be squash-merged via merge_agent_work first.
|
||||
if feature_branch_has_unmerged_changes(&project_root, story_id) {
|
||||
return Err(format!(
|
||||
"Cannot accept story '{story_id}': feature branch 'feature/story-{story_id}' \
|
||||
has unmerged changes. Use merge_agent_work to squash-merge the code into \
|
||||
master first."
|
||||
));
|
||||
}
|
||||
|
||||
move_story_to_done(&project_root, story_id)?;
|
||||
ctx.services.agents.remove_agents_for_story(story_id);
|
||||
|
||||
Ok(format!(
|
||||
"Story '{story_id}' accepted, moved to done/, and committed to master."
|
||||
))
|
||||
}
|
||||
|
||||
pub(super) fn tool_check_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let criterion_index = args
|
||||
.get("criterion_index")
|
||||
.and_then(|v| v.as_u64())
|
||||
.ok_or("Missing required argument: criterion_index")? as usize;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
check_criterion_in_file(&root, story_id, criterion_index)?;
|
||||
|
||||
Ok(format!(
|
||||
"Criterion {criterion_index} checked for story '{story_id}'. Committed to master."
|
||||
))
|
||||
}
|
||||
|
||||
pub(super) fn tool_edit_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let criterion_index = args
|
||||
.get("criterion_index")
|
||||
.and_then(|v| v.as_u64())
|
||||
.ok_or("Missing required argument: criterion_index")? as usize;
|
||||
let new_text = args
|
||||
.get("new_text")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: new_text")?;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
edit_criterion_in_file(&root, story_id, criterion_index, new_text)?;
|
||||
|
||||
Ok(format!(
|
||||
"Criterion {criterion_index} updated for story '{story_id}'."
|
||||
))
|
||||
}
|
||||
|
||||
pub(super) fn tool_add_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let criterion = args
|
||||
.get("criterion")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: criterion")?;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
add_criterion_to_file(&root, story_id, criterion)?;
|
||||
|
||||
Ok(format!(
|
||||
"Added criterion to story '{story_id}': - [ ] {criterion}"
|
||||
))
|
||||
}
|
||||
|
||||
pub(super) fn tool_remove_criterion(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
let criterion_index = args
|
||||
.get("criterion_index")
|
||||
.and_then(|v| v.as_u64())
|
||||
.ok_or("Missing required argument: criterion_index")? as usize;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
remove_criterion_from_file(&root, story_id, criterion_index)?;
|
||||
|
||||
Ok(format!(
|
||||
"Removed criterion {criterion_index} from story '{story_id}'."
|
||||
))
|
||||
}
|
||||
|
||||
/// MCP tool: update a story's user-story text, description, and/or front matter.
///
/// # Arguments (from `args` JSON object)
/// * `story_id` — required.
/// * `user_story`, `description` — optional replacement text.
/// * `agent` — optional shorthand for the `agent` front-matter key.
/// * `front_matter` — optional object of arbitrary front-matter keys; entries
///   here override the `agent` shorthand on key collision (inserted second).
///
/// Returns a success message; the message carries an inline WARNING when any
/// `depends_on` entry points at an already-archived story (bug 503).
pub(super) fn tool_update_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let user_story = args.get("user_story").and_then(|v| v.as_str());
    let description = args.get("description").and_then(|v| v.as_str());

    // Collect front matter fields: explicit `agent` param + arbitrary `front_matter` object.
    // Values are passed as serde_json::Value so native booleans, numbers, and arrays are
    // preserved and encoded correctly as unquoted YAML by update_story_in_file.
    let mut front_matter: HashMap<String, Value> = HashMap::new();
    if let Some(agent) = args.get("agent").and_then(|v| v.as_str()) {
        front_matter.insert("agent".to_string(), Value::String(agent.to_string()));
    }
    if let Some(obj) = args.get("front_matter").and_then(|v| v.as_object()) {
        for (k, v) in obj {
            front_matter.insert(k.clone(), v.clone());
        }
    }
    // Pass None when nothing was supplied so the file's front matter is untouched.
    let front_matter_opt = if front_matter.is_empty() {
        None
    } else {
        Some(&front_matter)
    };

    let root = ctx.state.get_project_root()?;
    update_story_in_file(&root, story_id, user_story, description, front_matter_opt)?;

    // Bug 503: warn if any depends_on in the (now updated) story points at an archived story.
    // Stage falls back to "1_backlog" when no pipeline-state entry exists.
    let stage = crate::pipeline_state::read_typed(story_id)
        .ok()
        .flatten()
        .map(|i| i.stage.dir_name().to_string())
        .unwrap_or_else(|| "1_backlog".to_string());
    let archived_deps = check_archived_deps(&root, &stage, story_id);
    if !archived_deps.is_empty() {
        slog_warn!(
            "[update-story] Story '{story_id}' depends_on {archived_deps:?} which \
             are already in 6_archived. The dep will be treated as satisfied on the \
             next promotion tick. If these deps were abandoned (not cleanly completed), \
             consider removing the depends_on or keeping the story in backlog manually."
        );
        // The update itself succeeded, so this is Ok — the warning rides along
        // in the returned message rather than failing the call.
        return Ok(format!(
            "Updated story '{story_id}'.\n\n\
             WARNING: depends_on {archived_deps:?} point at stories already in \
             6_archived. These deps are treated as satisfied (archived = satisfied \
             semantics), so this story may be auto-promoted from backlog immediately. \
             If the archived deps were abandoned rather than completed, remove the \
             depends_on or move the story back to backlog manually after promotion."
        ));
    }

    Ok(format!("Updated story '{story_id}'."))
}
|
||||
|
||||
pub(super) fn tool_create_spike(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let name = args
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: name")?;
|
||||
let description = args.get("description").and_then(|v| v.as_str());
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let spike_id = create_spike_file(&root, name, description)?;
|
||||
|
||||
Ok(format!("Created spike: {spike_id}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_create_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let name = args
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: name")?;
|
||||
let description = args
|
||||
.get("description")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: description")?;
|
||||
let steps_to_reproduce = args
|
||||
.get("steps_to_reproduce")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: steps_to_reproduce")?;
|
||||
let actual_result = args
|
||||
.get("actual_result")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: actual_result")?;
|
||||
let expected_result = args
|
||||
.get("expected_result")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: expected_result")?;
|
||||
let acceptance_criteria: Option<Vec<String>> = args
|
||||
.get("acceptance_criteria")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
let depends_on: Option<Vec<u32>> = args
|
||||
.get("depends_on")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let bug_id = create_bug_file(
|
||||
&root,
|
||||
name,
|
||||
description,
|
||||
steps_to_reproduce,
|
||||
actual_result,
|
||||
expected_result,
|
||||
acceptance_criteria.as_deref(),
|
||||
depends_on.as_deref(),
|
||||
)?;
|
||||
|
||||
Ok(format!("Created bug: {bug_id}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_list_bugs(ctx: &AppContext) -> Result<String, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let bugs = list_bug_files(&root)?;
|
||||
serde_json::to_string_pretty(&json!(
|
||||
bugs.iter()
|
||||
.map(|(id, name)| json!({ "bug_id": id, "name": name }))
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_close_bug(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let bug_id = args
|
||||
.get("bug_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: bug_id")?;
|
||||
|
||||
let root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
close_bug_to_archive(&root, bug_id)?;
|
||||
ctx.services.agents.remove_agents_for_story(bug_id);
|
||||
|
||||
Ok(format!(
|
||||
"Bug '{bug_id}' closed, moved to bugs/archive/, and committed to master."
|
||||
))
|
||||
}
|
||||
|
||||
pub(super) fn tool_unblock_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let story_id = args
|
||||
.get("story_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
|
||||
// Extract the numeric prefix (e.g. "42" from "42_story_foo")
|
||||
let story_number = story_id
|
||||
.split('_')
|
||||
.next()
|
||||
.filter(|s| !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()))
|
||||
.ok_or_else(|| format!("Invalid story_id format: '{story_id}'. Expected a numeric prefix (e.g. '42_story_foo')."))?;
|
||||
|
||||
Ok(crate::chat::commands::unblock::unblock_by_number(
|
||||
&root,
|
||||
story_number,
|
||||
))
|
||||
}
|
||||
|
||||
/// MCP tool: fully delete a story from the pipeline (best-effort teardown).
///
/// Steps, in dependency order (each logged via `slog_warn!`):
/// 0. cancel pending retry timers, 1. stop running agents, 2. clear the
/// agent pool, 3. remove the git worktree, 4. write a CRDT tombstone,
/// 5. delete the DB content/shadow rows, 6. remove the filesystem shadow
/// file from whichever `work/N_stage/` directory holds it.
///
/// # Errors
/// * Missing `story_id`.
/// * Story not found anywhere (no timer, no DB row, no shadow file).
/// * Partial deletion: any step that failed is listed in the error message.
pub(super) async fn tool_delete_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;

    let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
    // Collects non-fatal step failures; reported together at the end.
    let mut failed_steps: Vec<String> = Vec::new();

    // 0. Cancel any pending rate-limit retry timers for this story (bug 514).
    //    Must happen before stopping agents so the tick loop cannot re-spawn
    //    an agent after we tear everything else down.
    let timer_removed = ctx.timer_store.remove(story_id);
    if timer_removed {
        slog_warn!("[delete_story] Cancelled pending timer for '{story_id}'");
    } else {
        slog_warn!("[delete_story] No pending timer found for '{story_id}'");
    }

    // 1. Stop any running agents for this story (best-effort).
    if let Ok(agents) = ctx.services.agents.list_agents() {
        for agent in agents.iter().filter(|a| a.story_id == story_id) {
            match ctx
                .services
                .agents
                .stop_agent(&project_root, story_id, &agent.agent_name)
                .await
            {
                Ok(()) => {
                    slog_warn!(
                        "[delete_story] Stopped agent '{}' for '{story_id}'",
                        agent.agent_name
                    );
                }
                Err(e) => {
                    slog_warn!(
                        "[delete_story] Failed to stop agent '{}' for '{story_id}': {e}",
                        agent.agent_name
                    );
                    failed_steps.push(format!("stop_agent({}): {e}", agent.agent_name));
                }
            }
        }
    }

    // 2. Remove agent pool entries.
    let removed_count = ctx.services.agents.remove_agents_for_story(story_id);
    slog_warn!("[delete_story] Removed {removed_count} agent pool entries for '{story_id}'");

    // 3. Remove worktree (best-effort). Skipped silently if config fails to load.
    if let Ok(config) = crate::config::ProjectConfig::load(&project_root) {
        match crate::worktree::remove_worktree_by_story_id(&project_root, story_id, &config).await {
            Ok(()) => slog_warn!("[delete_story] Removed worktree for '{story_id}'"),
            Err(e) => slog_warn!("[delete_story] Worktree removal for '{story_id}': {e}"),
        }
    }

    // 4. Write a CRDT tombstone op so the story is evicted from the in-memory
    //    state machine and the deletion is persisted to crdt_ops (survives
    //    restart). Best-effort: legacy filesystem-only stories may not have a
    //    CRDT entry, so a "not found" error is expected and non-fatal.
    match crate::crdt_state::evict_item(story_id) {
        Ok(()) => {
            slog_warn!(
                "[delete_story] Evicted '{story_id}' from CRDT (tombstone persisted to crdt_ops)"
            );
        }
        Err(e) => {
            slog_warn!("[delete_story] CRDT eviction for '{story_id}': {e}");
        }
    }

    // 5. Delete from database content store and shadow table.
    //    Existence is checked BEFORE deleting so the not-found error below
    //    reflects the pre-delete state.
    let found_in_db = crate::db::read_content(story_id).is_some()
        || crate::pipeline_state::read_typed(story_id)
            .ok()
            .flatten()
            .is_some();
    crate::db::delete_item(story_id);
    slog_warn!("[delete_story] Deleted '{story_id}' from content store / shadow table");

    // 6. Remove the filesystem shadow file from work/N_stage/.
    //    A story lives in exactly one stage dir, hence the `break` on first hit.
    let sk = project_root.join(".huskies").join("work");
    let stage_dirs = [
        "1_backlog",
        "2_current",
        "3_qa",
        "4_merge",
        "5_done",
        "6_archived",
    ];
    let mut deleted_from_fs = false;
    for stage in &stage_dirs {
        let path = sk.join(stage).join(format!("{story_id}.md"));
        if path.exists() {
            match fs::remove_file(&path) {
                Ok(()) => {
                    slog_warn!(
                        "[delete_story] Deleted filesystem shadow '{story_id}' from work/{stage}/"
                    );
                    deleted_from_fs = true;
                }
                Err(e) => {
                    slog_warn!(
                        "[delete_story] Failed to delete filesystem shadow '{story_id}' from work/{stage}/: {e}"
                    );
                    failed_steps.push(format!("delete_filesystem({stage}): {e}"));
                }
            }
            break;
        }
    }

    // Nothing anywhere referenced this story — report not-found.
    if !found_in_db && !deleted_from_fs && !timer_removed {
        return Err(format!(
            "Story '{story_id}' not found in any pipeline stage."
        ));
    }

    if !failed_steps.is_empty() {
        return Err(format!(
            "Story '{story_id}' partially deleted. Failed steps: {}.",
            failed_steps.join("; ")
        ));
    }

    Ok(format!("Story '{story_id}' deleted from pipeline."))
}
|
||||
|
||||
pub(super) fn tool_create_refactor(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let name = args
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: name")?;
|
||||
let description = args.get("description").and_then(|v| v.as_str());
|
||||
let acceptance_criteria: Option<Vec<String>> = args
|
||||
.get("acceptance_criteria")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
let depends_on: Option<Vec<u32>> = args
|
||||
.get("depends_on")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok());
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let refactor_id = create_refactor_file(
|
||||
&root,
|
||||
name,
|
||||
description,
|
||||
acceptance_criteria.as_deref(),
|
||||
depends_on.as_deref(),
|
||||
)?;
|
||||
|
||||
Ok(format!("Created refactor: {refactor_id}"))
|
||||
}
|
||||
|
||||
pub(super) fn tool_list_refactors(ctx: &AppContext) -> Result<String, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let refactors = list_refactor_files(&root)?;
|
||||
serde_json::to_string_pretty(&json!(
|
||||
refactors
|
||||
.iter()
|
||||
.map(|(id, name)| json!({ "refactor_id": id, "name": name }))
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,472 +0,0 @@
|
||||
use super::*;
|
||||
use crate::http::test_helpers::test_ctx;
|
||||
|
||||
// A success response must serialize with a `result` field and no `error`.
#[test]
fn json_rpc_response_serializes_success() {
    let resp = JsonRpcResponse::success(Some(json!(1)), json!({"ok": true}));
    let s = serde_json::to_string(&resp).unwrap();
    assert!(s.contains("\"result\""));
    assert!(!s.contains("\"error\""));
}

// An error response must serialize with an `error` field and no `result`.
#[test]
fn json_rpc_response_serializes_error() {
    let resp = JsonRpcResponse::error(Some(json!(1)), -32600, "bad".into());
    let s = serde_json::to_string(&resp).unwrap();
    assert!(s.contains("\"error\""));
    assert!(!s.contains("\"result\""));
}
|
||||
|
||||
// `initialize` must echo the protocol version and advertise tools support
// plus the server identity.
#[test]
fn initialize_returns_capabilities() {
    let resp = handle_initialize(
        Some(json!(1)),
        &json!({"protocolVersion": "2025-03-26", "capabilities": {}, "clientInfo": {"name": "test", "version": "1.0"}}),
    );
    let result = resp.result.unwrap();
    assert_eq!(result["protocolVersion"], "2025-03-26");
    assert!(result["capabilities"]["tools"].is_object());
    assert_eq!(result["serverInfo"]["name"], "huskies");
}
|
||||
|
||||
// Exhaustive roster check for `tools/list`: every registered tool name must
// be present, `report_completion` must be absent, and the total count is
// pinned at 66 so adding/removing a tool forces a deliberate test update.
#[test]
fn tools_list_returns_all_tools() {
    let resp = handle_tools_list(Some(json!(2)));
    let result = resp.result.unwrap();
    let tools = result["tools"].as_array().unwrap();
    let names: Vec<&str> = tools.iter().map(|t| t["name"].as_str().unwrap()).collect();
    assert!(names.contains(&"create_story"));
    assert!(names.contains(&"validate_stories"));
    assert!(names.contains(&"list_upcoming"));
    assert!(names.contains(&"get_story_todos"));
    assert!(names.contains(&"record_tests"));
    assert!(names.contains(&"ensure_acceptance"));
    assert!(names.contains(&"start_agent"));
    assert!(names.contains(&"stop_agent"));
    assert!(names.contains(&"list_agents"));
    assert!(names.contains(&"get_agent_config"));
    assert!(names.contains(&"reload_agent_config"));
    assert!(names.contains(&"get_agent_output"));
    assert!(names.contains(&"wait_for_agent"));
    assert!(names.contains(&"get_agent_remaining_turns_and_budget"));
    assert!(names.contains(&"create_worktree"));
    assert!(names.contains(&"list_worktrees"));
    assert!(names.contains(&"remove_worktree"));
    assert!(names.contains(&"get_editor_command"));
    // report_completion was removed; make sure it does not resurface.
    assert!(!names.contains(&"report_completion"));
    assert!(names.contains(&"accept_story"));
    assert!(names.contains(&"check_criterion"));
    assert!(names.contains(&"add_criterion"));
    assert!(names.contains(&"update_story"));
    assert!(names.contains(&"create_spike"));
    assert!(names.contains(&"create_bug"));
    assert!(names.contains(&"list_bugs"));
    assert!(names.contains(&"close_bug"));
    assert!(names.contains(&"create_refactor"));
    assert!(names.contains(&"list_refactors"));
    assert!(names.contains(&"merge_agent_work"));
    assert!(names.contains(&"get_merge_status"));
    assert!(names.contains(&"move_story_to_merge"));
    assert!(names.contains(&"report_merge_failure"));
    assert!(names.contains(&"request_qa"));
    assert!(names.contains(&"approve_qa"));
    assert!(names.contains(&"reject_qa"));
    assert!(names.contains(&"launch_qa_app"));
    assert!(names.contains(&"get_server_logs"));
    assert!(names.contains(&"prompt_permission"));
    assert!(names.contains(&"get_pipeline_status"));
    assert!(names.contains(&"rebuild_and_restart"));
    assert!(names.contains(&"get_token_usage"));
    assert!(names.contains(&"move_story"));
    assert!(names.contains(&"unblock_story"));
    assert!(names.contains(&"delete_story"));
    assert!(names.contains(&"run_command"));
    assert!(names.contains(&"run_tests"));
    assert!(names.contains(&"get_test_result"));
    assert!(names.contains(&"run_build"));
    assert!(names.contains(&"run_lint"));
    assert!(names.contains(&"git_status"));
    assert!(names.contains(&"git_diff"));
    assert!(names.contains(&"git_add"));
    assert!(names.contains(&"git_commit"));
    assert!(names.contains(&"git_log"));
    assert!(names.contains(&"status"));
    assert!(names.contains(&"loc_file"));
    assert!(names.contains(&"dump_crdt"));
    assert!(names.contains(&"get_version"));
    assert!(names.contains(&"remove_criterion"));
    assert_eq!(tools.len(), 66);
}
|
||||
|
||||
// Every advertised tool must carry the MCP-required metadata: a name, a
// description, and an object-typed inputSchema.
#[test]
fn tools_list_schemas_have_required_fields() {
    let resp = handle_tools_list(Some(json!(1)));
    let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
    for tool in &tools {
        assert!(tool["name"].is_string(), "tool missing name");
        assert!(tool["description"].is_string(), "tool missing description");
        assert!(tool["inputSchema"].is_object(), "tool missing inputSchema");
        assert_eq!(tool["inputSchema"]["type"], "object");
    }
}
|
||||
|
||||
// Calling an unregistered tool must produce an in-band tool error
// (isError=true with "Unknown tool"), not a JSON-RPC protocol error.
#[test]
fn handle_tools_call_unknown_tool() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    // handle_tools_call is async; drive it on a throwaway runtime.
    let rt = tokio::runtime::Runtime::new().unwrap();
    let resp = rt.block_on(handle_tools_call(
        Some(json!(1)),
        &json!({"name": "bogus_tool", "arguments": {}}),
        &ctx,
    ));
    let result = resp.result.unwrap();
    assert_eq!(result["isError"], true);
    assert!(
        result["content"][0]["text"]
            .as_str()
            .unwrap()
            .contains("Unknown tool")
    );
}
|
||||
|
||||
// SSE wrapping must set the text/event-stream content type.
#[test]
fn to_sse_response_wraps_in_data_prefix() {
    let resp = JsonRpcResponse::success(Some(json!(1)), json!({"ok": true}));
    let http_resp = to_sse_response(resp);
    assert_eq!(
        http_resp.headers().get("content-type").unwrap(),
        "text/event-stream"
    );
}

// Plain JSON responses must set application/json.
#[test]
fn wants_sse_detects_accept_header() {
    // Can't easily construct a Request in tests without TestClient,
    // so test the logic indirectly via to_sse_response format
    let resp = JsonRpcResponse::success(Some(json!(1)), json!("ok"));
    let json_resp = to_json_response(resp);
    assert_eq!(
        json_resp.headers().get("content-type").unwrap(),
        "application/json"
    );
}

// JSON-RPC errors ride on HTTP 200 with a JSON body (per JSON-RPC-over-HTTP
// convention: transport succeeds, the error lives in the payload).
#[test]
fn json_rpc_error_response_builds_json_response() {
    let resp = json_rpc_error_response(Some(json!(42)), -32600, "test error".into());
    assert_eq!(resp.status(), poem::http::StatusCode::OK);
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "application/json"
    );
}
|
||||
|
||||
// ── HTTP handler tests (TestClient) ───────────────────────────
|
||||
|
||||
// Minimal poem app exposing just the /mcp route (POST + GET), wired to the
// same handlers as the production server.
fn test_mcp_app(ctx: std::sync::Arc<AppContext>) -> impl poem::Endpoint {
    use poem::EndpointExt;
    poem::Route::new()
        .at("/mcp", poem::post(mcp_post_handler).get(mcp_get_handler))
        .data(ctx)
}

// Drain a TestResponse body to a string and parse it as JSON.
async fn read_body_json(resp: poem::test::TestResponse) -> Value {
    let body = resp.0.into_body().into_string().await.unwrap();
    serde_json::from_str(&body).unwrap()
}

// POST a raw JSON-RPC payload to /mcp and return the parsed response body.
async fn post_json_mcp<E: poem::Endpoint>(cli: &poem::test::TestClient<E>, payload: &str) -> Value {
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .body(payload.to_string())
        .send()
        .await;
    read_body_json(resp).await
}
|
||||
|
||||
// GET is not a supported MCP transport method → 405.
#[tokio::test]
async fn mcp_get_handler_returns_405() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli.get("/mcp").send().await;
    assert_eq!(resp.0.status(), poem::http::StatusCode::METHOD_NOT_ALLOWED);
}

// Non-JSON content type → JSON-RPC error payload in the body.
#[tokio::test]
async fn mcp_post_invalid_content_type_returns_error() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "text/plain")
        .body("{}")
        .send()
        .await;
    let body = read_body_json(resp).await;
    assert!(body.get("error").is_some(), "expected error field: {body}");
}

// Malformed JSON body → parse error payload.
#[tokio::test]
async fn mcp_post_invalid_json_returns_parse_error() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .body("not-valid-json")
        .send()
        .await;
    let body = read_body_json(resp).await;
    assert!(body.get("error").is_some(), "expected error field: {body}");
}
|
||||
|
||||
// jsonrpc must be exactly "2.0"; anything else is rejected with a
// version-related error message.
#[tokio::test]
async fn mcp_post_wrong_jsonrpc_version_returns_error() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let body = post_json_mcp(
        &cli,
        r#"{"jsonrpc":"1.0","id":1,"method":"initialize","params":{}}"#,
    )
    .await;
    assert!(
        body["error"]["message"]
            .as_str()
            .unwrap_or("")
            .contains("version"),
        "expected version error: {body}"
    );
}

// A notification (id absent) gets HTTP 202 Accepted with no response body.
#[tokio::test]
async fn mcp_post_notification_with_null_id_returns_accepted() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .body(r#"{"jsonrpc":"2.0","method":"notifications/initialized","params":{}}"#)
        .send()
        .await;
    assert_eq!(resp.0.status(), poem::http::StatusCode::ACCEPTED);
}

// Same as above but with an explicit `"id": null` — still a notification.
#[tokio::test]
async fn mcp_post_notification_with_explicit_null_id_returns_accepted() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .body(r#"{"jsonrpc":"2.0","id":null,"method":"notifications/initialized","params":{}}"#)
        .send()
        .await;
    assert_eq!(resp.0.status(), poem::http::StatusCode::ACCEPTED);
}

// A request method (initialize) without an id is invalid — it is not a
// notification, so an error payload must come back.
#[tokio::test]
async fn mcp_post_missing_id_non_notification_returns_error() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let body = post_json_mcp(
        &cli,
        r#"{"jsonrpc":"2.0","method":"initialize","params":{}}"#,
    )
    .await;
    assert!(body.get("error").is_some(), "expected error: {body}");
}
|
||||
|
||||
// Unregistered JSON-RPC method → "Unknown method" error.
#[tokio::test]
async fn mcp_post_unknown_method_returns_error() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let body = post_json_mcp(
        &cli,
        r#"{"jsonrpc":"2.0","id":1,"method":"bogus/method","params":{}}"#,
    )
    .await;
    assert!(
        body["error"]["message"]
            .as_str()
            .unwrap_or("")
            .contains("Unknown method"),
        "expected unknown method error: {body}"
    );
}

// Full-stack initialize over HTTP mirrors the unit-level capabilities test.
#[tokio::test]
async fn mcp_post_initialize_returns_capabilities() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let body = post_json_mcp(
        &cli,
        r#"{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2025-03-26","capabilities":{},"clientInfo":{"name":"test","version":"1.0"}}}"#,
    )
    .await;
    assert_eq!(body["result"]["protocolVersion"], "2025-03-26");
    assert_eq!(body["result"]["serverInfo"]["name"], "huskies");
}

// tools/list over HTTP returns a JSON array of tools.
#[tokio::test]
async fn mcp_post_tools_list_returns_tools() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let body = post_json_mcp(
        &cli,
        r#"{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}"#,
    )
    .await;
    assert!(body["result"]["tools"].is_array());
}
|
||||
|
||||
// Accept: text/event-stream on a normal request switches the transport to SSE.
#[tokio::test]
async fn mcp_post_sse_returns_event_stream_content_type() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .header("accept", "text/event-stream")
        .body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}"#)
        .send()
        .await;
    assert_eq!(
        resp.0.headers().get("content-type").unwrap(),
        "text/event-stream"
    );
}

// get_agent_output with no story_id still answers over SSE (content-type
// check only; the error payload itself is covered elsewhere).
#[tokio::test]
async fn mcp_post_sse_get_agent_output_missing_story_id() {
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .header("accept", "text/event-stream")
        .body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{}}}"#)
        .send()
        .await;
    assert_eq!(
        resp.0.headers().get("content-type").unwrap(),
        "text/event-stream",
        "expected SSE content-type"
    );
}
|
||||
|
||||
#[tokio::test]
async fn mcp_post_sse_get_agent_output_without_agent_name_returns_disk_content() {
    // Without agent_name the SSE live-streaming intercept is skipped and
    // the disk-based handler runs. The transport still wraps the result in
    // SSE format (data: …\n\n) because the client sent Accept: text/event-stream,
    // but the content should be a valid JSON-RPC result, not a subscribe error.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .header("accept", "text/event-stream")
        .body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"1_test"}}}"#)
        .send()
        .await;
    let body = resp.0.into_body().into_string().await.unwrap();
    // Body is SSE-wrapped: "data: {…}\n\n" — strip the prefix and verify it's
    // a valid JSON-RPC result (not an error about missing agent_name).
    let json_part = body
        .trim_start_matches("data: ")
        .trim_end_matches("\n\n")
        .trim();
    let parsed: serde_json::Value = serde_json::from_str(json_part)
        .unwrap_or_else(|_| panic!("expected JSON-RPC in SSE body, got: {body}"));
    assert!(
        parsed.get("result").is_some(),
        "expected JSON-RPC result (disk-based handler ran): {parsed}"
    );
    // Must NOT be an error about missing agent_name (agent_name is now optional)
    assert!(
        parsed.get("error").is_none(),
        "unexpected error when agent_name omitted: {parsed}"
    );
}

#[tokio::test]
async fn mcp_post_sse_get_agent_output_no_agent_no_logs_returns_not_found() {
    // Agent not in pool and no log files → SSE success with "No log files found" message.
    let tmp = tempfile::tempdir().unwrap();
    let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .header("accept", "text/event-stream")
        .body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"99_nope","agent_name":"bot"}}}"#)
        .send()
        .await;
    assert_eq!(
        resp.0.headers().get("content-type").unwrap(),
        "text/event-stream"
    );
    let body = resp.0.into_body().into_string().await.unwrap();
    assert!(body.contains("data:"), "expected SSE data prefix: {body}");
    // Must NOT return isError — should be a success result with "No log files found"
    assert!(
        !body.contains("isError"),
        "expected no isError for missing agent: {body}"
    );
    assert!(
        body.contains("No log files found"),
        "expected not-found message: {body}"
    );
}

#[tokio::test]
async fn mcp_post_sse_get_agent_output_exited_agent_reads_disk_logs() {
    use crate::agent_log::AgentLogWriter;
    use crate::agents::AgentEvent;
    // Agent has exited (not in pool) but wrote logs to disk — the handler
    // must fall back to reading those logs and return them as a success.
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let mut writer = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-sse").unwrap();
    writer
        .write_event(&AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "disk output".to_string(),
        })
        .unwrap();
    // Drop flushes/closes the log file before the handler tries to read it.
    drop(writer);

    let ctx = std::sync::Arc::new(test_ctx(root));
    let cli = poem::test::TestClient::new(test_mcp_app(ctx));
    let resp = cli
        .post("/mcp")
        .header("content-type", "application/json")
        .header("accept", "text/event-stream")
        .body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"42_story_foo","agent_name":"coder-1"}}}"#)
        .send()
        .await;
    let body = resp.0.into_body().into_string().await.unwrap();
    assert!(
        body.contains("disk output"),
        "expected disk log content in SSE response: {body}"
    );
    assert!(
        !body.contains("isError"),
        "expected no error for exited agent with logs: {body}"
    );
}
|
||||
Reference in New Issue
Block a user