refactor: split http/mcp/shell_tools.rs (1144) into mod + exec + script
The 1144-line shell_tools.rs is split: - exec.rs: validate_working_dir + tool_run_command + handle_run_command_sse + their tests (~550 lines) - script.rs: tool_run_tests + tool_get_test_result + tool_run_build + tool_run_lint + helpers + their tests (~610 lines) - mod.rs: re-exports (~12 lines) Tests stay co-located. All 2636 tests pass; clippy clean.
This commit is contained in:
@@ -0,0 +1,548 @@
|
||||
//! MCP shell command execution: tool_run_command + SSE streaming variant.
|
||||
|
||||
use bytes::Bytes;
|
||||
use futures::StreamExt;
|
||||
use poem::{Body, Response};
|
||||
use serde_json::{Value, json};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::http::context::AppContext;
|
||||
#[allow(unused_imports)]
|
||||
use crate::service::shell::is_dangerous;
|
||||
|
||||
const DEFAULT_TIMEOUT_SECS: u64 = 120;
|
||||
const MAX_TIMEOUT_SECS: u64 = 600;
|
||||
|
||||
/// Validates that `working_dir` exists and is inside the project's
|
||||
/// `.huskies/worktrees/` directory. Returns the canonicalized path.
|
||||
///
|
||||
/// Thin wrapper that obtains the project root from `ctx` and delegates to
|
||||
/// `service::shell::io::validate_working_dir`.
|
||||
pub(super) fn validate_working_dir(working_dir: &str, ctx: &AppContext) -> Result<PathBuf, String> {
|
||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
crate::service::shell::io::validate_working_dir(working_dir, &project_root)
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
/// Regular (non-SSE) run_command: runs the bash command to completion and
|
||||
/// returns stdout, stderr, exit_code, and whether it timed out.
|
||||
pub(crate) async fn tool_run_command(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let command = args
|
||||
.get("command")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: command")?
|
||||
.to_string();
|
||||
|
||||
let working_dir = args
|
||||
.get("working_dir")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: working_dir")?;
|
||||
|
||||
let timeout_secs = args
|
||||
.get("timeout")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(DEFAULT_TIMEOUT_SECS)
|
||||
.min(MAX_TIMEOUT_SECS);
|
||||
|
||||
if let Some(reason) = is_dangerous(&command) {
|
||||
return Err(reason);
|
||||
}
|
||||
|
||||
let canonical_dir = validate_working_dir(working_dir, ctx)?;
|
||||
|
||||
let result = tokio::time::timeout(
|
||||
std::time::Duration::from_secs(timeout_secs),
|
||||
tokio::task::spawn_blocking({
|
||||
let cmd = command.clone();
|
||||
let dir = canonical_dir.clone();
|
||||
move || {
|
||||
std::process::Command::new("bash")
|
||||
.arg("-c")
|
||||
.arg(&cmd)
|
||||
.current_dir(&dir)
|
||||
.output()
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Err(_) => {
|
||||
// timed out
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"stdout": "",
|
||||
"stderr": format!("Command timed out after {timeout_secs}s"),
|
||||
"exit_code": -1,
|
||||
"timed_out": true,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
Ok(Err(e)) => Err(format!("Task join error: {e}")),
|
||||
Ok(Ok(Err(e))) => Err(format!("Failed to execute command: {e}")),
|
||||
Ok(Ok(Ok(output))) => serde_json::to_string_pretty(&json!({
|
||||
"stdout": String::from_utf8_lossy(&output.stdout),
|
||||
"stderr": String::from_utf8_lossy(&output.stderr),
|
||||
"exit_code": output.status.code().unwrap_or(-1),
|
||||
"timed_out": false,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}")),
|
||||
}
|
||||
}
|
||||
|
||||
/// SSE streaming run_command: spawns the process and emits stdout/stderr lines
/// as JSON-RPC progress notifications, then a final response carrying
/// `{exit_code, timed_out}`.
///
/// Validation (required args, dangerous-command check, worktree sandbox)
/// mirrors `tool_run_command`; any failure is emitted as a JSON-RPC error
/// event (-32602) on the SSE stream rather than an HTTP error status.
pub(crate) fn handle_run_command_sse(
    id: Option<Value>,
    params: &Value,
    ctx: &AppContext,
) -> Response {
    use super::super::{JsonRpcResponse, to_sse_response};

    // MCP tool-call params carry the tool arguments under "arguments".
    let args = params.get("arguments").cloned().unwrap_or(json!({}));

    let command = match args.get("command").and_then(|v| v.as_str()) {
        Some(c) => c.to_string(),
        None => {
            return to_sse_response(JsonRpcResponse::error(
                id,
                -32602,
                "Missing required argument: command".into(),
            ));
        }
    };

    let working_dir = match args.get("working_dir").and_then(|v| v.as_str()) {
        Some(d) => d.to_string(),
        None => {
            return to_sse_response(JsonRpcResponse::error(
                id,
                -32602,
                "Missing required argument: working_dir".into(),
            ));
        }
    };

    // Clamp the caller-supplied timeout to at most MAX_TIMEOUT_SECS.
    let timeout_secs = args
        .get("timeout")
        .and_then(|v| v.as_u64())
        .unwrap_or(DEFAULT_TIMEOUT_SECS)
        .min(MAX_TIMEOUT_SECS);

    // Refuse obviously destructive commands before spawning anything.
    if let Some(reason) = is_dangerous(&command) {
        return to_sse_response(JsonRpcResponse::error(id, -32602, reason));
    }

    // Sandbox: the command may only run inside .huskies/worktrees/.
    let canonical_dir = match validate_working_dir(&working_dir, ctx) {
        Ok(d) => d,
        Err(e) => return to_sse_response(JsonRpcResponse::error(id, -32602, e)),
    };

    // Moved into the stream closure so the final response can echo the id.
    let final_id = id;

    let stream = async_stream::stream! {
        use tokio::io::AsyncBufReadExt;

        let mut child = match tokio::process::Command::new("bash")
            .arg("-c")
            .arg(&command)
            .current_dir(&canonical_dir)
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .spawn()
        {
            Ok(c) => c,
            Err(e) => {
                // Spawn failure is reported as a tool-level error (isError),
                // not a protocol error, so the client still receives a
                // well-formed final JSON-RPC response on the stream.
                let resp = JsonRpcResponse::success(
                    final_id,
                    json!({
                        "content": [{"type": "text", "text": format!("Failed to spawn process: {e}")}],
                        "isError": true
                    }),
                );
                if let Ok(s) = serde_json::to_string(&resp) {
                    yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                }
                return;
            }
        };

        // Both pipes were requested above, so take() cannot return None here.
        let stdout = child.stdout.take().expect("stdout piped");
        let stderr = child.stderr.take().expect("stderr piped");
        let mut stdout_lines = tokio::io::BufReader::new(stdout).lines();
        let mut stderr_lines = tokio::io::BufReader::new(stderr).lines();

        // Absolute deadline for the whole command, enforced inside the loop.
        let deadline = tokio::time::Instant::now()
            + std::time::Duration::from_secs(timeout_secs);
        let mut stdout_done = false;
        let mut stderr_done = false;
        let mut timed_out = false;

        // Pump stdout and stderr concurrently until both hit EOF (or error)
        // or the deadline passes; each complete line becomes one SSE event.
        loop {
            if stdout_done && stderr_done {
                break;
            }

            let remaining = deadline.saturating_duration_since(tokio::time::Instant::now());
            if remaining.is_zero() {
                timed_out = true;
                // Best-effort kill; failure here is ignored and the child is
                // still reaped by wait() below.
                let _ = child.kill().await;
                break;
            }

            tokio::select! {
                line = stdout_lines.next_line(), if !stdout_done => {
                    match line {
                        Ok(Some(l)) => {
                            let notif = json!({
                                "jsonrpc": "2.0",
                                "method": "notifications/tools/progress",
                                "params": { "stream": "stdout", "line": l }
                            });
                            if let Ok(s) = serde_json::to_string(&notif) {
                                yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                            }
                        }
                        // EOF (Ok(None)) and read errors both end this pipe.
                        _ => { stdout_done = true; }
                    }
                }
                line = stderr_lines.next_line(), if !stderr_done => {
                    match line {
                        Ok(Some(l)) => {
                            let notif = json!({
                                "jsonrpc": "2.0",
                                "method": "notifications/tools/progress",
                                "params": { "stream": "stderr", "line": l }
                            });
                            if let Ok(s) = serde_json::to_string(&notif) {
                                yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                            }
                        }
                        _ => { stderr_done = true; }
                    }
                }
                // Deadline fired while blocked on a read: kill the child.
                // Output still buffered in the pipes is discarded.
                _ = tokio::time::sleep(remaining) => {
                    timed_out = true;
                    let _ = child.kill().await;
                    break;
                }
            }
        }

        // Reap the child (already exited, or just killed) to avoid a zombie;
        // -1 signals "no exit code" (signal-terminated or wait failed).
        let exit_code = child.wait().await.ok().and_then(|s| s.code()).unwrap_or(-1);

        let summary = json!({
            "exit_code": exit_code,
            "timed_out": timed_out,
        });

        let final_resp = JsonRpcResponse::success(
            final_id,
            json!({
                "content": [{"type": "text", "text": summary.to_string()}]
            }),
        );
        if let Ok(s) = serde_json::to_string(&final_resp) {
            yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
        }
    };

    Response::builder()
        .status(poem::http::StatusCode::OK)
        .header("Content-Type", "text/event-stream")
        .header("Cache-Control", "no-cache")
        .body(Body::from_bytes_stream(stream.map(|r| r.map(Bytes::from))))
}
|
||||
|
||||
// NOTE: the rustdoc block for `tool_run_tests` that previously sat here was
// a leftover from splitting shell_tools.rs — that function now lives in
// script.rs, and the stray `///` comment wrongly attached itself to this
// test module. It has been removed, along with the dead
// `── tool_run_tests ──` section header at the bottom.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::http::test_helpers::test_ctx;

    // ── is_dangerous ─────────────────────────────────────────────────

    #[test]
    fn is_dangerous_blocks_rm_rf_root() {
        assert!(is_dangerous("rm -rf /").is_some());
        assert!(is_dangerous(" rm -rf / ").is_some());
    }

    #[test]
    fn is_dangerous_blocks_rm_fr_root() {
        assert!(is_dangerous("rm -fr /").is_some());
    }

    #[test]
    fn is_dangerous_blocks_rm_rf_star() {
        assert!(is_dangerous("rm -rf /*").is_some());
        assert!(is_dangerous("rm -fr /*").is_some());
    }

    #[test]
    fn is_dangerous_blocks_sudo() {
        assert!(is_dangerous("sudo ls").is_some());
    }

    #[test]
    fn is_dangerous_blocks_shutdown() {
        assert!(is_dangerous("shutdown -h now").is_some());
    }

    #[test]
    fn is_dangerous_blocks_mkfs() {
        assert!(is_dangerous("mkfs /dev/sda1").is_some());
    }

    #[test]
    fn is_dangerous_blocks_fork_bomb() {
        assert!(is_dangerous(":(){ :|:& };:").is_some());
    }

    #[test]
    fn is_dangerous_allows_safe_commands() {
        assert!(is_dangerous("cargo build").is_none());
        assert!(is_dangerous("npm test").is_none());
        assert!(is_dangerous("git status").is_none());
        assert!(is_dangerous("ls -la").is_none());
        assert!(is_dangerous("rm -rf target/").is_none());
    }

    // ── validate_working_dir ──────────────────────────────────────────

    #[test]
    fn validate_working_dir_rejects_relative_path() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = validate_working_dir("relative/path", &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("absolute"));
    }

    #[test]
    fn validate_working_dir_rejects_nonexistent_path() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = validate_working_dir("/nonexistent_path_xyz_abc", &ctx);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("does not exist"));
    }

    #[test]
    fn validate_working_dir_rejects_path_outside_worktrees() {
        let tmp = tempfile::tempdir().unwrap();
        // Create the worktrees dir so it exists
        let wt_dir = tmp.path().join(".huskies").join("worktrees");
        std::fs::create_dir_all(&wt_dir).unwrap();
        let ctx = test_ctx(tmp.path());
        // Try to use /tmp (outside worktrees)
        let result = validate_working_dir(tmp.path().to_str().unwrap(), &ctx);
        assert!(result.is_err());
        assert!(
            result.unwrap_err().contains("inside .huskies/worktrees"),
            "expected sandbox error"
        );
    }

    #[test]
    fn validate_working_dir_accepts_path_inside_worktrees() {
        let tmp = tempfile::tempdir().unwrap();
        let story_wt = tmp
            .path()
            .join(".huskies")
            .join("worktrees")
            .join("42_test_story");
        std::fs::create_dir_all(&story_wt).unwrap();
        let ctx = test_ctx(tmp.path());
        let result = validate_working_dir(story_wt.to_str().unwrap(), &ctx);
        assert!(result.is_ok(), "expected Ok, got: {:?}", result);
    }

    #[test]
    fn validate_working_dir_rejects_no_worktrees_dir() {
        let tmp = tempfile::tempdir().unwrap();
        // Do NOT create worktrees dir
        let ctx = test_ctx(tmp.path());
        let result = validate_working_dir(tmp.path().to_str().unwrap(), &ctx);
        assert!(result.is_err());
    }

    // ── tool_run_command ───────────────────────────────────────────────

    #[tokio::test]
    async fn tool_run_command_missing_command() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(&json!({"working_dir": "/tmp"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("command"));
    }

    #[tokio::test]
    async fn tool_run_command_missing_working_dir() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(&json!({"command": "ls"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("working_dir"));
    }

    #[tokio::test]
    async fn tool_run_command_blocks_dangerous_command() {
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        let result =
            tool_run_command(&json!({"command": "rm -rf /", "working_dir": "/tmp"}), &ctx).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("blocked"));
    }

    #[tokio::test]
    async fn tool_run_command_rejects_path_outside_worktrees() {
        let tmp = tempfile::tempdir().unwrap();
        let wt_dir = tmp.path().join(".huskies").join("worktrees");
        std::fs::create_dir_all(&wt_dir).unwrap();
        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(
            &json!({
                "command": "ls",
                "working_dir": tmp.path().to_str().unwrap()
            }),
            &ctx,
        )
        .await;
        assert!(result.is_err());
        assert!(
            result.unwrap_err().contains("worktrees"),
            "expected sandbox error"
        );
    }

    #[tokio::test]
    async fn tool_run_command_runs_in_worktree_and_returns_output() {
        let tmp = tempfile::tempdir().unwrap();
        let story_wt = tmp
            .path()
            .join(".huskies")
            .join("worktrees")
            .join("42_test");
        std::fs::create_dir_all(&story_wt).unwrap();
        std::fs::write(story_wt.join("canary.txt"), "hello").unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(
            &json!({
                "command": "ls",
                "working_dir": story_wt.to_str().unwrap()
            }),
            &ctx,
        )
        .await
        .unwrap();

        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["exit_code"], 0);
        assert!(parsed["stdout"].as_str().unwrap().contains("canary.txt"));
        assert_eq!(parsed["timed_out"], false);
    }

    #[tokio::test]
    async fn tool_run_command_captures_nonzero_exit_code() {
        let tmp = tempfile::tempdir().unwrap();
        let story_wt = tmp
            .path()
            .join(".huskies")
            .join("worktrees")
            .join("43_test");
        std::fs::create_dir_all(&story_wt).unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(
            &json!({
                "command": "exit 42",
                "working_dir": story_wt.to_str().unwrap()
            }),
            &ctx,
        )
        .await
        .unwrap();

        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["exit_code"], 42);
        assert_eq!(parsed["timed_out"], false);
    }

    #[tokio::test]
    async fn tool_run_command_timeout_returns_timed_out_true() {
        let tmp = tempfile::tempdir().unwrap();
        let story_wt = tmp
            .path()
            .join(".huskies")
            .join("worktrees")
            .join("44_test");
        std::fs::create_dir_all(&story_wt).unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(
            &json!({
                "command": "sleep 10",
                "working_dir": story_wt.to_str().unwrap(),
                "timeout": 1
            }),
            &ctx,
        )
        .await
        .unwrap();

        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["timed_out"], true);
    }

    #[tokio::test]
    async fn tool_run_command_captures_stderr() {
        let tmp = tempfile::tempdir().unwrap();
        let story_wt = tmp
            .path()
            .join(".huskies")
            .join("worktrees")
            .join("45_test");
        std::fs::create_dir_all(&story_wt).unwrap();

        let ctx = test_ctx(tmp.path());
        let result = tool_run_command(
            &json!({
                "command": "echo 'error msg' >&2",
                "working_dir": story_wt.to_str().unwrap()
            }),
            &ctx,
        )
        .await
        .unwrap();

        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert!(
            parsed["stderr"].as_str().unwrap().contains("error msg"),
            "expected stderr: {parsed}"
        );
    }

    #[tokio::test]
    async fn tool_run_command_clamps_timeout_to_max() {
        // Verify timeout > 600 is clamped to 600. We don't run a 600s sleep;
        // just confirm the tool accepts the arg without error (sandbox check will
        // fail first in a different test, here we test the arg parsing path).
        let tmp = tempfile::tempdir().unwrap();
        let ctx = test_ctx(tmp.path());
        // Will fail at working_dir validation, not timeout parsing — that's fine
        let result = tool_run_command(
            &json!({"command": "ls", "working_dir": "/tmp", "timeout": 9999}),
            &ctx,
        )
        .await;
        // Just ensure it doesn't panic and returns an Err about sandbox (not timeout)
        assert!(result.is_err());
    }
}
|
||||
@@ -0,0 +1,10 @@
|
||||
//! MCP shell tools — run commands, execute tests, and stream output via MCP.
|
||||
//!
|
||||
//! This file is a thin adapter: it deserialises MCP payloads, delegates to
|
||||
//! `crate::service::shell` for all business logic, and serialises responses.
|
||||
|
||||
mod exec;
|
||||
mod script;
|
||||
|
||||
pub(crate) use exec::{handle_run_command_sse, tool_run_command};
|
||||
pub(crate) use script::{tool_get_test_result, tool_run_build, tool_run_lint, tool_run_tests};
|
||||
@@ -1,269 +1,17 @@
|
||||
//! MCP shell tools — run commands, execute tests, and stream output via MCP.
|
||||
//!
|
||||
//! This file is a thin adapter: it deserialises MCP payloads, delegates to
|
||||
//! `crate::service::shell` for all business logic, and serialises responses.
|
||||
//! MCP shell script tools: run_tests / get_test_result / run_build / run_lint.
|
||||
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use crate::http::context::AppContext;
|
||||
#[allow(unused_imports)]
|
||||
use crate::service::shell::{extract_count, is_dangerous, parse_test_counts, truncate_output};
|
||||
use bytes::Bytes;
|
||||
use futures::StreamExt;
|
||||
use poem::{Body, Response};
|
||||
use serde_json::{Value, json};
|
||||
use std::path::PathBuf;
|
||||
use crate::service::shell::{extract_count, parse_test_counts, truncate_output};
|
||||
|
||||
use super::exec::validate_working_dir;
|
||||
|
||||
const DEFAULT_TIMEOUT_SECS: u64 = 120;
|
||||
const MAX_TIMEOUT_SECS: u64 = 600;
|
||||
const TEST_TIMEOUT_SECS: u64 = 1200;
|
||||
const MAX_OUTPUT_LINES: usize = 100;
|
||||
|
||||
/// Validates that `working_dir` exists and is inside the project's
|
||||
/// `.huskies/worktrees/` directory. Returns the canonicalized path.
|
||||
///
|
||||
/// Thin wrapper that obtains the project root from `ctx` and delegates to
|
||||
/// `service::shell::io::validate_working_dir`.
|
||||
fn validate_working_dir(working_dir: &str, ctx: &AppContext) -> Result<PathBuf, String> {
|
||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
crate::service::shell::io::validate_working_dir(working_dir, &project_root)
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
/// Regular (non-SSE) run_command: runs the bash command to completion and
|
||||
/// returns stdout, stderr, exit_code, and whether it timed out.
|
||||
pub(super) async fn tool_run_command(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let command = args
|
||||
.get("command")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: command")?
|
||||
.to_string();
|
||||
|
||||
let working_dir = args
|
||||
.get("working_dir")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Missing required argument: working_dir")?;
|
||||
|
||||
let timeout_secs = args
|
||||
.get("timeout")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(DEFAULT_TIMEOUT_SECS)
|
||||
.min(MAX_TIMEOUT_SECS);
|
||||
|
||||
if let Some(reason) = is_dangerous(&command) {
|
||||
return Err(reason);
|
||||
}
|
||||
|
||||
let canonical_dir = validate_working_dir(working_dir, ctx)?;
|
||||
|
||||
let result = tokio::time::timeout(
|
||||
std::time::Duration::from_secs(timeout_secs),
|
||||
tokio::task::spawn_blocking({
|
||||
let cmd = command.clone();
|
||||
let dir = canonical_dir.clone();
|
||||
move || {
|
||||
std::process::Command::new("bash")
|
||||
.arg("-c")
|
||||
.arg(&cmd)
|
||||
.current_dir(&dir)
|
||||
.output()
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Err(_) => {
|
||||
// timed out
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"stdout": "",
|
||||
"stderr": format!("Command timed out after {timeout_secs}s"),
|
||||
"exit_code": -1,
|
||||
"timed_out": true,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
Ok(Err(e)) => Err(format!("Task join error: {e}")),
|
||||
Ok(Ok(Err(e))) => Err(format!("Failed to execute command: {e}")),
|
||||
Ok(Ok(Ok(output))) => serde_json::to_string_pretty(&json!({
|
||||
"stdout": String::from_utf8_lossy(&output.stdout),
|
||||
"stderr": String::from_utf8_lossy(&output.stderr),
|
||||
"exit_code": output.status.code().unwrap_or(-1),
|
||||
"timed_out": false,
|
||||
}))
|
||||
.map_err(|e| format!("Serialization error: {e}")),
|
||||
}
|
||||
}
|
||||
|
||||
/// SSE streaming run_command: spawns the process and emits stdout/stderr lines
/// as JSON-RPC notifications, then a final response with exit_code.
pub(super) fn handle_run_command_sse(
    id: Option<Value>,
    params: &Value,
    ctx: &AppContext,
) -> Response {
    use super::{JsonRpcResponse, to_sse_response};

    // MCP tool-call params carry the tool arguments under "arguments".
    let args = params.get("arguments").cloned().unwrap_or(json!({}));

    let command = match args.get("command").and_then(|v| v.as_str()) {
        Some(c) => c.to_string(),
        None => {
            return to_sse_response(JsonRpcResponse::error(
                id,
                -32602,
                "Missing required argument: command".into(),
            ));
        }
    };

    let working_dir = match args.get("working_dir").and_then(|v| v.as_str()) {
        Some(d) => d.to_string(),
        None => {
            return to_sse_response(JsonRpcResponse::error(
                id,
                -32602,
                "Missing required argument: working_dir".into(),
            ));
        }
    };

    // Clamp the caller-supplied timeout to at most MAX_TIMEOUT_SECS.
    let timeout_secs = args
        .get("timeout")
        .and_then(|v| v.as_u64())
        .unwrap_or(DEFAULT_TIMEOUT_SECS)
        .min(MAX_TIMEOUT_SECS);

    // Refuse obviously destructive commands before spawning anything.
    if let Some(reason) = is_dangerous(&command) {
        return to_sse_response(JsonRpcResponse::error(id, -32602, reason));
    }

    // Sandbox: the command may only run inside .huskies/worktrees/.
    let canonical_dir = match validate_working_dir(&working_dir, ctx) {
        Ok(d) => d,
        Err(e) => return to_sse_response(JsonRpcResponse::error(id, -32602, e)),
    };

    // Moved into the stream closure so the final response can echo the id.
    let final_id = id;

    let stream = async_stream::stream! {
        use tokio::io::AsyncBufReadExt;

        let mut child = match tokio::process::Command::new("bash")
            .arg("-c")
            .arg(&command)
            .current_dir(&canonical_dir)
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .spawn()
        {
            Ok(c) => c,
            Err(e) => {
                // Spawn failure becomes a tool-level error (isError), so the
                // client still gets a well-formed final JSON-RPC response.
                let resp = JsonRpcResponse::success(
                    final_id,
                    json!({
                        "content": [{"type": "text", "text": format!("Failed to spawn process: {e}")}],
                        "isError": true
                    }),
                );
                if let Ok(s) = serde_json::to_string(&resp) {
                    yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                }
                return;
            }
        };

        // Both pipes were requested above; take() cannot return None here.
        let stdout = child.stdout.take().expect("stdout piped");
        let stderr = child.stderr.take().expect("stderr piped");
        let mut stdout_lines = tokio::io::BufReader::new(stdout).lines();
        let mut stderr_lines = tokio::io::BufReader::new(stderr).lines();

        // Absolute deadline for the whole command, enforced inside the loop.
        let deadline = tokio::time::Instant::now()
            + std::time::Duration::from_secs(timeout_secs);
        let mut stdout_done = false;
        let mut stderr_done = false;
        let mut timed_out = false;

        // Pump both pipes concurrently until EOF on both, or the deadline.
        loop {
            if stdout_done && stderr_done {
                break;
            }

            let remaining = deadline.saturating_duration_since(tokio::time::Instant::now());
            if remaining.is_zero() {
                timed_out = true;
                let _ = child.kill().await;
                break;
            }

            tokio::select! {
                line = stdout_lines.next_line(), if !stdout_done => {
                    match line {
                        Ok(Some(l)) => {
                            let notif = json!({
                                "jsonrpc": "2.0",
                                "method": "notifications/tools/progress",
                                "params": { "stream": "stdout", "line": l }
                            });
                            if let Ok(s) = serde_json::to_string(&notif) {
                                yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                            }
                        }
                        // EOF (Ok(None)) and read errors both end this pipe.
                        _ => { stdout_done = true; }
                    }
                }
                line = stderr_lines.next_line(), if !stderr_done => {
                    match line {
                        Ok(Some(l)) => {
                            let notif = json!({
                                "jsonrpc": "2.0",
                                "method": "notifications/tools/progress",
                                "params": { "stream": "stderr", "line": l }
                            });
                            if let Ok(s) = serde_json::to_string(&notif) {
                                yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
                            }
                        }
                        _ => { stderr_done = true; }
                    }
                }
                // Deadline fired mid-read: kill the child; buffered output
                // still in the pipes is discarded.
                _ = tokio::time::sleep(remaining) => {
                    timed_out = true;
                    let _ = child.kill().await;
                    break;
                }
            }
        }

        // Reap the child (exited or just killed) to avoid a zombie.
        let exit_code = child.wait().await.ok().and_then(|s| s.code()).unwrap_or(-1);

        let summary = json!({
            "exit_code": exit_code,
            "timed_out": timed_out,
        });

        let final_resp = JsonRpcResponse::success(
            final_id,
            json!({
                "content": [{"type": "text", "text": summary.to_string()}]
            }),
        );
        if let Ok(s) = serde_json::to_string(&final_resp) {
            yield Ok::<_, std::io::Error>(format!("data: {s}\n\n"));
        }
    };

    Response::builder()
        .status(poem::http::StatusCode::OK)
        .header("Content-Type", "text/event-stream")
        .header("Cache-Control", "no-cache")
        .body(Body::from_bytes_stream(stream.map(|r| r.map(Bytes::from))))
}
|
||||
|
||||
/// Run the project's test suite (`script/test`) and block until complete.
|
||||
///
|
||||
/// Spawns the test process, then polls every second server-side until the
|
||||
/// child exits or the timeout is reached. Returns the full test result in
|
||||
/// a single MCP call — no polling needed from the agent.
|
||||
///
|
||||
/// The child process is properly killed on timeout (no zombies).
|
||||
pub(super) async fn tool_run_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
pub(crate) async fn tool_run_tests(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
let working_dir = match args.get("worktree_path").and_then(|v| v.as_str()) {
|
||||
@@ -421,8 +169,7 @@ const TEST_POLL_BLOCK_SECS: u64 = 20;
|
||||
///
|
||||
/// Blocks for up to 15 seconds, checking every second. Returns immediately
|
||||
/// when the test finishes, or after 15s with `{"status": "running"}`.
|
||||
/// This server-side blocking prevents agents from wasting turns polling.
|
||||
pub(super) async fn tool_get_test_result(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
pub(crate) async fn tool_get_test_result(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||
|
||||
let working_dir = match args.get("worktree_path").and_then(|v| v.as_str()) {
|
||||
@@ -528,7 +275,6 @@ pub(super) async fn tool_get_test_result(args: &Value, ctx: &AppContext) -> Resu
|
||||
Err("Test job in unexpected state".to_string())
|
||||
}
|
||||
|
||||
/// Collect stdout/stderr from a finished child and build a `TestJobResult`.
|
||||
fn collect_child_result(
|
||||
child: &mut std::process::Child,
|
||||
status: std::process::ExitStatus,
|
||||
@@ -557,7 +303,6 @@ fn collect_child_result(
|
||||
|
||||
/// Shared implementation for run_build and run_lint: runs a named script
|
||||
/// (`script/<name>`) in the working directory, captures output, and returns
|
||||
/// truncated JSON with `passed`, `exit_code`, and `output`.
|
||||
async fn run_script_tool(
|
||||
script_name: &str,
|
||||
args: &Value,
|
||||
@@ -608,17 +353,14 @@ async fn run_script_tool(
|
||||
.map_err(|e| format!("Serialization error: {e}"))
|
||||
}
|
||||
|
||||
/// Run the project's build script (`script/build`) and return the result.
|
||||
pub(super) async fn tool_run_build(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
pub(crate) async fn tool_run_build(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
run_script_tool("build", args, ctx).await
|
||||
}
|
||||
|
||||
/// Run the project's lint script (`script/lint`) and return the result.
|
||||
pub(super) async fn tool_run_lint(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
pub(crate) async fn tool_run_lint(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||
run_script_tool("lint", args, ctx).await
|
||||
}
|
||||
|
||||
/// Format a `TestJobResult` as the JSON string returned to the agent.
|
||||
fn format_test_result(result: &crate::http::context::TestJobResult) -> Result<String, String> {
|
||||
serde_json::to_string_pretty(&json!({
|
||||
"passed": result.passed,
|
||||
@@ -635,292 +377,6 @@ fn format_test_result(result: &crate::http::context::TestJobResult) -> Result<St
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::http::test_helpers::test_ctx;
|
||||
use serde_json::json;
|
||||
|
||||
// ── is_dangerous ─────────────────────────────────────────────────
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_rm_rf_root() {
|
||||
assert!(is_dangerous("rm -rf /").is_some());
|
||||
assert!(is_dangerous(" rm -rf / ").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_rm_fr_root() {
|
||||
assert!(is_dangerous("rm -fr /").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_rm_rf_star() {
|
||||
assert!(is_dangerous("rm -rf /*").is_some());
|
||||
assert!(is_dangerous("rm -fr /*").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_sudo() {
|
||||
assert!(is_dangerous("sudo ls").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_shutdown() {
|
||||
assert!(is_dangerous("shutdown -h now").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_mkfs() {
|
||||
assert!(is_dangerous("mkfs /dev/sda1").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_blocks_fork_bomb() {
|
||||
assert!(is_dangerous(":(){ :|:& };:").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_dangerous_allows_safe_commands() {
|
||||
assert!(is_dangerous("cargo build").is_none());
|
||||
assert!(is_dangerous("npm test").is_none());
|
||||
assert!(is_dangerous("git status").is_none());
|
||||
assert!(is_dangerous("ls -la").is_none());
|
||||
assert!(is_dangerous("rm -rf target/").is_none());
|
||||
}
|
||||
|
||||
// ── validate_working_dir ──────────────────────────────────────────
|
||||
|
||||
#[test]
|
||||
fn validate_working_dir_rejects_relative_path() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = validate_working_dir("relative/path", &ctx);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("absolute"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn validate_working_dir_rejects_nonexistent_path() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = validate_working_dir("/nonexistent_path_xyz_abc", &ctx);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("does not exist"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn validate_working_dir_rejects_path_outside_worktrees() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
// Create the worktrees dir so it exists
|
||||
let wt_dir = tmp.path().join(".huskies").join("worktrees");
|
||||
std::fs::create_dir_all(&wt_dir).unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
// Try to use /tmp (outside worktrees)
|
||||
let result = validate_working_dir(tmp.path().to_str().unwrap(), &ctx);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result.unwrap_err().contains("inside .huskies/worktrees"),
|
||||
"expected sandbox error"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn validate_working_dir_accepts_path_inside_worktrees() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let story_wt = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("worktrees")
|
||||
.join("42_test_story");
|
||||
std::fs::create_dir_all(&story_wt).unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = validate_working_dir(story_wt.to_str().unwrap(), &ctx);
|
||||
assert!(result.is_ok(), "expected Ok, got: {:?}", result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn validate_working_dir_rejects_no_worktrees_dir() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
// Do NOT create worktrees dir
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = validate_working_dir(tmp.path().to_str().unwrap(), &ctx);
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
// ── tool_run_command ───────────────────────────────────────────────
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_missing_command() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(&json!({"working_dir": "/tmp"}), &ctx).await;
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("command"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_missing_working_dir() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(&json!({"command": "ls"}), &ctx).await;
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("working_dir"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_blocks_dangerous_command() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result =
|
||||
tool_run_command(&json!({"command": "rm -rf /", "working_dir": "/tmp"}), &ctx).await;
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("blocked"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_rejects_path_outside_worktrees() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let wt_dir = tmp.path().join(".huskies").join("worktrees");
|
||||
std::fs::create_dir_all(&wt_dir).unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(
|
||||
&json!({
|
||||
"command": "ls",
|
||||
"working_dir": tmp.path().to_str().unwrap()
|
||||
}),
|
||||
&ctx,
|
||||
)
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result.unwrap_err().contains("worktrees"),
|
||||
"expected sandbox error"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_runs_in_worktree_and_returns_output() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let story_wt = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("worktrees")
|
||||
.join("42_test");
|
||||
std::fs::create_dir_all(&story_wt).unwrap();
|
||||
std::fs::write(story_wt.join("canary.txt"), "hello").unwrap();
|
||||
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(
|
||||
&json!({
|
||||
"command": "ls",
|
||||
"working_dir": story_wt.to_str().unwrap()
|
||||
}),
|
||||
&ctx,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||
assert_eq!(parsed["exit_code"], 0);
|
||||
assert!(parsed["stdout"].as_str().unwrap().contains("canary.txt"));
|
||||
assert_eq!(parsed["timed_out"], false);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_captures_nonzero_exit_code() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let story_wt = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("worktrees")
|
||||
.join("43_test");
|
||||
std::fs::create_dir_all(&story_wt).unwrap();
|
||||
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(
|
||||
&json!({
|
||||
"command": "exit 42",
|
||||
"working_dir": story_wt.to_str().unwrap()
|
||||
}),
|
||||
&ctx,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||
assert_eq!(parsed["exit_code"], 42);
|
||||
assert_eq!(parsed["timed_out"], false);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_timeout_returns_timed_out_true() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let story_wt = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("worktrees")
|
||||
.join("44_test");
|
||||
std::fs::create_dir_all(&story_wt).unwrap();
|
||||
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(
|
||||
&json!({
|
||||
"command": "sleep 10",
|
||||
"working_dir": story_wt.to_str().unwrap(),
|
||||
"timeout": 1
|
||||
}),
|
||||
&ctx,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||
assert_eq!(parsed["timed_out"], true);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_captures_stderr() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let story_wt = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("worktrees")
|
||||
.join("45_test");
|
||||
std::fs::create_dir_all(&story_wt).unwrap();
|
||||
|
||||
let ctx = test_ctx(tmp.path());
|
||||
let result = tool_run_command(
|
||||
&json!({
|
||||
"command": "echo 'error msg' >&2",
|
||||
"working_dir": story_wt.to_str().unwrap()
|
||||
}),
|
||||
&ctx,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||
assert!(
|
||||
parsed["stderr"].as_str().unwrap().contains("error msg"),
|
||||
"expected stderr: {parsed}"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_command_clamps_timeout_to_max() {
|
||||
// Verify timeout > 600 is clamped to 600. We don't run a 600s sleep;
|
||||
// just confirm the tool accepts the arg without error (sandbox check will
|
||||
// fail first in a different test, here we test the arg parsing path).
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let ctx = test_ctx(tmp.path());
|
||||
// Will fail at working_dir validation, not timeout parsing — that's fine
|
||||
let result = tool_run_command(
|
||||
&json!({"command": "ls", "working_dir": "/tmp", "timeout": 9999}),
|
||||
&ctx,
|
||||
)
|
||||
.await;
|
||||
// Just ensure it doesn't panic and returns an Err about sandbox (not timeout)
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
// ── tool_run_tests ────────────────────────────────────────────────
|
||||
|
||||
#[tokio::test]
|
||||
async fn tool_run_tests_missing_script_returns_error() {
|
||||
Reference in New Issue
Block a user