huskies: merge 531_story_mcp_tool_to_read_agent_session_logs_from_disk_not_just_live_stream

This commit is contained in:
dave
2026-04-10 13:05:04 +00:00
parent 31388da609
commit 1dd675796b
3 changed files with 697 additions and 137 deletions
+432
View File
@@ -78,6 +78,7 @@ fn log_dir(project_root: &Path, story_id: &str) -> PathBuf {
}
/// Return the path to a specific log file.
#[allow(dead_code)]
pub fn log_file_path(
project_root: &Path,
story_id: &str,
@@ -108,6 +109,208 @@ pub fn read_log(path: &Path) -> Result<Vec<LogEntry>, String> {
Ok(entries)
}
/// List all log files for a story, optionally filtered by agent name prefix.
///
/// Returns files sorted by modification time (oldest first) so that when all
/// sessions are concatenated the timeline reads chronologically.
pub fn list_story_log_files(
project_root: &Path,
story_id: &str,
agent_name_filter: Option<&str>,
) -> Vec<PathBuf> {
let dir = log_dir(project_root, story_id);
if !dir.is_dir() {
return Vec::new();
}
let prefix = agent_name_filter.map(|n| format!("{n}-"));
let mut files: Vec<(PathBuf, std::time::SystemTime)> = Vec::new();
if let Ok(entries) = fs::read_dir(&dir) {
for entry in entries.flatten() {
let path = entry.path();
let name = match path.file_name().and_then(|n| n.to_str()) {
Some(n) => n.to_string(),
None => continue,
};
if !name.ends_with(".log") {
continue;
}
if let Some(ref pfx) = prefix
&& !name.starts_with(pfx.as_str())
{
continue;
}
let modified = entry
.metadata()
.and_then(|m| m.modified())
.unwrap_or(std::time::SystemTime::UNIX_EPOCH);
files.push((path, modified));
}
}
files.sort_by_key(|(_, t)| *t);
files.into_iter().map(|(p, _)| p).collect()
}
/// Format a single log entry as a human-readable text line.
///
/// `timestamp` is an ISO 8601 string; `event` is the flattened `AgentEvent`
/// value (has `type`, `agent_name`, etc. at the top level).
///
/// Returns `None` for entries that should be skipped (raw streaming noise,
/// trivial status changes, empty output, etc.).
/// Format a single log entry as a human-readable text line.
///
/// `timestamp` is an ISO 8601 string; `event` is the flattened `AgentEvent`
/// value (has `type`, `agent_name`, etc. at the top level).
///
/// Returns `None` for entries that should be skipped (raw streaming noise,
/// trivial status changes, empty output, etc.).
pub fn format_log_entry_as_text(timestamp: &str, event: &serde_json::Value) -> Option<String> {
    let agent_name = event
        .get("agent_name")
        .and_then(|v| v.as_str())
        .unwrap_or("?");
    // Extract HH:MM:SS from ISO 8601 "2026-04-10T12:48:02.123456789+00:00".
    // `str::get` (instead of direct slicing) falls back to the full string
    // rather than panicking if bytes 11..19 are out of range or not on char
    // boundaries in a malformed timestamp.
    let ts_short = timestamp.get(11..19).unwrap_or(timestamp);
    let pfx = format!("[{ts_short}][{agent_name}]");
    match event.get("type").and_then(|v| v.as_str()) {
        Some("output") => {
            let text = event
                .get("text")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .trim();
            if text.is_empty() {
                None
            } else {
                Some(format!("{pfx} {text}"))
            }
        }
        Some("error") => {
            let msg = event
                .get("message")
                .and_then(|v| v.as_str())
                .unwrap_or("(unknown error)");
            Some(format!("{pfx} ERROR: {msg}"))
        }
        Some("done") => Some(format!("{pfx} DONE")),
        Some("status") => {
            // Skip trivial running/started noise
            let status = event
                .get("status")
                .and_then(|v| v.as_str())
                .unwrap_or("?");
            match status {
                "running" | "started" => None,
                _ => Some(format!("{pfx} STATUS: {status}")),
            }
        }
        Some("agent_json") => {
            let data = event.get("data")?;
            match data.get("type").and_then(|v| v.as_str()) {
                Some("assistant") => {
                    let mut parts: Vec<String> = Vec::new();
                    if let Some(arr) = data
                        .pointer("/message/content")
                        .and_then(|v| v.as_array())
                    {
                        for item in arr {
                            match item.get("type").and_then(|v| v.as_str()) {
                                Some("text") => {
                                    let text = item
                                        .get("text")
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("")
                                        .trim();
                                    if !text.is_empty() {
                                        parts.push(format!("{pfx} {text}"));
                                    }
                                }
                                Some("tool_use") => {
                                    let name = item
                                        .get("name")
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("?");
                                    let input = item
                                        .get("input")
                                        .map(|v| {
                                            serde_json::to_string(v).unwrap_or_default()
                                        })
                                        .unwrap_or_default();
                                    // Truncate long tool inputs. The cut is
                                    // backed off to a char boundary so
                                    // multi-byte UTF-8 in the input can never
                                    // cause a slice panic.
                                    let display = if input.len() > 200 {
                                        format!("{}...", truncate_to_char_boundary(&input, 200))
                                    } else {
                                        input
                                    };
                                    parts.push(format!("{pfx} TOOL: {name}({display})"));
                                }
                                _ => {}
                            }
                        }
                    }
                    if parts.is_empty() {
                        None
                    } else {
                        Some(parts.join("\n"))
                    }
                }
                Some("user") => {
                    let mut parts: Vec<String> = Vec::new();
                    if let Some(arr) = data
                        .pointer("/message/content")
                        .and_then(|v| v.as_array())
                    {
                        for item in arr {
                            if item.get("type").and_then(|v| v.as_str())
                                != Some("tool_result")
                            {
                                continue;
                            }
                            let content_str = match item.get("content") {
                                Some(serde_json::Value::String(s)) => s.clone(),
                                Some(v) => v.to_string(),
                                None => String::new(),
                            };
                            // Tool results can be huge: cap at ~500 bytes,
                            // again respecting char boundaries, and report
                            // the true character count in the suffix.
                            let display = if content_str.len() > 500 {
                                format!(
                                    "{}... [{} chars total]",
                                    truncate_to_char_boundary(&content_str, 500),
                                    content_str.chars().count()
                                )
                            } else {
                                content_str
                            };
                            if !display.trim().is_empty() {
                                parts.push(format!("{pfx} RESULT: {display}"));
                            }
                        }
                    }
                    if parts.is_empty() {
                        None
                    } else {
                        Some(parts.join("\n"))
                    }
                }
                _ => None, // Skip stream_event, system init, etc.
            }
        }
        _ => None,
    }
}

/// Truncate `s` to at most `max_bytes` bytes, backing the cut off to the
/// nearest UTF-8 char boundary so the byte slice can never panic
/// mid-character. Returns `s` unchanged when it already fits.
fn truncate_to_char_boundary(s: &str, max_bytes: usize) -> &str {
    if s.len() <= max_bytes {
        return s;
    }
    let mut end = max_bytes;
    while end > 0 && !s.is_char_boundary(end) {
        end -= 1;
    }
    &s[..end]
}
/// Read log entries from a file and convert them to human-readable text lines,
/// stripping raw streaming noise and JSON internals.
/// Read log entries from a file and convert them to human-readable text lines,
/// stripping raw streaming noise and JSON internals.
pub fn read_log_as_readable_lines(path: &Path) -> Result<Vec<String>, String> {
    // Entries that format to None (noise, empty output) are dropped.
    let readable = read_log(path)?
        .iter()
        .filter_map(|entry| format_log_entry_as_text(&entry.timestamp, &entry.event))
        .collect();
    Ok(readable)
}
/// Find the most recent log file for a given story+agent combination.
///
/// Scans `.huskies/logs/{story_id}/` for files matching `{agent_name}-*.log`
@@ -342,6 +545,235 @@ mod tests {
);
}
// A story with no log directory on disk yields an empty list, not an error.
#[test]
fn test_list_story_log_files_returns_empty_for_missing_dir() {
    let tmp = tempdir().unwrap();
    let files = list_story_log_files(tmp.path(), "nonexistent", None);
    assert!(files.is_empty());
}
// With no agent filter, every session log for the story is returned,
// ordered oldest-first by file modification time.
#[test]
fn test_list_story_log_files_no_filter_returns_all_logs() {
    let tmp = tempdir().unwrap();
    let root = tmp.path();
    let mut w1 = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-aaa").unwrap();
    w1.write_event(&AgentEvent::Output {
        story_id: "42_story_foo".to_string(),
        agent_name: "coder-1".to_string(),
        text: "from coder-1".to_string(),
    })
    .unwrap();
    drop(w1);
    // Sleep so the second file gets a strictly later mtime, making the
    // oldest-first ordering assertion below deterministic.
    std::thread::sleep(std::time::Duration::from_millis(10));
    let mut w2 =
        AgentLogWriter::new(root, "42_story_foo", "mergemaster", "sess-bbb").unwrap();
    w2.write_event(&AgentEvent::Output {
        story_id: "42_story_foo".to_string(),
        agent_name: "mergemaster".to_string(),
        text: "from mergemaster".to_string(),
    })
    .unwrap();
    drop(w2);
    let files = list_story_log_files(root, "42_story_foo", None);
    assert_eq!(files.len(), 2, "Should find both log files");
    // Oldest first
    assert!(
        files[0].to_string_lossy().contains("coder-1"),
        "coder-1 should be first (older)"
    );
}
// The agent_name filter restricts results to files named "{agent}-*.log".
#[test]
fn test_list_story_log_files_with_agent_filter() {
    let tmp = tempdir().unwrap();
    let root = tmp.path();
    let mut w1 = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-a").unwrap();
    w1.write_event(&AgentEvent::Output {
        story_id: "42_story_foo".to_string(),
        agent_name: "coder-1".to_string(),
        text: "from coder-1".to_string(),
    })
    .unwrap();
    drop(w1);
    let mut w2 =
        AgentLogWriter::new(root, "42_story_foo", "mergemaster", "sess-b").unwrap();
    w2.write_event(&AgentEvent::Output {
        story_id: "42_story_foo".to_string(),
        agent_name: "mergemaster".to_string(),
        text: "from mergemaster".to_string(),
    })
    .unwrap();
    drop(w2);
    let files = list_story_log_files(root, "42_story_foo", Some("coder-1"));
    assert_eq!(files.len(), 1, "Should find only coder-1 log");
    assert!(files[0].to_string_lossy().contains("coder-1"));
}
// An "output" event renders as "[HH:MM:SS][agent] text".
#[test]
fn test_format_log_entry_output_event() {
    let ts = "2026-04-10T12:48:02.123456789+00:00";
    let event = serde_json::json!({
        "type": "output",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "text": "Hello world"
    });
    let result = format_log_entry_as_text(ts, &event).unwrap();
    assert!(result.contains("12:48:02"), "should include timestamp");
    assert!(result.contains("coder-1"), "should include agent name");
    assert!(result.contains("Hello world"), "should include text");
}
// Whitespace-only output text is noise and must be dropped (None).
#[test]
fn test_format_log_entry_skips_empty_output() {
    let ts = "2026-04-10T12:48:02.123456789+00:00";
    let event = serde_json::json!({
        "type": "output",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "text": " "
    });
    assert!(format_log_entry_as_text(ts, &event).is_none());
}
// An "error" event renders with an ERROR: marker plus the message.
#[test]
fn test_format_log_entry_error_event() {
    let ts = "2026-04-10T12:48:02.123+00:00";
    let event = serde_json::json!({
        "type": "error",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "message": "Something went wrong"
    });
    let result = format_log_entry_as_text(ts, &event).unwrap();
    assert!(result.contains("ERROR"));
    assert!(result.contains("Something went wrong"));
}
// A "done" event renders as a DONE marker line.
#[test]
fn test_format_log_entry_done_event() {
    let ts = "2026-04-10T12:48:02.123+00:00";
    let event = serde_json::json!({
        "type": "done",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "session_id": null
    });
    let result = format_log_entry_as_text(ts, &event).unwrap();
    assert!(result.contains("DONE"));
}
// Routine "running" status updates are noise and must be dropped (None).
#[test]
fn test_format_log_entry_skips_running_status() {
    let ts = "2026-04-10T12:48:02.123+00:00";
    let event = serde_json::json!({
        "type": "status",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "status": "running"
    });
    assert!(format_log_entry_as_text(ts, &event).is_none());
}
// An agent_json assistant message with a tool_use block renders as
// "TOOL: {name}({serialized input})".
#[test]
fn test_format_log_entry_agent_json_tool_use() {
    let ts = "2026-04-10T12:48:03.000+00:00";
    let event = serde_json::json!({
        "type": "agent_json",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "data": {
            "type": "assistant",
            "message": {
                "content": [
                    {
                        "type": "tool_use",
                        "id": "tool-1",
                        "name": "Read",
                        "input": {"file_path": "/some/file.rs"}
                    }
                ]
            }
        }
    });
    let result = format_log_entry_as_text(ts, &event).unwrap();
    assert!(result.contains("TOOL: Read"), "should show tool call: {result}");
    assert!(result.contains("file_path"), "should show input: {result}");
}
// An agent_json assistant message with a plain text block renders the text.
#[test]
fn test_format_log_entry_agent_json_text() {
    let ts = "2026-04-10T12:48:04.000+00:00";
    let event = serde_json::json!({
        "type": "agent_json",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "data": {
            "type": "assistant",
            "message": {
                "content": [
                    {
                        "type": "text",
                        "text": "Now I will read the file."
                    }
                ]
            }
        }
    });
    let result = format_log_entry_as_text(ts, &event).unwrap();
    assert!(result.contains("Now I will read the file."), "should show text: {result}");
}
// Raw streaming deltas (data.type == "stream_event") are noise: dropped.
#[test]
fn test_format_log_entry_skips_stream_events() {
    let ts = "2026-04-10T12:48:04.000+00:00";
    let event = serde_json::json!({
        "type": "agent_json",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "data": {
            "type": "stream_event",
            "event": {"type": "content_block_delta", "delta": {"type": "text_delta", "text": "chunk"}}
        }
    });
    assert!(format_log_entry_as_text(ts, &event).is_none(), "stream events should be skipped");
}
// End-to-end: events written via AgentLogWriter round-trip through
// read_log_as_readable_lines into one formatted line per kept event.
#[test]
fn test_read_log_as_readable_lines_produces_formatted_output() {
    let tmp = tempdir().unwrap();
    let root = tmp.path();
    let mut writer =
        AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-readable").unwrap();
    writer
        .write_event(&AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "Let me read the file".to_string(),
        })
        .unwrap();
    writer
        .write_event(&AgentEvent::Done {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            session_id: Some("sess-readable".to_string()),
        })
        .unwrap();
    let path = log_file_path(root, "42_story_foo", "coder-1", "sess-readable");
    let lines = read_log_as_readable_lines(&path).unwrap();
    assert_eq!(lines.len(), 2, "Should produce 2 readable lines");
    assert!(lines[0].contains("Let me read the file"), "first line: {}", lines[0]);
    assert!(lines[1].contains("DONE"), "second line: {}", lines[1]);
}
#[test]
fn test_find_latest_log_returns_none_for_missing_dir() {
let tmp = tempdir().unwrap();
+227 -123
View File
@@ -96,111 +96,97 @@ pub(super) fn tool_list_agents(ctx: &AppContext) -> Result<String, String> {
.map_err(|e| format!("Serialization error: {e}"))
}
/// Poll buffered output from an agent's in-memory event stream.
///
/// Drains any events queued since the last call and returns them as a JSON
/// payload; `done` is set once a terminal (`Done`/`Error`) event is seen.
/// When the agent is not tracked in memory (e.g. it finished or the server
/// restarted), falls back to the persistent log file on disk.
pub(super) async fn tool_get_agent_output_poll(args: &Value, ctx: &AppContext) -> Result<String, String> {
    let story_id = args
        .get("story_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: story_id")?;
    let agent_name = args
        .get("agent_name")
        .and_then(|v| v.as_str())
        .ok_or("Missing required argument: agent_name")?;
    // Try draining in-memory events first.
    match ctx.agents.drain_events(story_id, agent_name) {
        Ok(drained) => {
            // Terminal events mark the end of the stream; compute this before
            // the events are consumed into JSON values below.
            let done = drained.iter().any(|e| {
                matches!(
                    e,
                    crate::agents::AgentEvent::Done { .. }
                        | crate::agents::AgentEvent::Error { .. }
                )
            });
            // Events that fail to serialize are silently dropped (best-effort).
            let events: Vec<serde_json::Value> = drained
                .into_iter()
                .filter_map(|e| serde_json::to_value(&e).ok())
                .collect();
            serde_json::to_string_pretty(&json!({
                "events": events,
                "done": done,
                "event_count": events.len(),
                "message": if done { "Agent stream ended." } else if events.is_empty() { "No new events. Call again to continue." } else { "Events returned. Call again to continue." }
            }))
            .map_err(|e| format!("Serialization error: {e}"))
        }
        Err(_) => {
            // Agent not in memory — fall back to persistent log file.
            get_agent_output_from_log(story_id, agent_name, ctx)
        }
    }
}
/// Fall back to reading agent output from the persistent log file on disk.
/// Read agent session logs from disk and return a human-readable timeline.
///
/// Tries to find the log file via the agent's stored log_session_id first,
/// then falls back to `find_latest_log` scanning the log directory.
pub(super) fn get_agent_output_from_log(
story_id: &str,
agent_name: &str,
/// Stitches all session log files for the story together in chronological
/// order. If `agent_name` is omitted, logs from every agent are included.
/// Supports `lines` (tail the last N lines) and `filter` (substring match).
/// If a named agent is currently running, its buffered in-memory events are
/// appended as "live" output so the caller sees everything in one call.
pub(super) async fn tool_get_agent_output(
args: &Value,
ctx: &AppContext,
) -> Result<String, String> {
use crate::agent_log;
let story_id = args
.get("story_id")
.and_then(|v| v.as_str())
.ok_or("Missing required argument: story_id")?;
let agent_name_filter = args.get("agent_name").and_then(|v| v.as_str());
let tail = args
.get("lines")
.and_then(|v| v.as_u64())
.map(|n| n as usize);
let filter = args.get("filter").and_then(|v| v.as_str());
let project_root = ctx.agents.get_project_root(&ctx.state)?;
// Try to find the log file: first from in-memory agent info, then by scanning.
let log_path = ctx
.agents
.get_log_info(story_id, agent_name)
.map(|(session_id, root)| agent_log::log_file_path(&root, story_id, agent_name, &session_id))
.filter(|p| p.exists())
.or_else(|| agent_log::find_latest_log(&project_root, story_id, agent_name));
// Collect all matching log files, oldest first.
let log_files =
agent_log::list_story_log_files(&project_root, story_id, agent_name_filter);
let log_path = match log_path {
Some(p) => p,
None => {
return serde_json::to_string_pretty(&json!({
"events": [],
"done": true,
"event_count": 0,
"message": format!("No agent '{agent_name}' for story '{story_id}' and no log file found."),
"source": "none",
}))
.map_err(|e| format!("Serialization error: {e}"));
let mut all_lines: Vec<String> = Vec::new();
for path in &log_files {
let file_name = path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("?");
all_lines.push(format!("=== {} ===", file_name.trim_end_matches(".log")));
match agent_log::read_log_as_readable_lines(path) {
Ok(lines) => all_lines.extend(lines),
Err(e) => all_lines.push(format!("[ERROR reading log: {e}]")),
}
all_lines.push(String::new()); // blank line between sessions
}
// Append buffered live events when a specific agent is requested.
if let Some(agent_name) = agent_name_filter
&& let Ok(live_events) = ctx.agents.drain_events(story_id, agent_name)
&& !live_events.is_empty()
{
all_lines.push(format!("=== {agent_name} (live) ==="));
let now = chrono::Utc::now().to_rfc3339();
for event in &live_events {
if let Ok(event_value) = serde_json::to_value(event)
&& let Some(line) =
agent_log::format_log_entry_as_text(&now, &event_value)
{
all_lines.push(line);
}
}
all_lines.push(String::new());
}
if log_files.is_empty() {
let agent_hint = agent_name_filter
.map(|n| format!(" agent '{n}'"))
.unwrap_or_default();
return Ok(format!(
"No log files found for story '{story_id}'{agent_hint}."
));
}
// Apply substring filter (always keep section headers).
let filtered: Vec<String> = if let Some(f) = filter {
all_lines
.into_iter()
.filter(|l| l.starts_with("===") || l.contains(f))
.collect()
} else {
all_lines
};
match agent_log::read_log(&log_path) {
Ok(entries) => {
let events: Vec<serde_json::Value> = entries
.into_iter()
.map(|e| {
let mut val = e.event;
if let serde_json::Value::Object(ref mut map) = val {
map.insert(
"timestamp".to_string(),
serde_json::Value::String(e.timestamp),
);
}
val
})
.collect();
// Apply tail (last N lines).
let output = if let Some(n) = tail {
let start = filtered.len().saturating_sub(n);
filtered[start..].join("\n")
} else {
filtered.join("\n")
};
let count = events.len();
serde_json::to_string_pretty(&json!({
"events": events,
"done": true,
"event_count": count,
"message": "Events loaded from persistent log file.",
"source": "log_file",
"log_file": log_path.to_string_lossy(),
}))
.map_err(|e| format!("Serialization error: {e}"))
}
Err(e) => Err(format!("Failed to read log file: {e}")),
}
Ok(output)
}
pub(super) fn tool_get_agent_config(ctx: &AppContext) -> Result<String, String> {
@@ -396,61 +382,179 @@ mod tests {
}
#[tokio::test]
async fn tool_get_agent_output_poll_missing_story_id() {
async fn tool_get_agent_output_missing_story_id() {
let tmp = tempfile::tempdir().unwrap();
let ctx = test_ctx(tmp.path());
let result = tool_get_agent_output_poll(&json!({"agent_name": "bot"}), &ctx).await;
let result = tool_get_agent_output(&json!({}), &ctx).await;
assert!(result.is_err());
assert!(result.unwrap_err().contains("story_id"));
}
#[tokio::test]
async fn tool_get_agent_output_poll_missing_agent_name() {
async fn tool_get_agent_output_no_logs_returns_not_found_message() {
let tmp = tempfile::tempdir().unwrap();
let ctx = test_ctx(tmp.path());
let result =
tool_get_agent_output_poll(&json!({"story_id": "1_test"}), &ctx).await;
assert!(result.is_err());
assert!(result.unwrap_err().contains("agent_name"));
}
#[tokio::test]
async fn tool_get_agent_output_poll_no_agent_falls_back_to_empty_log() {
let tmp = tempfile::tempdir().unwrap();
let ctx = test_ctx(tmp.path());
// No agent registered, no log file → returns empty response from log fallback
let result = tool_get_agent_output_poll(
// No agent registered, no log file → returns "no log files found" message
let result = tool_get_agent_output(
&json!({"story_id": "99_nope", "agent_name": "bot"}),
&ctx,
)
.await
.unwrap();
let parsed: Value = serde_json::from_str(&result).unwrap();
assert_eq!(parsed["done"], true);
assert_eq!(parsed["event_count"], 0);
assert!(
parsed["message"].as_str().unwrap_or("").contains("No agent"),
"expected 'No agent' message: {parsed}"
result.contains("No log files found"),
"expected 'No log files found' message: {result}"
);
}
#[tokio::test]
async fn tool_get_agent_output_poll_with_running_agent_returns_empty_events() {
async fn tool_get_agent_output_agent_name_is_optional() {
let tmp = tempfile::tempdir().unwrap();
let ctx = test_ctx(tmp.path());
// Inject a running agent — no events broadcast yet
ctx.agents
.inject_test_agent("10_story", "worker", crate::agents::AgentStatus::Running);
let result = tool_get_agent_output_poll(
&json!({"story_id": "10_story", "agent_name": "worker"}),
// No agent_name provided — should succeed (no error)
let result = tool_get_agent_output(
&json!({"story_id": "99_nope"}),
&ctx,
)
.await
.unwrap();
let parsed: Value = serde_json::from_str(&result).unwrap();
assert_eq!(parsed["done"], false);
assert_eq!(parsed["event_count"], 0);
assert!(parsed["events"].is_array());
assert!(result.contains("No log files found"));
}
// With no live agent registered, get_agent_output must read the session log
// from disk and render it as readable text (output line + DONE marker).
#[tokio::test]
async fn tool_get_agent_output_reads_from_disk() {
    use crate::agent_log::AgentLogWriter;
    use crate::agents::AgentEvent;
    use crate::store::StoreOps;
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    // Point the store at the tmp root so the tool can find log files.
    ctx.store
        .set("project_root", json!(tmp.path().to_string_lossy().as_ref()));
    // Write a log file
    let mut writer = AgentLogWriter::new(
        tmp.path(),
        "42_story_foo",
        "coder-1",
        "sess-test",
    )
    .unwrap();
    writer
        .write_event(&AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "My readable output line".to_string(),
        })
        .unwrap();
    writer
        .write_event(&AgentEvent::Done {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            session_id: Some("sess-test".to_string()),
        })
        .unwrap();
    drop(writer);
    let result = tool_get_agent_output(
        &json!({"story_id": "42_story_foo", "agent_name": "coder-1"}),
        &ctx,
    )
    .await
    .unwrap();
    assert!(
        result.contains("My readable output line"),
        "expected output text in result: {result}"
    );
    assert!(result.contains("DONE"), "expected DONE marker: {result}");
}
// The "lines" argument tails the combined output: only the last N formatted
// lines survive, so early lines must be absent from the result.
#[tokio::test]
async fn tool_get_agent_output_tail_limits_lines() {
    use crate::agent_log::AgentLogWriter;
    use crate::agents::AgentEvent;
    use crate::store::StoreOps;
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    ctx.store
        .set("project_root", json!(tmp.path().to_string_lossy().as_ref()));
    let mut writer = AgentLogWriter::new(
        tmp.path(),
        "42_story_bar",
        "coder-1",
        "sess-tail",
    )
    .unwrap();
    // Ten numbered output events, so a tail of 3 keeps only lines 7..9.
    for i in 0..10 {
        writer
            .write_event(&AgentEvent::Output {
                story_id: "42_story_bar".to_string(),
                agent_name: "coder-1".to_string(),
                text: format!("line {i}"),
            })
            .unwrap();
    }
    drop(writer);
    let result = tool_get_agent_output(
        &json!({"story_id": "42_story_bar", "agent_name": "coder-1", "lines": 3}),
        &ctx,
    )
    .await
    .unwrap();
    // Should contain "line 7", "line 8", "line 9" but NOT "line 0"
    assert!(result.contains("line 9"), "should contain last line: {result}");
    assert!(!result.contains("line 0"), "should not contain early lines: {result}");
}
// The "filter" argument keeps only lines containing the given substring
// (section headers are always kept by the implementation).
#[tokio::test]
async fn tool_get_agent_output_filter_narrows_results() {
    use crate::agent_log::AgentLogWriter;
    use crate::agents::AgentEvent;
    use crate::store::StoreOps;
    let tmp = tempfile::tempdir().unwrap();
    let ctx = test_ctx(tmp.path());
    ctx.store
        .set("project_root", json!(tmp.path().to_string_lossy().as_ref()));
    let mut writer = AgentLogWriter::new(
        tmp.path(),
        "42_story_baz",
        "coder-1",
        "sess-filter",
    )
    .unwrap();
    // One matching line and one non-matching line.
    writer
        .write_event(&AgentEvent::Output {
            story_id: "42_story_baz".to_string(),
            agent_name: "coder-1".to_string(),
            text: "needle line here".to_string(),
        })
        .unwrap();
    writer
        .write_event(&AgentEvent::Output {
            story_id: "42_story_baz".to_string(),
            agent_name: "coder-1".to_string(),
            text: "haystack only".to_string(),
        })
        .unwrap();
    drop(writer);
    let result = tool_get_agent_output(
        &json!({"story_id": "42_story_baz", "agent_name": "coder-1", "filter": "needle"}),
        &ctx,
    )
    .await
    .unwrap();
    assert!(result.contains("needle"), "filter should keep matching lines: {result}");
    assert!(!result.contains("haystack"), "filter should remove non-matching lines: {result}");
}
#[tokio::test]
+38 -14
View File
@@ -134,7 +134,17 @@ pub async fn mcp_post_handler(req: &Request, body: Body, ctx: Data<&Arc<AppConte
.and_then(|v| v.as_str())
.unwrap_or("");
if tool_name == "get_agent_output" {
return handle_agent_output_sse(rpc.id, &rpc.params, &ctx);
// Only use live SSE streaming when agent_name is explicitly provided.
// Without agent_name, fall through to the disk-based handler below.
let has_agent_name = rpc
.params
.get("arguments")
.and_then(|a| a.get("agent_name"))
.and_then(|v| v.as_str())
.is_some();
if has_agent_name {
return handle_agent_output_sse(rpc.id, &rpc.params, &ctx);
}
}
if tool_name == "run_command" {
return shell_tools::handle_run_command_sse(rpc.id, &rpc.params, &ctx);
@@ -502,24 +512,28 @@ fn handle_tools_list(id: Option<Value>) -> JsonRpcResponse {
},
{
"name": "get_agent_output",
"description": "Poll recent output from a running agent. Subscribes to the agent's event stream and collects events for up to 2 seconds. Returns text output and status events. Call repeatedly to follow progress.",
"description": "Read agent session logs from disk as a human-readable timeline. Stitches all sessions for the story together in chronological order — text output, tool calls, tool results, errors. Works for both running and completed agents. If agent_name is omitted, returns logs from every agent that worked on the story. If the named agent is currently running, live buffered events are appended.",
"inputSchema": {
"type": "object",
"properties": {
"story_id": {
"type": "string",
"description": "Story identifier"
"description": "Story identifier (e.g. '42_story_my_feature')"
},
"agent_name": {
"type": "string",
"description": "Agent name"
"description": "Optional: filter to a specific agent (e.g. 'mergemaster', 'coder-1'). Omit to see all agents."
},
"timeout_ms": {
"lines": {
"type": "integer",
"description": "How long to wait for events in milliseconds (default: 2000, max: 10000)"
"description": "Optional: return only the last N lines (tail). Useful for large logs."
},
"filter": {
"type": "string",
"description": "Optional: return only lines containing this substring (e.g. 'ERROR', 'TOOL:', a function name)."
}
},
"required": ["story_id", "agent_name"]
"required": ["story_id"]
}
},
{
@@ -1300,7 +1314,7 @@ async fn handle_tools_call(
"list_agents" => agent_tools::tool_list_agents(ctx),
"get_agent_config" => agent_tools::tool_get_agent_config(ctx),
"reload_agent_config" => agent_tools::tool_get_agent_config(ctx),
"get_agent_output" => agent_tools::tool_get_agent_output_poll(&args, ctx).await,
"get_agent_output" => agent_tools::tool_get_agent_output(&args, ctx).await,
"wait_for_agent" => agent_tools::tool_wait_for_agent(&args, ctx).await,
// Worktree tools
"create_worktree" => agent_tools::tool_create_worktree(&args, ctx).await,
@@ -1756,7 +1770,11 @@ mod tests {
}
#[tokio::test]
async fn mcp_post_sse_get_agent_output_missing_agent_name() {
async fn mcp_post_sse_get_agent_output_without_agent_name_returns_disk_content() {
// Without agent_name the SSE live-streaming intercept is skipped and
// the disk-based handler runs. The transport still wraps the result in
// SSE format (data: …\n\n) because the client sent Accept: text/event-stream,
// but the content should be a valid JSON-RPC result, not a subscribe error.
let tmp = tempfile::tempdir().unwrap();
let ctx = std::sync::Arc::new(test_ctx(tmp.path()));
let cli = poem::test::TestClient::new(test_mcp_app(ctx));
@@ -1767,11 +1785,17 @@ mod tests {
.body(r#"{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"get_agent_output","arguments":{"story_id":"1_test"}}}"#)
.send()
.await;
assert_eq!(
resp.0.headers().get("content-type").unwrap(),
"text/event-stream",
"expected SSE content-type"
);
let body = resp.0.into_body().into_string().await.unwrap();
// Body is SSE-wrapped: "data: {…}\n\n" — strip the prefix and verify it's
// a valid JSON-RPC result (not an error about missing agent_name).
let json_part = body.trim_start_matches("data: ").trim_end_matches("\n\n").trim();
let parsed: serde_json::Value = serde_json::from_str(json_part)
.unwrap_or_else(|_| panic!("expected JSON-RPC in SSE body, got: {body}"));
assert!(parsed.get("result").is_some(),
"expected JSON-RPC result (disk-based handler ran): {parsed}");
// Must NOT be an error about missing agent_name (agent_name is now optional)
assert!(parsed.get("error").is_none(),
"unexpected error when agent_name omitted: {parsed}");
}
#[tokio::test]