huskies: merge 531_story_mcp_tool_to_read_agent_session_logs_from_disk_not_just_live_stream

This commit is contained in:
dave
2026-04-10 13:05:04 +00:00
parent 31388da609
commit 1dd675796b
3 changed files with 697 additions and 137 deletions
+432
View File
@@ -78,6 +78,7 @@ fn log_dir(project_root: &Path, story_id: &str) -> PathBuf {
}
/// Return the path to a specific log file.
#[allow(dead_code)]
pub fn log_file_path(
project_root: &Path,
story_id: &str,
@@ -108,6 +109,208 @@ pub fn read_log(path: &Path) -> Result<Vec<LogEntry>, String> {
Ok(entries)
}
/// List all log files for a story, optionally filtered by agent name prefix.
///
/// Returns files sorted by modification time (oldest first) so that when all
/// sessions are concatenated the timeline reads chronologically.
pub fn list_story_log_files(
project_root: &Path,
story_id: &str,
agent_name_filter: Option<&str>,
) -> Vec<PathBuf> {
let dir = log_dir(project_root, story_id);
if !dir.is_dir() {
return Vec::new();
}
let prefix = agent_name_filter.map(|n| format!("{n}-"));
let mut files: Vec<(PathBuf, std::time::SystemTime)> = Vec::new();
if let Ok(entries) = fs::read_dir(&dir) {
for entry in entries.flatten() {
let path = entry.path();
let name = match path.file_name().and_then(|n| n.to_str()) {
Some(n) => n.to_string(),
None => continue,
};
if !name.ends_with(".log") {
continue;
}
if let Some(ref pfx) = prefix
&& !name.starts_with(pfx.as_str())
{
continue;
}
let modified = entry
.metadata()
.and_then(|m| m.modified())
.unwrap_or(std::time::SystemTime::UNIX_EPOCH);
files.push((path, modified));
}
}
files.sort_by_key(|(_, t)| *t);
files.into_iter().map(|(p, _)| p).collect()
}
/// Format a single log entry as a human-readable text line.
///
/// `timestamp` is an ISO 8601 string; `event` is the flattened `AgentEvent`
/// value (has `type`, `agent_name`, etc. at the top level).
///
/// Returns `None` for entries that should be skipped (raw streaming noise,
/// trivial status changes, empty output, etc.).
pub fn format_log_entry_as_text(timestamp: &str, event: &serde_json::Value) -> Option<String> {
    /// Longest prefix of `s` that fits in `max` bytes and ends on a char
    /// boundary. A plain `&s[..max]` panics when byte `max` falls inside a
    /// multi-byte UTF-8 sequence — and tool inputs/results routinely contain
    /// arbitrary text.
    fn truncate_on_char_boundary(s: &str, max: usize) -> &str {
        if s.len() <= max {
            return s;
        }
        let mut end = max;
        // Byte 0 is always a char boundary, so this loop terminates.
        while !s.is_char_boundary(end) {
            end -= 1;
        }
        &s[..end]
    }

    let agent_name = event
        .get("agent_name")
        .and_then(|v| v.as_str())
        .unwrap_or("?");
    // Extract HH:MM:SS from ISO 8601 "2026-04-10T12:48:02.123456789+00:00".
    // `str::get` (instead of direct slicing) can never panic, even if a
    // malformed timestamp would split a multi-byte char at byte 11 or 19.
    let ts_short = timestamp.get(11..19).unwrap_or(timestamp);
    let pfx = format!("[{ts_short}][{agent_name}]");
    match event.get("type").and_then(|v| v.as_str()) {
        Some("output") => {
            let text = event
                .get("text")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .trim();
            if text.is_empty() {
                None // whitespace-only output is streaming noise
            } else {
                Some(format!("{pfx} {text}"))
            }
        }
        Some("error") => {
            let msg = event
                .get("message")
                .and_then(|v| v.as_str())
                .unwrap_or("(unknown error)");
            Some(format!("{pfx} ERROR: {msg}"))
        }
        Some("done") => Some(format!("{pfx} DONE")),
        Some("status") => {
            // Skip trivial running/started noise; surface everything else.
            let status = event
                .get("status")
                .and_then(|v| v.as_str())
                .unwrap_or("?");
            match status {
                "running" | "started" => None,
                _ => Some(format!("{pfx} STATUS: {status}")),
            }
        }
        Some("agent_json") => {
            let data = event.get("data")?;
            match data.get("type").and_then(|v| v.as_str()) {
                // Assistant turns: emit text blocks and tool invocations.
                Some("assistant") => {
                    let mut parts: Vec<String> = Vec::new();
                    if let Some(arr) = data
                        .pointer("/message/content")
                        .and_then(|v| v.as_array())
                    {
                        for item in arr {
                            match item.get("type").and_then(|v| v.as_str()) {
                                Some("text") => {
                                    let text = item
                                        .get("text")
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("")
                                        .trim();
                                    if !text.is_empty() {
                                        parts.push(format!("{pfx} {text}"));
                                    }
                                }
                                Some("tool_use") => {
                                    let name = item
                                        .get("name")
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("?");
                                    let input = item
                                        .get("input")
                                        .map(|v| {
                                            serde_json::to_string(v).unwrap_or_default()
                                        })
                                        .unwrap_or_default();
                                    // Keep the line readable: cap serialized
                                    // input at ~200 bytes without splitting a
                                    // multi-byte character (would panic).
                                    let display = if input.len() > 200 {
                                        format!(
                                            "{}...",
                                            truncate_on_char_boundary(&input, 200)
                                        )
                                    } else {
                                        input
                                    };
                                    parts.push(format!("{pfx} TOOL: {name}({display})"));
                                }
                                _ => {}
                            }
                        }
                    }
                    if parts.is_empty() {
                        None
                    } else {
                        Some(parts.join("\n"))
                    }
                }
                // User turns: only tool results are interesting here.
                Some("user") => {
                    let mut parts: Vec<String> = Vec::new();
                    if let Some(arr) = data
                        .pointer("/message/content")
                        .and_then(|v| v.as_array())
                    {
                        for item in arr {
                            if item.get("type").and_then(|v| v.as_str())
                                != Some("tool_result")
                            {
                                continue;
                            }
                            // Result content may be a plain string or any
                            // JSON value; stringify the latter verbatim.
                            let content_str = match item.get("content") {
                                Some(serde_json::Value::String(s)) => s.clone(),
                                Some(v) => v.to_string(),
                                None => String::new(),
                            };
                            // Cap at ~500 bytes (char-boundary safe) and note
                            // the original size. NOTE(review): the reported
                            // count is bytes despite the "chars" label.
                            let display = if content_str.len() > 500 {
                                format!(
                                    "{}... [{} chars total]",
                                    truncate_on_char_boundary(&content_str, 500),
                                    content_str.len()
                                )
                            } else {
                                content_str
                            };
                            if !display.trim().is_empty() {
                                parts.push(format!("{pfx} RESULT: {display}"));
                            }
                        }
                    }
                    if parts.is_empty() {
                        None
                    } else {
                        Some(parts.join("\n"))
                    }
                }
                _ => None, // Skip stream_event, system init, etc.
            }
        }
        _ => None,
    }
}
/// Read log entries from a file and convert them to human-readable text lines,
/// stripping raw streaming noise and JSON internals.
pub fn read_log_as_readable_lines(path: &Path) -> Result<Vec<String>, String> {
    // Entries that format to `None` (noise, empty output, stream deltas)
    // are dropped; everything else becomes one readable line.
    Ok(read_log(path)?
        .into_iter()
        .filter_map(|entry| format_log_entry_as_text(&entry.timestamp, &entry.event))
        .collect())
}
/// Find the most recent log file for a given story+agent combination.
///
/// Scans `.huskies/logs/{story_id}/` for files matching `{agent_name}-*.log`
@@ -342,6 +545,235 @@ mod tests {
);
}
#[test]
fn test_list_story_log_files_returns_empty_for_missing_dir() {
    // A story that never logged anything has no log directory at all.
    let workspace = tempdir().unwrap();
    let found = list_story_log_files(workspace.path(), "nonexistent", None);
    assert!(found.is_empty());
}
#[test]
fn test_list_story_log_files_no_filter_returns_all_logs() {
    let workspace = tempdir().unwrap();
    let project_root = workspace.path();

    // First session: coder-1.
    let mut first =
        AgentLogWriter::new(project_root, "42_story_foo", "coder-1", "sess-aaa").unwrap();
    first
        .write_event(&AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "from coder-1".to_string(),
        })
        .unwrap();
    drop(first);

    // Give the second file a strictly later mtime.
    // NOTE(review): assumes the filesystem records mtimes at finer than
    // 10ms granularity — confirm on coarse-grained filesystems.
    std::thread::sleep(std::time::Duration::from_millis(10));

    // Second session: mergemaster.
    let mut second =
        AgentLogWriter::new(project_root, "42_story_foo", "mergemaster", "sess-bbb").unwrap();
    second
        .write_event(&AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "mergemaster".to_string(),
            text: "from mergemaster".to_string(),
        })
        .unwrap();
    drop(second);

    let found = list_story_log_files(project_root, "42_story_foo", None);
    assert_eq!(found.len(), 2, "Should find both log files");
    // Oldest first
    assert!(
        found[0].to_string_lossy().contains("coder-1"),
        "coder-1 should be first (older)"
    );
}
#[test]
fn test_list_story_log_files_with_agent_filter() {
    let workspace = tempdir().unwrap();
    let project_root = workspace.path();

    // One log per agent; only coder-1's should survive the filter.
    for (agent, session, text) in [
        ("coder-1", "sess-a", "from coder-1"),
        ("mergemaster", "sess-b", "from mergemaster"),
    ] {
        let mut writer =
            AgentLogWriter::new(project_root, "42_story_foo", agent, session).unwrap();
        writer
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: agent.to_string(),
                text: text.to_string(),
            })
            .unwrap();
    }

    let found = list_story_log_files(project_root, "42_story_foo", Some("coder-1"));
    assert_eq!(found.len(), 1, "Should find only coder-1 log");
    assert!(found[0].to_string_lossy().contains("coder-1"));
}
#[test]
fn test_format_log_entry_output_event() {
    let stamp = "2026-04-10T12:48:02.123456789+00:00";
    let payload = serde_json::json!({
        "type": "output",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "text": "Hello world"
    });
    // A plain output event renders as a single prefixed line.
    let line = format_log_entry_as_text(stamp, &payload).unwrap();
    assert!(line.contains("12:48:02"), "should include timestamp");
    assert!(line.contains("coder-1"), "should include agent name");
    assert!(line.contains("Hello world"), "should include text");
}
#[test]
fn test_format_log_entry_skips_empty_output() {
    // Whitespace-only output is noise and must be dropped entirely.
    let stamp = "2026-04-10T12:48:02.123456789+00:00";
    let payload = serde_json::json!({
        "type": "output",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "text": "    "
    });
    assert!(format_log_entry_as_text(stamp, &payload).is_none());
}
#[test]
fn test_format_log_entry_error_event() {
    let stamp = "2026-04-10T12:48:02.123+00:00";
    let payload = serde_json::json!({
        "type": "error",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "message": "Something went wrong"
    });
    // Errors are surfaced with an explicit ERROR marker plus the message.
    let line = format_log_entry_as_text(stamp, &payload).unwrap();
    assert!(line.contains("ERROR"));
    assert!(line.contains("Something went wrong"));
}
#[test]
fn test_format_log_entry_done_event() {
    let stamp = "2026-04-10T12:48:02.123+00:00";
    let payload = serde_json::json!({
        "type": "done",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "session_id": null
    });
    // Completion renders as a simple DONE marker line.
    let line = format_log_entry_as_text(stamp, &payload).unwrap();
    assert!(line.contains("DONE"));
}
#[test]
fn test_format_log_entry_skips_running_status() {
    // "running" status updates are pure noise and must be filtered out.
    let stamp = "2026-04-10T12:48:02.123+00:00";
    let payload = serde_json::json!({
        "type": "status",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "status": "running"
    });
    assert!(format_log_entry_as_text(stamp, &payload).is_none());
}
#[test]
fn test_format_log_entry_agent_json_tool_use() {
    let stamp = "2026-04-10T12:48:03.000+00:00";
    // An assistant turn containing one tool invocation.
    let payload = serde_json::json!({
        "type": "agent_json",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "data": {
            "type": "assistant",
            "message": {
                "content": [
                    {
                        "type": "tool_use",
                        "id": "tool-1",
                        "name": "Read",
                        "input": {"file_path": "/some/file.rs"}
                    }
                ]
            }
        }
    });
    // Tool calls render as `TOOL: <name>(<serialized input>)`.
    let result = format_log_entry_as_text(stamp, &payload).unwrap();
    assert!(result.contains("TOOL: Read"), "should show tool call: {result}");
    assert!(result.contains("file_path"), "should show input: {result}");
}
#[test]
fn test_format_log_entry_agent_json_text() {
    let stamp = "2026-04-10T12:48:04.000+00:00";
    // An assistant turn containing one plain text block.
    let payload = serde_json::json!({
        "type": "agent_json",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "data": {
            "type": "assistant",
            "message": {
                "content": [
                    {
                        "type": "text",
                        "text": "Now I will read the file."
                    }
                ]
            }
        }
    });
    let result = format_log_entry_as_text(stamp, &payload).unwrap();
    assert!(result.contains("Now I will read the file."), "should show text: {result}");
}
#[test]
fn test_format_log_entry_skips_stream_events() {
    // Raw streaming deltas are internal noise; they never become lines.
    let stamp = "2026-04-10T12:48:04.000+00:00";
    let payload = serde_json::json!({
        "type": "agent_json",
        "story_id": "42_story",
        "agent_name": "coder-1",
        "data": {
            "type": "stream_event",
            "event": {"type": "content_block_delta", "delta": {"type": "text_delta", "text": "chunk"}}
        }
    });
    assert!(format_log_entry_as_text(stamp, &payload).is_none(), "stream events should be skipped");
}
#[test]
fn test_read_log_as_readable_lines_produces_formatted_output() {
    let workspace = tempdir().unwrap();
    let project_root = workspace.path();

    // Write one output event and one done event, then round-trip them
    // through the human-readable formatter.
    let mut writer =
        AgentLogWriter::new(project_root, "42_story_foo", "coder-1", "sess-readable").unwrap();
    writer
        .write_event(&AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "Let me read the file".to_string(),
        })
        .unwrap();
    writer
        .write_event(&AgentEvent::Done {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            session_id: Some("sess-readable".to_string()),
        })
        .unwrap();

    let log_path = log_file_path(project_root, "42_story_foo", "coder-1", "sess-readable");
    let lines = read_log_as_readable_lines(&log_path).unwrap();
    assert_eq!(lines.len(), 2, "Should produce 2 readable lines");
    assert!(lines[0].contains("Let me read the file"), "first line: {}", lines[0]);
    assert!(lines[1].contains("DONE"), "second line: {}", lines[1]);
}
#[test]
fn test_find_latest_log_returns_none_for_missing_dir() {
let tmp = tempdir().unwrap();