//! Persistent JSONL logging for agent sessions.
//! File: storkit/server/src/agent_log.rs
use crate::agents::AgentEvent;
use chrono::Utc;
use serde::{Deserialize, Serialize};
use std::fs::{self, File, OpenOptions};
use std::io::{BufRead, BufReader, Write};
use std::path::{Path, PathBuf};
/// A single line in the agent log file (JSONL format).
///
/// Serializes as the wrapped event's own JSON object with an extra
/// `timestamp` field merged into the top level (via `#[serde(flatten)]`).
#[derive(Debug, Serialize, Deserialize)]
pub struct LogEntry {
    /// RFC 3339 timestamp recorded at the moment the entry was written.
    pub timestamp: String,
    /// The original agent event, kept as untyped JSON so any event shape
    /// round-trips; flattened so its fields sit alongside `timestamp`.
    #[serde(flatten)]
    pub event: serde_json::Value,
}
/// Writes agent events to a persistent log file (JSONL format).
///
/// Each agent session gets its own log file at:
/// `.story_kit/logs/{story_id}/{agent_name}-{session_id}.log`
pub struct AgentLogWriter {
    /// Underlying log file handle; opened in append mode by [`AgentLogWriter::new`]
    /// so a restarted session keeps appending rather than truncating.
    file: File,
}
impl AgentLogWriter {
    /// Open (or create) the session's log file, building the directory
    /// structure on demand.
    ///
    /// Append mode is used deliberately: if the server restarts mid-session,
    /// earlier output in the same file is preserved rather than truncated.
    ///
    /// # Errors
    /// Returns a human-readable message if the directory cannot be created
    /// or the file cannot be opened.
    pub fn new(
        project_root: &Path,
        story_id: &str,
        agent_name: &str,
        session_id: &str,
    ) -> Result<Self, String> {
        let directory = log_dir(project_root, story_id);
        if let Err(e) = fs::create_dir_all(&directory) {
            return Err(format!(
                "Failed to create log directory {}: {e}",
                directory.display()
            ));
        }
        let log_path = directory.join(format!("{agent_name}-{session_id}.log"));
        match OpenOptions::new().create(true).append(true).open(&log_path) {
            Ok(file) => Ok(Self { file }),
            Err(e) => Err(format!(
                "Failed to open log file {}: {e}",
                log_path.display()
            )),
        }
    }

    /// Append one agent event as a JSONL line stamped with the current
    /// UTC time (RFC 3339 / ISO 8601).
    ///
    /// # Errors
    /// Returns a message if serialization or the file write fails.
    pub fn write_event(&mut self, event: &AgentEvent) -> Result<(), String> {
        let value = serde_json::to_value(event)
            .map_err(|e| format!("Failed to serialize event: {e}"))?;
        let record = LogEntry {
            timestamp: Utc::now().to_rfc3339(),
            event: value,
        };
        let serialized = serde_json::to_string(&record)
            .map_err(|e| format!("Failed to serialize entry: {e}"))?;
        // One event per line: the trailing newline is what makes this JSONL.
        self.file
            .write_all(format!("{serialized}\n").as_bytes())
            .map_err(|e| format!("Failed to write log entry: {e}"))
    }
}
/// Return the log directory for a story.
/// Return the log directory for a story:
/// `{project_root}/.story_kit/logs/{story_id}`.
fn log_dir(project_root: &Path, story_id: &str) -> PathBuf {
    let mut dir = project_root.to_path_buf();
    dir.push(".story_kit");
    dir.push("logs");
    dir.push(story_id);
    dir
}
/// Return the path to a specific log file.
pub fn log_file_path(
project_root: &Path,
story_id: &str,
agent_name: &str,
session_id: &str,
) -> PathBuf {
log_dir(project_root, story_id).join(format!("{agent_name}-{session_id}.log"))
}
/// Read all log entries from a log file.
pub fn read_log(path: &Path) -> Result<Vec<LogEntry>, String> {
let file =
File::open(path).map_err(|e| format!("Failed to open log file {}: {e}", path.display()))?;
let reader = BufReader::new(file);
let mut entries = Vec::new();
for line in reader.lines() {
let line = line.map_err(|e| format!("Failed to read log line: {e}"))?;
let trimmed = line.trim();
if trimmed.is_empty() {
continue;
}
let entry: LogEntry = serde_json::from_str(trimmed)
.map_err(|e| format!("Failed to parse log entry: {e}"))?;
entries.push(entry);
}
Ok(entries)
}
/// Find the most recent log file for a given story+agent combination.
///
/// Scans `.story_kit/logs/{story_id}/` for files matching `{agent_name}-*.log`
/// and returns the one with the most recent modification time.
/// Find the most recent log file for a given story+agent combination.
///
/// Scans `.story_kit/logs/{story_id}/` for files named `{agent_name}-*.log`
/// and returns the one with the newest modification time, or `None` if the
/// directory is missing/unreadable or nothing matches.
pub fn find_latest_log(
    project_root: &Path,
    story_id: &str,
    agent_name: &str,
) -> Option<PathBuf> {
    let dir = project_root
        .join(".story_kit")
        .join("logs")
        .join(story_id);
    if !dir.is_dir() {
        return None;
    }
    let wanted_prefix = format!("{agent_name}-");
    let mut newest: Option<(PathBuf, std::time::SystemTime)> = None;
    for dir_entry in fs::read_dir(&dir).ok()?.flatten() {
        let candidate = dir_entry.path();
        // Skip names that aren't valid UTF-8; they can't match our pattern.
        let file_name = match candidate.file_name().and_then(|n| n.to_str()) {
            Some(n) => n.to_string(),
            None => continue,
        };
        if !(file_name.starts_with(&wanted_prefix) && file_name.ends_with(".log")) {
            continue;
        }
        // Files whose mtime can't be read are ignored rather than fatal.
        let mtime = match dir_entry.metadata().and_then(|m| m.modified()) {
            Ok(t) => t,
            Err(_) => continue,
        };
        let is_newer = match &newest {
            Some((_, best_time)) => mtime > *best_time,
            None => true,
        };
        if is_newer {
            newest = Some((candidate, mtime));
        }
    }
    newest.map(|(path, _)| path)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::agents::AgentEvent;
    use tempfile::tempdir;

    // `new` must create the whole `.story_kit/logs/{story}` tree and the
    // session file itself, even before any event is written.
    #[test]
    fn test_log_writer_creates_directory_and_file() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let _writer =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-abc123").unwrap();
        let expected_path = root
            .join(".story_kit")
            .join("logs")
            .join("42_story_foo")
            .join("coder-1-sess-abc123.log");
        assert!(expected_path.exists(), "Log file should exist");
    }

    // Each write_event call should append exactly one JSON line carrying a
    // valid RFC 3339 timestamp plus the flattened event fields.
    #[test]
    fn test_log_writer_writes_jsonl_with_timestamps() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let mut writer =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-001").unwrap();
        let event = AgentEvent::Status {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            status: "running".to_string(),
        };
        writer.write_event(&event).unwrap();
        let event2 = AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "Hello world".to_string(),
        };
        writer.write_event(&event2).unwrap();
        // Read the file and verify
        let path = log_file_path(root, "42_story_foo", "coder-1", "sess-001");
        let content = fs::read_to_string(&path).unwrap();
        let lines: Vec<&str> = content.lines().collect();
        assert_eq!(lines.len(), 2, "Should have 2 log lines");
        // Parse each line as valid JSON with a timestamp
        for line in &lines {
            let entry: LogEntry = serde_json::from_str(line).unwrap();
            assert!(!entry.timestamp.is_empty(), "Timestamp should be present");
            // Verify it's a valid ISO 8601 timestamp
            chrono::DateTime::parse_from_rfc3339(&entry.timestamp)
                .expect("Timestamp should be valid RFC3339");
        }
        // Verify the first entry is a status event
        let entry1: LogEntry = serde_json::from_str(lines[0]).unwrap();
        assert_eq!(entry1.event["type"], "status");
        assert_eq!(entry1.event["status"], "running");
        // Verify the second entry is an output event
        let entry2: LogEntry = serde_json::from_str(lines[1]).unwrap();
        assert_eq!(entry2.event["type"], "output");
        assert_eq!(entry2.event["text"], "Hello world");
    }

    // Round-trip: every event variant written via the writer should come
    // back through read_log with its serde `type` tag intact.
    #[test]
    fn test_read_log_parses_written_events() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let mut writer =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-002").unwrap();
        let events = vec![
            AgentEvent::Status {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                status: "running".to_string(),
            },
            AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "Processing...".to_string(),
            },
            AgentEvent::AgentJson {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                data: serde_json::json!({"type": "tool_use", "name": "read_file"}),
            },
            AgentEvent::Done {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                session_id: Some("sess-002".to_string()),
            },
        ];
        for event in &events {
            writer.write_event(event).unwrap();
        }
        let path = log_file_path(root, "42_story_foo", "coder-1", "sess-002");
        let entries = read_log(&path).unwrap();
        assert_eq!(entries.len(), 4, "Should read back all 4 events");
        // Verify event types round-trip correctly
        assert_eq!(entries[0].event["type"], "status");
        assert_eq!(entries[1].event["type"], "output");
        assert_eq!(entries[2].event["type"], "agent_json");
        assert_eq!(entries[3].event["type"], "done");
    }

    // Session id is part of the file name, so two sessions for the same
    // story+agent must never interleave into one file.
    #[test]
    fn test_separate_sessions_produce_separate_files() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let mut writer1 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-aaa").unwrap();
        let mut writer2 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-bbb").unwrap();
        writer1
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "from session aaa".to_string(),
            })
            .unwrap();
        writer2
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "from session bbb".to_string(),
            })
            .unwrap();
        let path1 = log_file_path(root, "42_story_foo", "coder-1", "sess-aaa");
        let path2 = log_file_path(root, "42_story_foo", "coder-1", "sess-bbb");
        assert_ne!(path1, path2, "Different sessions should use different files");
        let entries1 = read_log(&path1).unwrap();
        let entries2 = read_log(&path2).unwrap();
        assert_eq!(entries1.len(), 1);
        assert_eq!(entries2.len(), 1);
        assert_eq!(entries1[0].event["text"], "from session aaa");
        assert_eq!(entries2[0].event["text"], "from session bbb");
    }

    // find_latest_log picks by mtime; the sleep guarantees the two files
    // get distinguishable modification times on coarse-grained filesystems.
    #[test]
    fn test_find_latest_log_returns_most_recent() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        // Create two log files with a small delay
        let mut writer1 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-old").unwrap();
        writer1
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "old".to_string(),
            })
            .unwrap();
        drop(writer1);
        // Touch the second file to ensure it's newer
        std::thread::sleep(std::time::Duration::from_millis(50));
        let mut writer2 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-new").unwrap();
        writer2
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "new".to_string(),
            })
            .unwrap();
        drop(writer2);
        let latest = find_latest_log(root, "42_story_foo", "coder-1").unwrap();
        assert!(
            latest.to_string_lossy().contains("sess-new"),
            "Should find the newest log file, got: {}",
            latest.display()
        );
    }

    // Missing story directory is a normal condition, not an error: None.
    #[test]
    fn test_find_latest_log_returns_none_for_missing_dir() {
        let tmp = tempdir().unwrap();
        let result = find_latest_log(tmp.path(), "nonexistent", "coder-1");
        assert!(result.is_none());
    }

    // Dropping the writer must leave a complete, readable file on disk.
    #[test]
    fn test_log_files_persist_on_disk() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let path = {
            let mut writer =
                AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-persist").unwrap();
            writer
                .write_event(&AgentEvent::Status {
                    story_id: "42_story_foo".to_string(),
                    agent_name: "coder-1".to_string(),
                    status: "running".to_string(),
                })
                .unwrap();
            log_file_path(root, "42_story_foo", "coder-1", "sess-persist")
            // writer is dropped here
        };
        // File should still exist and be readable
        assert!(path.exists(), "Log file should persist after writer is dropped");
        let entries = read_log(&path).unwrap();
        assert_eq!(entries.len(), 1);
        assert_eq!(entries[0].event["type"], "status");
    }
}