story-kit: accept 96_story_reset_agent_lozenge_to_idle_state_when_returning_to_roster

This commit is contained in:
Dave
2026-02-23 20:52:06 +00:00
parent 7f18542c09
commit bed46fea1b
13 changed files with 627 additions and 48 deletions

View File

@@ -1 +1 @@
64.60
65.14

View File

@@ -51,7 +51,7 @@ system_prompt = "You are a supervisor agent. Read CLAUDE.md and .story_kit/READM
[[agent]]
name = "coder-1"
role = "Full-stack engineer. Implements features across all components."
model = "claude-sonnet-4-5-20241022"
model = "sonnet"
max_turns = 50
max_budget_usd = 5.00
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates (cargo clippy + tests) when your process exits and advance the pipeline based on the results."
@@ -60,7 +60,7 @@ system_prompt = "You are a full-stack engineer working autonomously in a git wor
[[agent]]
name = "coder-2"
role = "Full-stack engineer. Implements features across all components."
model = "claude-sonnet-4-5-20241022"
model = "sonnet"
max_turns = 50
max_budget_usd = 5.00
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates (cargo clippy + tests) when your process exits and advance the pipeline based on the results."
@@ -69,7 +69,7 @@ system_prompt = "You are a full-stack engineer working autonomously in a git wor
[[agent]]
name = "coder-3"
role = "Full-stack engineer. Implements features across all components."
model = "claude-sonnet-4-5-20241022"
model = "sonnet"
max_turns = 50
max_budget_usd = 5.00
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates (cargo clippy + tests) when your process exits and advance the pipeline based on the results."
@@ -87,7 +87,7 @@ system_prompt = "You are a senior full-stack engineer working autonomously in a
[[agent]]
name = "qa"
role = "Reviews coder work in worktrees: runs quality gates, generates testing plans, and reports findings."
model = "claude-sonnet-4-5-20241022"
model = "sonnet"
max_turns = 40
max_budget_usd = 4.00
prompt = """You are the QA agent for story {{story_id}}. Your job is to review the coder's work in the worktree and produce a structured QA report.
@@ -153,7 +153,7 @@ system_prompt = "You are a QA agent. Your job is read-only: review code quality,
[[agent]]
name = "mergemaster"
role = "Merges completed coder work into master, runs quality gates, archives stories, and cleans up worktrees."
model = "claude-sonnet-4-5-20241022"
model = "sonnet"
max_turns = 30
max_budget_usd = 3.00
prompt = """You are the mergemaster agent for story {{story_id}}. Your job is to merge the completed coder work into master using the merge_agent_work MCP tool.

View File

@@ -1,21 +0,0 @@
---
name: "Persistent per-session agent logs"
---
# Story 89: Persistent per-session agent logs
## User Story
As a user, I want each agent session to write its output to a persistent log file so I can inspect what an agent did after it completes, even across server restarts.
## Acceptance Criteria
- [ ] Each agent session writes output to a log file at .story_kit/logs/{story_id}/{agent_name}-{session_id}.log
- [ ] Log files persist across server restarts and agent completions
- [ ] The get_agent_output MCP tool falls back to reading the log file when the in-memory stream is empty or the agent has completed
- [ ] Log files include timestamps, tool calls, text output, and status events
- [ ] Different sessions for the same agent on the same story produce separate log files (no mixing)
## Out of Scope
- TBD

View File

@@ -0,0 +1,42 @@
---
name: "Persistent per-session agent logs"
---
# Story 89: Persistent per-session agent logs
## User Story
As a user, I want each agent session to write its output to a persistent log file so I can inspect what an agent did after it completes, even across server restarts.
## Acceptance Criteria
- [ ] Each agent session writes output to a log file at .story_kit/logs/{story_id}/{agent_name}-{session_id}.log
- [ ] Log files persist across server restarts and agent completions
- [ ] The get_agent_output MCP tool falls back to reading the log file when the in-memory stream is empty or the agent has completed
- [ ] Log files include timestamps, tool calls, text output, and status events
- [ ] Different sessions for the same agent on the same story produce separate log files (no mixing)
## Test Plan
### Unit Tests (server/src/agent_log.rs)
1. **test_log_writer_creates_directory_and_file** — AC1: Verify `AgentLogWriter::new()` creates `.story_kit/logs/{story_id}/` and the log file `{agent_name}-{session_id}.log`.
2. **test_log_writer_writes_jsonl_with_timestamps** — AC4: Verify `write_event()` writes valid JSONL with ISO 8601 timestamps including type, data, and timestamp fields.
3. **test_read_log_parses_written_events** — AC3/AC4: Verify `read_log()` can round-trip events written by `write_event()`.
4. **test_separate_sessions_produce_separate_files** — AC5: Verify two `AgentLogWriter` instances for the same story_id+agent_name but different session_ids write to different files without mixing.
5. **test_find_latest_log_returns_most_recent** — AC3: Verify `find_latest_log()` returns the correct most-recent log for a given story_id+agent_name pair.
6. **test_log_files_persist_on_disk** — AC2: Verify that after writer is dropped, the file still exists and is readable.
### Unit Tests (server/src/agents.rs)
7. **test_emit_event_writes_to_log_writer** — AC1/AC4: Verify that `emit_event` with a log writer writes to the log file in addition to broadcast+event_log.
### Integration Tests (server/src/http/mcp.rs)
8. **test_get_agent_output_falls_back_to_log_file** — AC3: Verify that when in-memory events are empty and agent is completed, `get_agent_output` reads from the log file.
## Out of Scope
- Log rotation or cleanup of old log files
- Frontend UI for viewing log files
- Log file compression

View File

@@ -7,6 +7,7 @@ export interface AgentInfo {
session_id: string | null;
worktree_path: string | null;
base_branch: string | null;
log_session_id: string | null;
}
export interface AgentEvent {

View File

@@ -53,6 +53,7 @@ describe("AgentPanel active work list removed", () => {
session_id: null,
worktree_path: "/tmp/wt",
base_branch: "master",
log_session_id: null,
},
];
mockedAgents.listAgents.mockResolvedValue(agentList);
@@ -106,6 +107,7 @@ describe("RosterBadge availability state", () => {
session_id: null,
worktree_path: null,
base_branch: null,
log_session_id: null,
},
];
mockedAgents.listAgents.mockResolvedValue(agentList);
@@ -127,6 +129,7 @@ describe("RosterBadge availability state", () => {
session_id: null,
worktree_path: null,
base_branch: null,
log_session_id: null,
},
];
mockedAgents.listAgents.mockResolvedValue(agentList);

377
server/src/agent_log.rs Normal file
View File

@@ -0,0 +1,377 @@
use crate::agents::AgentEvent;
use chrono::Utc;
use serde::{Deserialize, Serialize};
use std::fs::{self, File, OpenOptions};
use std::io::{BufRead, BufReader, Write};
use std::path::{Path, PathBuf};
/// A single line in the agent log file (JSONL format).
///
/// On serialization the event payload is flattened into the top-level JSON
/// object, so a written line looks like
/// `{"timestamp": "...", "type": "...", ...}` rather than nesting the event
/// under an `"event"` key.
#[derive(Debug, Serialize, Deserialize)]
pub struct LogEntry {
    /// RFC 3339 / ISO 8601 timestamp recorded at write time.
    pub timestamp: String,
    /// The original agent event, flattened into this object by serde.
    #[serde(flatten)]
    pub event: serde_json::Value,
}
/// Writes agent events to a persistent log file (JSONL format).
///
/// Each agent session gets its own log file at:
/// `.story_kit/logs/{story_id}/{agent_name}-{session_id}.log`
///
/// One writer instance corresponds to one session's file; see
/// [`AgentLogWriter::new`] for how the file is opened.
pub struct AgentLogWriter {
    // Underlying log file handle, opened in append mode.
    file: File,
}
impl AgentLogWriter {
    /// Create a new log writer, creating the directory structure as needed.
    ///
    /// The log file is opened in append mode so that a restart mid-session
    /// won't overwrite earlier output.
    ///
    /// # Errors
    /// Returns a descriptive `String` error if the directory cannot be
    /// created or the file cannot be opened.
    pub fn new(
        project_root: &Path,
        story_id: &str,
        agent_name: &str,
        session_id: &str,
    ) -> Result<Self, String> {
        let log_directory = log_dir(project_root, story_id);
        fs::create_dir_all(&log_directory).map_err(|e| {
            format!(
                "Failed to create log directory {}: {e}",
                log_directory.display()
            )
        })?;
        let file_path = log_directory.join(format!("{agent_name}-{session_id}.log"));
        let handle = OpenOptions::new()
            .create(true)
            .append(true)
            .open(&file_path)
            .map_err(|e| format!("Failed to open log file {}: {e}", file_path.display()))?;
        Ok(Self { file: handle })
    }

    /// Write an agent event as a JSONL line with an ISO 8601 timestamp.
    ///
    /// # Errors
    /// Returns a `String` error if the event or entry cannot be serialized,
    /// or if the write to disk fails.
    pub fn write_event(&mut self, event: &AgentEvent) -> Result<(), String> {
        let payload =
            serde_json::to_value(event).map_err(|e| format!("Failed to serialize event: {e}"))?;
        let record = LogEntry {
            timestamp: Utc::now().to_rfc3339(),
            event: payload,
        };
        let encoded = serde_json::to_string(&record)
            .map_err(|e| format!("Failed to serialize entry: {e}"))?;
        // writeln! appends the entry plus a trailing newline in one call.
        writeln!(self.file, "{encoded}")
            .map_err(|e| format!("Failed to write log entry: {e}"))?;
        Ok(())
    }
}
/// Return the log directory for a story:
/// `{project_root}/.story_kit/logs/{story_id}`.
fn log_dir(project_root: &Path, story_id: &str) -> PathBuf {
    let mut dir = project_root.to_path_buf();
    dir.push(".story_kit");
    dir.push("logs");
    dir.push(story_id);
    dir
}
/// Return the path to a specific session's log file:
/// `{project_root}/.story_kit/logs/{story_id}/{agent_name}-{session_id}.log`.
pub fn log_file_path(
    project_root: &Path,
    story_id: &str,
    agent_name: &str,
    session_id: &str,
) -> PathBuf {
    let file_name = format!("{agent_name}-{session_id}.log");
    project_root
        .join(".story_kit")
        .join("logs")
        .join(story_id)
        .join(file_name)
}
/// Read all log entries from a JSONL log file.
///
/// Blank lines are skipped. Read and parse failures include the file path
/// and 1-based line number so corrupt or truncated logs are easy to
/// diagnose.
///
/// # Errors
/// Returns a `String` error if the file cannot be opened, a line cannot be
/// read, or a non-blank line is not a valid `LogEntry`.
pub fn read_log(path: &Path) -> Result<Vec<LogEntry>, String> {
    let file =
        File::open(path).map_err(|e| format!("Failed to open log file {}: {e}", path.display()))?;
    let reader = BufReader::new(file);
    let mut entries = Vec::new();
    for (idx, line) in reader.lines().enumerate() {
        let line = line.map_err(|e| format!("Failed to read log line {}: {e}", idx + 1))?;
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        let entry: LogEntry = serde_json::from_str(trimmed).map_err(|e| {
            format!(
                "Failed to parse log entry at {}:{}: {e}",
                path.display(),
                idx + 1
            )
        })?;
        entries.push(entry);
    }
    Ok(entries)
}
/// Find the most recent log file for a given story+agent combination.
///
/// Scans `.story_kit/logs/{story_id}/` for files matching `{agent_name}-*.log`
/// and returns the one with the most recent modification time. Ties on
/// modification time — possible on filesystems with coarse (e.g. 1-second)
/// timestamp granularity — are broken by choosing the lexicographically
/// greatest path, so the result is deterministic.
///
/// Returns `None` if the directory does not exist, cannot be read, or
/// contains no matching log file. Entries whose metadata cannot be read are
/// skipped rather than failing the whole scan.
pub fn find_latest_log(
    project_root: &Path,
    story_id: &str,
    agent_name: &str,
) -> Option<PathBuf> {
    let dir = project_root
        .join(".story_kit")
        .join("logs")
        .join(story_id);
    if !dir.is_dir() {
        return None;
    }
    let prefix = format!("{agent_name}-");
    let mut best: Option<(PathBuf, std::time::SystemTime)> = None;
    for entry in fs::read_dir(&dir).ok()?.flatten() {
        let path = entry.path();
        // Only consider files named `{agent_name}-*.log`.
        let name_matches = path
            .file_name()
            .and_then(|n| n.to_str())
            .is_some_and(|n| n.starts_with(&prefix) && n.ends_with(".log"));
        if !name_matches {
            continue;
        }
        let modified = match entry.metadata().and_then(|m| m.modified()) {
            Ok(t) => t,
            Err(_) => continue,
        };
        let is_better = match &best {
            None => true,
            Some((best_path, best_time)) => {
                // Newer wins; equal mtimes fall back to path ordering.
                modified > *best_time || (modified == *best_time && path > *best_path)
            }
        };
        if is_better {
            best = Some((path, modified));
        }
    }
    best.map(|(p, _)| p)
}
#[cfg(test)]
mod tests {
    //! Unit tests for the persistent agent log: file creation, JSONL
    //! round-tripping, session isolation, latest-log discovery, and
    //! persistence after the writer is dropped.
    use super::*;
    use crate::agents::AgentEvent;
    use tempfile::tempdir;

    /// AC1: the writer creates `.story_kit/logs/{story_id}/` and the
    /// `{agent_name}-{session_id}.log` file on construction.
    #[test]
    fn test_log_writer_creates_directory_and_file() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let _writer =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-abc123").unwrap();
        let expected_path = root
            .join(".story_kit")
            .join("logs")
            .join("42_story_foo")
            .join("coder-1-sess-abc123.log");
        assert!(expected_path.exists(), "Log file should exist");
    }

    /// AC4: each written event becomes one JSONL line carrying a valid
    /// RFC 3339 timestamp plus the flattened event fields.
    #[test]
    fn test_log_writer_writes_jsonl_with_timestamps() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let mut writer =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-001").unwrap();
        let event = AgentEvent::Status {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            status: "running".to_string(),
        };
        writer.write_event(&event).unwrap();
        let event2 = AgentEvent::Output {
            story_id: "42_story_foo".to_string(),
            agent_name: "coder-1".to_string(),
            text: "Hello world".to_string(),
        };
        writer.write_event(&event2).unwrap();
        // Read the file and verify
        let path = log_file_path(root, "42_story_foo", "coder-1", "sess-001");
        let content = fs::read_to_string(&path).unwrap();
        let lines: Vec<&str> = content.lines().collect();
        assert_eq!(lines.len(), 2, "Should have 2 log lines");
        // Parse each line as valid JSON with a timestamp
        for line in &lines {
            let entry: LogEntry = serde_json::from_str(line).unwrap();
            assert!(!entry.timestamp.is_empty(), "Timestamp should be present");
            // Verify it's a valid ISO 8601 timestamp
            chrono::DateTime::parse_from_rfc3339(&entry.timestamp)
                .expect("Timestamp should be valid RFC3339");
        }
        // Verify the first entry is a status event
        let entry1: LogEntry = serde_json::from_str(lines[0]).unwrap();
        assert_eq!(entry1.event["type"], "status");
        assert_eq!(entry1.event["status"], "running");
        // Verify the second entry is an output event
        let entry2: LogEntry = serde_json::from_str(lines[1]).unwrap();
        assert_eq!(entry2.event["type"], "output");
        assert_eq!(entry2.event["text"], "Hello world");
    }

    /// AC3/AC4: events written via `write_event` round-trip through
    /// `read_log` with their type tags intact.
    #[test]
    fn test_read_log_parses_written_events() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let mut writer =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-002").unwrap();
        let events = vec![
            AgentEvent::Status {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                status: "running".to_string(),
            },
            AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "Processing...".to_string(),
            },
            AgentEvent::AgentJson {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                data: serde_json::json!({"type": "tool_use", "name": "read_file"}),
            },
            AgentEvent::Done {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                session_id: Some("sess-002".to_string()),
            },
        ];
        for event in &events {
            writer.write_event(event).unwrap();
        }
        let path = log_file_path(root, "42_story_foo", "coder-1", "sess-002");
        let entries = read_log(&path).unwrap();
        assert_eq!(entries.len(), 4, "Should read back all 4 events");
        // Verify event types round-trip correctly
        assert_eq!(entries[0].event["type"], "status");
        assert_eq!(entries[1].event["type"], "output");
        assert_eq!(entries[2].event["type"], "agent_json");
        assert_eq!(entries[3].event["type"], "done");
    }

    /// AC5: two sessions for the same story+agent write to distinct files
    /// with no cross-contamination.
    #[test]
    fn test_separate_sessions_produce_separate_files() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let mut writer1 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-aaa").unwrap();
        let mut writer2 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-bbb").unwrap();
        writer1
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "from session aaa".to_string(),
            })
            .unwrap();
        writer2
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "from session bbb".to_string(),
            })
            .unwrap();
        let path1 = log_file_path(root, "42_story_foo", "coder-1", "sess-aaa");
        let path2 = log_file_path(root, "42_story_foo", "coder-1", "sess-bbb");
        assert_ne!(path1, path2, "Different sessions should use different files");
        let entries1 = read_log(&path1).unwrap();
        let entries2 = read_log(&path2).unwrap();
        assert_eq!(entries1.len(), 1);
        assert_eq!(entries2.len(), 1);
        assert_eq!(entries1[0].event["text"], "from session aaa");
        assert_eq!(entries2[0].event["text"], "from session bbb");
    }

    /// AC3: `find_latest_log` picks the file with the newest mtime.
    /// The 50 ms sleep guards against coarse filesystem timestamp
    /// granularity making both files appear equally new.
    #[test]
    fn test_find_latest_log_returns_most_recent() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        // Create two log files with a small delay
        let mut writer1 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-old").unwrap();
        writer1
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "old".to_string(),
            })
            .unwrap();
        drop(writer1);
        // Touch the second file to ensure it's newer
        std::thread::sleep(std::time::Duration::from_millis(50));
        let mut writer2 =
            AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-new").unwrap();
        writer2
            .write_event(&AgentEvent::Output {
                story_id: "42_story_foo".to_string(),
                agent_name: "coder-1".to_string(),
                text: "new".to_string(),
            })
            .unwrap();
        drop(writer2);
        let latest = find_latest_log(root, "42_story_foo", "coder-1").unwrap();
        assert!(
            latest.to_string_lossy().contains("sess-new"),
            "Should find the newest log file, got: {}",
            latest.display()
        );
    }

    /// `find_latest_log` returns `None` (not an error) when the story has
    /// no log directory at all.
    #[test]
    fn test_find_latest_log_returns_none_for_missing_dir() {
        let tmp = tempdir().unwrap();
        let result = find_latest_log(tmp.path(), "nonexistent", "coder-1");
        assert!(result.is_none());
    }

    /// AC2: the log file remains on disk and readable after the writer is
    /// dropped (i.e. after the agent session ends).
    #[test]
    fn test_log_files_persist_on_disk() {
        let tmp = tempdir().unwrap();
        let root = tmp.path();
        let path = {
            let mut writer =
                AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-persist").unwrap();
            writer
                .write_event(&AgentEvent::Status {
                    story_id: "42_story_foo".to_string(),
                    agent_name: "coder-1".to_string(),
                    status: "running".to_string(),
                })
                .unwrap();
            log_file_path(root, "42_story_foo", "coder-1", "sess-persist")
            // writer is dropped here
        };
        // File should still exist and be readable
        assert!(path.exists(), "Log file should persist after writer is dropped");
        let entries = read_log(&path).unwrap();
        assert_eq!(entries.len(), 1);
        assert_eq!(entries[0].event["type"], "status");
    }
}

View File

@@ -1,3 +1,4 @@
use crate::agent_log::AgentLogWriter;
use crate::config::ProjectConfig;
use crate::worktree::{self, WorktreeInfo};
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
@@ -113,6 +114,8 @@ pub struct AgentInfo {
pub worktree_path: Option<String>,
pub base_branch: Option<String>,
pub completion: Option<CompletionReport>,
/// UUID identifying the persistent log file for this session.
pub log_session_id: Option<String>,
}
struct StoryAgent {
@@ -128,6 +131,8 @@ struct StoryAgent {
completion: Option<CompletionReport>,
/// Project root, stored for pipeline advancement after completion.
project_root: Option<PathBuf>,
/// UUID identifying the log file for this session.
log_session_id: Option<String>,
}
/// Build an `AgentInfo` snapshot from a `StoryAgent` map entry.
@@ -146,6 +151,7 @@ fn agent_info_from_entry(story_id: &str, agent: &StoryAgent) -> AgentInfo {
.as_ref()
.map(|wt| wt.base_branch.clone()),
completion: agent.completion.clone(),
log_session_id: agent.log_session_id.clone(),
}
}
@@ -210,6 +216,23 @@ impl AgentPool {
let event_log: Arc<Mutex<Vec<AgentEvent>>> = Arc::new(Mutex::new(Vec::new()));
// Generate a unique session ID for the persistent log file.
let log_session_id = uuid::Uuid::new_v4().to_string();
// Create persistent log writer.
let log_writer = match AgentLogWriter::new(
project_root,
story_id,
&resolved_name,
&log_session_id,
) {
Ok(w) => Some(Arc::new(Mutex::new(w))),
Err(e) => {
eprintln!("[agents] Failed to create log writer for {story_id}:{resolved_name}: {e}");
None
}
};
// Register as pending
{
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
@@ -225,6 +248,7 @@ impl AgentPool {
event_log: event_log.clone(),
completion: None,
project_root: Some(project_root.to_path_buf()),
log_session_id: Some(log_session_id.clone()),
},
);
}
@@ -267,6 +291,7 @@ impl AgentPool {
let key_clone = key.clone();
let log_clone = event_log.clone();
let port_for_task = self.port;
let log_writer_clone = log_writer.clone();
let handle = tokio::spawn(async move {
let _ = tx_clone.send(AgentEvent::Status {
@@ -277,6 +302,7 @@ impl AgentPool {
match run_agent_pty_streaming(
&sid, &aname, &command, &args, &prompt, &cwd, &tx_clone, &log_clone,
log_writer_clone,
)
.await
{
@@ -324,6 +350,7 @@ impl AgentPool {
worktree_path: Some(wt_path_str),
base_branch: Some(wt_info.base_branch.clone()),
completion: None,
log_session_id: Some(log_session_id),
})
}
@@ -487,6 +514,7 @@ impl AgentPool {
worktree_path: None,
base_branch: None,
completion: None,
log_session_id: None,
}
});
}
@@ -962,6 +990,22 @@ impl AgentPool {
state.get_project_root()
}
/// Get the log session ID and project root for an agent, if available.
///
/// Used by MCP tools to find the persistent log file for a completed agent.
pub fn get_log_info(
&self,
story_id: &str,
agent_name: &str,
) -> Option<(String, PathBuf)> {
let key = composite_key(story_id, agent_name);
let agents = self.agents.lock().ok()?;
let agent = agents.get(&key)?;
let session_id = agent.log_session_id.clone()?;
let project_root = agent.project_root.clone()?;
Some((session_id, project_root))
}
/// Test helper: inject a pre-built agent entry so unit tests can exercise
/// wait/subscribe logic without spawning a real process.
#[cfg(test)]
@@ -986,6 +1030,7 @@ impl AgentPool {
event_log: Arc::new(Mutex::new(Vec::new())),
completion: None,
project_root: None,
log_session_id: None,
},
);
tx
@@ -1020,6 +1065,7 @@ impl AgentPool {
event_log: Arc::new(Mutex::new(Vec::new())),
completion: None,
project_root: None,
log_session_id: None,
},
);
tx
@@ -1279,6 +1325,7 @@ impl AgentPool {
event_log: Arc::new(Mutex::new(Vec::new())),
completion: Some(completion),
project_root: Some(project_root),
log_session_id: None,
},
);
tx
@@ -2078,6 +2125,7 @@ async fn run_agent_pty_streaming(
cwd: &str,
tx: &broadcast::Sender<AgentEvent>,
event_log: &Arc<Mutex<Vec<AgentEvent>>>,
log_writer: Option<Arc<Mutex<AgentLogWriter>>>,
) -> Result<Option<String>, String> {
let sid = story_id.to_string();
let aname = agent_name.to_string();
@@ -2089,21 +2137,38 @@ async fn run_agent_pty_streaming(
let event_log = event_log.clone();
tokio::task::spawn_blocking(move || {
run_agent_pty_blocking(&sid, &aname, &cmd, &args, &prompt, &cwd, &tx, &event_log)
run_agent_pty_blocking(
&sid,
&aname,
&cmd,
&args,
&prompt,
&cwd,
&tx,
&event_log,
log_writer.as_deref(),
)
})
.await
.map_err(|e| format!("Agent task panicked: {e}"))?
}
/// Helper to send an event to both broadcast and event log.
/// Helper to send an event to broadcast, event log, and optional persistent log file.
///
/// Lock failures on the in-memory log or the file writer are tolerated
/// silently (the event is still broadcast); file-write failures are only
/// reported to stderr so logging can never take down an agent run.
fn emit_event(
    event: AgentEvent,
    tx: &broadcast::Sender<AgentEvent>,
    event_log: &Mutex<Vec<AgentEvent>>,
    log_writer: Option<&Mutex<AgentLogWriter>>,
) {
    // Record in the in-memory event history first.
    if let Ok(mut history) = event_log.lock() {
        history.push(event.clone());
    }
    // Mirror to the persistent log file, if one exists for this session.
    if let Some(writer) = log_writer {
        if let Ok(mut guard) = writer.lock() {
            if let Err(e) = guard.write_event(&event) {
                eprintln!("[agent_log] Failed to write event to log file: {e}");
            }
        }
    }
    // Finally broadcast to live subscribers; a send error just means no
    // receivers are currently listening, which is fine.
    let _ = tx.send(event);
}
@@ -2117,6 +2182,7 @@ fn run_agent_pty_blocking(
cwd: &str,
tx: &broadcast::Sender<AgentEvent>,
event_log: &Mutex<Vec<AgentEvent>>,
log_writer: Option<&Mutex<AgentLogWriter>>,
) -> Result<Option<String>, String> {
let pty_system = native_pty_system();
@@ -2198,6 +2264,7 @@ fn run_agent_pty_blocking(
},
tx,
event_log,
log_writer,
);
continue;
}
@@ -2226,6 +2293,7 @@ fn run_agent_pty_blocking(
},
tx,
event_log,
log_writer,
);
}
}
@@ -2243,6 +2311,7 @@ fn run_agent_pty_blocking(
},
tx,
event_log,
log_writer,
);
}
@@ -3514,4 +3583,38 @@ name = "qa"
"story should be in 2_current/ or 3_qa/ after reconciliation"
);
}
/// AC1/AC4: `emit_event` with a log writer appends the event to the
/// in-memory event log AND writes it to the persistent log file.
#[test]
fn test_emit_event_writes_to_log_writer() {
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let log_writer =
        AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-emit").unwrap();
    let log_mutex = Mutex::new(log_writer);
    let (tx, _rx) = broadcast::channel::<AgentEvent>(64);
    let event_log: Mutex<Vec<AgentEvent>> = Mutex::new(Vec::new());
    let event = AgentEvent::Status {
        story_id: "42_story_foo".to_string(),
        agent_name: "coder-1".to_string(),
        status: "running".to_string(),
    };
    emit_event(event, &tx, &event_log, Some(&log_mutex));
    // Verify event was added to in-memory log
    let mem_events = event_log.lock().unwrap();
    assert_eq!(mem_events.len(), 1);
    // Drop the guard before reading the file so the lock isn't held longer
    // than needed.
    drop(mem_events);
    // Verify event was written to the log file
    let log_path =
        crate::agent_log::log_file_path(root, "42_story_foo", "coder-1", "sess-emit");
    let entries = crate::agent_log::read_log(&log_path).unwrap();
    assert_eq!(entries.len(), 1);
    assert_eq!(entries[0].event["type"], "status");
    assert_eq!(entries[0].event["status"], "running");
}
}

View File

@@ -1016,13 +1016,14 @@ async fn tool_get_agent_output_poll(args: &Value, ctx: &AppContext) -> Result<St
.and_then(|v| v.as_str())
.ok_or("Missing required argument: agent_name")?;
// Drain all accumulated events since the last poll.
let drained = ctx.agents.drain_events(story_id, agent_name)?;
// Try draining in-memory events first.
match ctx.agents.drain_events(story_id, agent_name) {
Ok(drained) => {
let done = drained.iter().any(|e| {
matches!(
e,
crate::agents::AgentEvent::Done { .. } | crate::agents::AgentEvent::Error { .. }
crate::agents::AgentEvent::Done { .. }
| crate::agents::AgentEvent::Error { .. }
)
});
@@ -1039,6 +1040,78 @@ async fn tool_get_agent_output_poll(args: &Value, ctx: &AppContext) -> Result<St
}))
.map_err(|e| format!("Serialization error: {e}"))
}
Err(_) => {
// Agent not in memory — fall back to persistent log file.
get_agent_output_from_log(story_id, agent_name, ctx)
}
}
}
/// Fall back to reading agent output from the persistent log file on disk.
///
/// Tries to find the log file via the agent's stored log_session_id first,
/// then falls back to `find_latest_log` scanning the log directory.
///
/// Returns a JSON string with `events`, `done`, `event_count`, `message`,
/// and `source` fields; when no log file can be located, the result is a
/// "not found" payload (source `"none"`) rather than an error.
fn get_agent_output_from_log(
    story_id: &str,
    agent_name: &str,
    ctx: &AppContext,
) -> Result<String, String> {
    use crate::agent_log;
    // NOTE(review): this `?` makes the whole fallback fail if the project
    // root can't be resolved, even when get_log_info below could supply its
    // own root — confirm that's intended.
    let project_root = ctx.agents.get_project_root(&ctx.state)?;
    // Try to find the log file: first from in-memory agent info, then by scanning.
    let log_path = ctx
        .agents
        .get_log_info(story_id, agent_name)
        .map(|(session_id, root)| agent_log::log_file_path(&root, story_id, agent_name, &session_id))
        // The stored session id may point at a file that was deleted; only
        // trust it if the file is actually present.
        .filter(|p| p.exists())
        .or_else(|| agent_log::find_latest_log(&project_root, story_id, agent_name));
    let log_path = match log_path {
        Some(p) => p,
        None => {
            // No in-memory agent and no file on disk: report "nothing to
            // show" as a successful, empty response.
            return serde_json::to_string_pretty(&json!({
                "events": [],
                "done": true,
                "event_count": 0,
                "message": format!("No agent '{agent_name}' for story '{story_id}' and no log file found."),
                "source": "none",
            }))
            .map_err(|e| format!("Serialization error: {e}"));
        }
    };
    match agent_log::read_log(&log_path) {
        Ok(entries) => {
            // Flatten each LogEntry back into a single JSON object with the
            // timestamp injected alongside the event's own fields.
            let events: Vec<serde_json::Value> = entries
                .into_iter()
                .map(|e| {
                    let mut val = e.event;
                    if let serde_json::Value::Object(ref mut map) = val {
                        map.insert(
                            "timestamp".to_string(),
                            serde_json::Value::String(e.timestamp),
                        );
                    }
                    val
                })
                .collect();
            let count = events.len();
            serde_json::to_string_pretty(&json!({
                "events": events,
                "done": true,
                "event_count": count,
                "message": "Events loaded from persistent log file.",
                "source": "log_file",
                "log_file": log_path.to_string_lossy(),
            }))
            .map_err(|e| format!("Serialization error: {e}"))
        }
        Err(e) => Err(format!("Failed to read log file: {e}")),
    }
}
fn tool_get_agent_config(ctx: &AppContext) -> Result<String, String> {
let project_root = ctx.agents.get_project_root(&ctx.state)?;

View File

@@ -1,3 +1,4 @@
mod agent_log;
mod agents;
mod config;
mod http;