story-kit: merge 93_story_expose_server_logs_to_agents_via_mcp

Adds log_buffer ring buffer and slog! macro for in-memory server log
capture, plus get_server_logs MCP tool for agents to read recent logs.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Authored by Dave on 2026-02-23 20:53:37 +00:00.
11 changed files with 245 additions and 68 deletions

View File

@@ -1,4 +1,5 @@
use crate::agent_log::AgentLogWriter;
use crate::slog;
use crate::config::ProjectConfig;
use crate::worktree::{self, WorktreeInfo};
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
@@ -384,7 +385,7 @@ impl AgentPool {
// Preserve worktree for inspection — don't destroy agent's work on stop.
if let Some(ref wt) = worktree_info {
eprintln!(
slog!(
"[agents] Worktree preserved for {story_id}:{agent_name}: {}",
wt.path.display()
);
@@ -581,7 +582,7 @@ impl AgentPool {
let agents = match self.agents.lock() {
Ok(a) => a,
Err(e) => {
eprintln!("[pipeline] Failed to lock agents for '{story_id}:{agent_name}': {e}");
slog!("[pipeline] Failed to lock agents for '{story_id}:{agent_name}': {e}");
return;
}
};
@@ -599,14 +600,14 @@ impl AgentPool {
let completion = match completion {
Some(c) => c,
None => {
eprintln!("[pipeline] No completion report for '{story_id}:{agent_name}'");
slog!("[pipeline] No completion report for '{story_id}:{agent_name}'");
return;
}
};
let project_root = match project_root {
Some(p) => p,
None => {
eprintln!("[pipeline] No project_root for '{story_id}:{agent_name}'");
slog!("[pipeline] No project_root for '{story_id}:{agent_name}'");
return;
}
};
@@ -619,23 +620,23 @@ impl AgentPool {
}
PipelineStage::Coder => {
if completion.gates_passed {
eprintln!(
slog!(
"[pipeline] Coder '{agent_name}' passed gates for '{story_id}'. Moving to QA."
);
if let Err(e) = move_story_to_qa(&project_root, story_id) {
eprintln!("[pipeline] Failed to move '{story_id}' to 3_qa/: {e}");
slog!("[pipeline] Failed to move '{story_id}' to 3_qa/: {e}");
return;
}
if let Err(e) = self
.start_agent(&project_root, story_id, Some("qa"), None)
.await
{
eprintln!("[pipeline] Failed to start qa agent for '{story_id}': {e}");
slog!("[pipeline] Failed to start qa agent for '{story_id}': {e}");
}
// Coder slot is now free — pick up any other unassigned work in 2_current/.
self.auto_assign_available_work(&project_root).await;
} else {
eprintln!(
slog!(
"[pipeline] Coder '{agent_name}' failed gates for '{story_id}'. Restarting."
);
let context = format!(
@@ -648,7 +649,7 @@ impl AgentPool {
.start_agent(&project_root, story_id, Some(agent_name), Some(&context))
.await
{
eprintln!(
slog!(
"[pipeline] Failed to restart coder '{agent_name}' for '{story_id}': {e}"
);
}
@@ -663,7 +664,7 @@ impl AgentPool {
tokio::task::spawn_blocking(move || run_coverage_gate(&cp))
.await
.unwrap_or_else(|e| {
eprintln!("[pipeline] Coverage gate task panicked: {e}");
slog!("[pipeline] Coverage gate task panicked: {e}");
Ok((false, format!("Coverage gate task panicked: {e}")))
});
let (coverage_passed, coverage_output) = match coverage_result {
@@ -672,23 +673,23 @@ impl AgentPool {
};
if coverage_passed {
eprintln!(
slog!(
"[pipeline] QA passed gates and coverage for '{story_id}'. Moving to merge."
);
if let Err(e) = move_story_to_merge(&project_root, story_id) {
eprintln!("[pipeline] Failed to move '{story_id}' to 4_merge/: {e}");
slog!("[pipeline] Failed to move '{story_id}' to 4_merge/: {e}");
return;
}
if let Err(e) = self
.start_agent(&project_root, story_id, Some("mergemaster"), None)
.await
{
eprintln!("[pipeline] Failed to start mergemaster for '{story_id}': {e}");
slog!("[pipeline] Failed to start mergemaster for '{story_id}': {e}");
}
// QA slot is now free — pick up any other unassigned work in 3_qa/.
self.auto_assign_available_work(&project_root).await;
} else {
eprintln!(
slog!(
"[pipeline] QA coverage gate failed for '{story_id}'. Restarting QA."
);
let context = format!(
@@ -701,11 +702,11 @@ impl AgentPool {
.start_agent(&project_root, story_id, Some("qa"), Some(&context))
.await
{
eprintln!("[pipeline] Failed to restart qa for '{story_id}': {e}");
slog!("[pipeline] Failed to restart qa for '{story_id}': {e}");
}
}
} else {
eprintln!(
slog!(
"[pipeline] QA failed gates for '{story_id}'. Restarting."
);
let context = format!(
@@ -718,20 +719,20 @@ impl AgentPool {
.start_agent(&project_root, story_id, Some("qa"), Some(&context))
.await
{
eprintln!("[pipeline] Failed to restart qa for '{story_id}': {e}");
slog!("[pipeline] Failed to restart qa for '{story_id}': {e}");
}
}
}
PipelineStage::Mergemaster => {
// Run script/test on master (project_root) as the post-merge verification.
eprintln!(
slog!(
"[pipeline] Mergemaster completed for '{story_id}'. Running post-merge tests on master."
);
let root = project_root.clone();
let test_result = tokio::task::spawn_blocking(move || run_project_tests(&root))
.await
.unwrap_or_else(|e| {
eprintln!("[pipeline] Post-merge test task panicked: {e}");
slog!("[pipeline] Post-merge test task panicked: {e}");
Ok((false, format!("Test task panicked: {e}")))
});
let (passed, output) = match test_result {
@@ -740,11 +741,11 @@ impl AgentPool {
};
if passed {
eprintln!(
slog!(
"[pipeline] Post-merge tests passed for '{story_id}'. Archiving."
);
if let Err(e) = move_story_to_archived(&project_root, story_id) {
eprintln!("[pipeline] Failed to archive '{story_id}': {e}");
slog!("[pipeline] Failed to archive '{story_id}': {e}");
}
// Mergemaster slot is now free — pick up any other items in 4_merge/.
self.auto_assign_available_work(&project_root).await;
@@ -756,15 +757,15 @@ impl AgentPool {
// worktree::remove_worktree_by_story_id(&project_root, story_id, &config)
// .await
// {
// eprintln!(
// slog!(
// "[pipeline] Failed to remove worktree for '{story_id}': {e}"
// );
// }
eprintln!(
slog!(
"[pipeline] Story '{story_id}' archived. Worktree preserved for inspection."
);
} else {
eprintln!(
slog!(
"[pipeline] Post-merge tests failed for '{story_id}'. Restarting mergemaster."
);
let context = format!(
@@ -777,7 +778,7 @@ impl AgentPool {
.start_agent(&project_root, story_id, Some("mergemaster"), Some(&context))
.await
{
eprintln!(
slog!(
"[pipeline] Failed to restart mergemaster for '{story_id}': {e}"
);
}
@@ -1083,7 +1084,7 @@ impl AgentPool {
let config = match ProjectConfig::load(project_root) {
Ok(c) => c,
Err(e) => {
eprintln!("[auto-assign] Failed to load project config: {e}");
slog!("[auto-assign] Failed to load project config: {e}");
return;
}
};
@@ -1108,7 +1109,7 @@ impl AgentPool {
let agents = match self.agents.lock() {
Ok(a) => a,
Err(e) => {
eprintln!("[auto-assign] Failed to lock agents: {e}");
slog!("[auto-assign] Failed to lock agents: {e}");
break;
}
};
@@ -1129,21 +1130,21 @@ impl AgentPool {
match free_agent {
Some(agent_name) => {
eprintln!(
slog!(
"[auto-assign] Assigning '{agent_name}' to '{story_id}' in {stage_dir}/"
);
if let Err(e) = self
.start_agent(project_root, story_id, Some(&agent_name), None)
.await
{
eprintln!(
slog!(
"[auto-assign] Failed to start '{agent_name}' for '{story_id}': {e}"
);
}
}
None => {
// No free agents of this type — stop scanning this stage.
eprintln!(
slog!(
"[auto-assign] All {:?} agents busy; remaining items in {stage_dir}/ will wait.",
stage
);
@@ -1437,7 +1438,7 @@ async fn run_server_owned_completion(
};
match lock.get(&key) {
Some(agent) if agent.completion.is_some() => {
eprintln!(
slog!(
"[agents] Completion already recorded for '{story_id}:{agent_name}'; \
skipping server-owned gates."
);
@@ -1478,7 +1479,7 @@ async fn run_server_owned_completion(
)
};
eprintln!(
slog!(
"[agents] Server-owned completion for '{story_id}:{agent_name}': gates_passed={gates_passed}"
);
@@ -1595,7 +1596,7 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
let source_path = source_dir.join(format!("{story_id}.md"));
if !source_path.exists() {
eprintln!(
slog!(
"[lifecycle] Work item '{story_id}' not found in {}; skipping move to 2_current/",
source_dir.display()
);
@@ -1608,7 +1609,7 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
std::fs::rename(&source_path, &current_path)
.map_err(|e| format!("Failed to move '{story_id}' to 2_current/: {e}"))?;
eprintln!(
slog!(
"[lifecycle] Moved '{story_id}' from {} to work/2_current/",
source_dir.display()
);
@@ -1655,7 +1656,7 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
} else {
"work/4_merge/"
};
eprintln!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_archived/");
slog!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_archived/");
Ok(())
}
@@ -1697,7 +1698,7 @@ pub fn move_story_to_merge(project_root: &Path, story_id: &str) -> Result<(), St
} else {
"work/3_qa/"
};
eprintln!("[lifecycle] Moved '{story_id}' from {from_dir} to work/4_merge/");
slog!("[lifecycle] Moved '{story_id}' from {from_dir} to work/4_merge/");
Ok(())
}
@@ -1728,7 +1729,7 @@ pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), Strin
std::fs::rename(&current_path, &qa_path)
.map_err(|e| format!("Failed to move '{story_id}' to 3_qa/: {e}"))?;
eprintln!("[lifecycle] Moved '{story_id}' from work/2_current/ to work/3_qa/");
slog!("[lifecycle] Moved '{story_id}' from work/2_current/ to work/3_qa/");
Ok(())
}
@@ -1765,7 +1766,7 @@ pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), Str
std::fs::rename(&source_path, &archive_path)
.map_err(|e| format!("Failed to move bug '{bug_id}' to 5_archived/: {e}"))?;
eprintln!(
slog!(
"[lifecycle] Closed bug '{bug_id}' → work/5_archived/"
);
@@ -2221,7 +2222,7 @@ fn run_agent_pty_blocking(
cmd.env_remove("CLAUDECODE");
cmd.env_remove("CLAUDE_CODE_ENTRYPOINT");
eprintln!("[agent:{story_id}:{agent_name}] Spawning {command} in {cwd} with args: {args:?}");
slog!("[agent:{story_id}:{agent_name}] Spawning {command} in {cwd} with args: {args:?}");
let mut child = pair
.slave
@@ -2318,7 +2319,7 @@ fn run_agent_pty_blocking(
let _ = child.kill();
let _ = child.wait();
eprintln!(
slog!(
"[agent:{story_id}:{agent_name}] Done. Session: {:?}",
session_id
);