Accept story 30: Worktree-based agent orchestration

Add git worktree isolation for concurrent story agents. Each agent now
runs in its own worktree with setup/teardown commands driven by
.story_kit/project.toml config. Agents stream output via SSE and support
start/stop lifecycle with Pending/Running/Completed/Failed statuses.

Backend: config.rs (TOML parsing), worktree.rs (git worktree lifecycle),
refactored agents.rs (broadcast streaming), agents_sse.rs (SSE endpoint).
Frontend: AgentPanel.tsx with Run/Stop buttons and streaming output log.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Dave
2026-02-19 17:58:53 +00:00
parent 7e56648954
commit 5e5cdd9b2f
15 changed files with 1440 additions and 281 deletions

View File

@@ -22,6 +22,9 @@ rust-embed = { workspace = true }
mime_guess = { workspace = true }
homedir = { workspace = true }
serde_yaml = "0.9"
toml = "0.8"
async-stream = "0.3"
bytes = "1"
portable-pty = { workspace = true }
strip-ansi-escapes = { workspace = true }

View File

@@ -1,167 +1,305 @@
use crate::config::ProjectConfig;
use crate::worktree::{self, WorktreeInfo};
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
use serde::{Deserialize, Serialize};
use serde::Serialize;
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::sync::Mutex;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use tokio::sync::broadcast;
/// Manages multiple concurrent Claude Code agent sessions.
///
/// Each agent is identified by a string name (e.g., "coder-1", "coder-2").
/// Agents run `claude -p` in a PTY for Max subscription billing.
/// Sessions can be resumed for multi-turn conversations.
pub struct AgentPool {
agents: Mutex<HashMap<String, AgentState>>,
/// Events streamed from a running agent to SSE clients.
///
/// Internally tagged for the wire: serialized as `{"type":"status",...}`,
/// `{"type":"output",...}`, etc. (snake_case), so the frontend can switch
/// on the `type` field.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AgentEvent {
    /// Agent status changed (values mirror `AgentStatus`, plus "stopped").
    Status { story_id: String, status: String },
    /// Raw text output from the agent process.
    Output { story_id: String, text: String },
    /// Agent produced a JSON event from `--output-format stream-json`.
    AgentJson { story_id: String, data: serde_json::Value },
    /// Agent finished. `session_id` is present when the stream reported one.
    Done {
        story_id: String,
        session_id: Option<String>,
    },
    /// Agent errored.
    Error { story_id: String, message: String },
}
#[derive(Clone, Serialize)]
pub struct AgentInfo {
pub name: String,
pub role: String,
pub cwd: String,
pub session_id: Option<String>,
pub status: AgentStatus,
pub message_count: usize,
}
/// Lifecycle status of a story agent.
///
/// Serialized in snake_case for the HTTP API; `Display` (below) emits the
/// same lowercase names.
#[derive(Debug, Clone, Serialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum AgentStatus {
    /// Registered, but the worktree / setup has not finished yet.
    Pending,
    /// Agent process is running in its worktree.
    Running,
    /// Agent process exited successfully.
    Completed,
    /// Agent process failed (or the agent was stopped mid-flight).
    Failed,
}
struct AgentState {
role: String,
cwd: String,
session_id: Option<String>,
message_count: usize,
impl std::fmt::Display for AgentStatus {
    /// Lowercase status name, matching the serde `snake_case` wire format.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::Pending => "pending",
            Self::Running => "running",
            Self::Completed => "completed",
            Self::Failed => "failed",
        };
        f.write_str(label)
    }
}
#[derive(Deserialize)]
pub struct CreateAgentRequest {
pub name: String,
pub role: String,
pub cwd: String,
}
#[derive(Deserialize)]
pub struct SendMessageRequest {
pub message: String,
}
#[derive(Serialize)]
pub struct AgentResponse {
pub agent: String,
pub text: String,
#[derive(Serialize, Clone)]
pub struct AgentInfo {
pub story_id: String,
pub status: AgentStatus,
pub session_id: Option<String>,
pub model: Option<String>,
pub api_key_source: Option<String>,
pub rate_limit_type: Option<String>,
pub cost_usd: Option<f64>,
pub input_tokens: Option<u64>,
pub output_tokens: Option<u64>,
pub duration_ms: Option<u64>,
pub worktree_path: Option<String>,
}
/// Internal per-story bookkeeping held by `AgentPool`.
struct StoryAgent {
    /// Current lifecycle status.
    status: AgentStatus,
    /// Set once the worktree has been created; `None` while still Pending.
    worktree_info: Option<WorktreeInfo>,
    /// Project config captured at start time (drives worktree setup/teardown).
    config: ProjectConfig,
    /// Claude session id harvested from the agent's JSON stream, if any.
    session_id: Option<String>,
    /// Broadcast sender fanning events out to SSE subscribers.
    tx: broadcast::Sender<AgentEvent>,
    /// Handle of the spawned agent task; taken on stop so it can be aborted.
    task_handle: Option<tokio::task::JoinHandle<()>>,
}
/// Manages concurrent story agents, each in its own worktree.
///
/// The map is wrapped in `Arc<Mutex<…>>` so spawned agent tasks can hold a
/// clone of it and update their own entry when they finish.
pub struct AgentPool {
    agents: Arc<Mutex<HashMap<String, StoryAgent>>>,
}
impl AgentPool {
pub fn new() -> Self {
Self {
agents: Mutex::new(HashMap::new()),
agents: Arc::new(Mutex::new(HashMap::new())),
}
}
pub fn create_agent(&self, req: CreateAgentRequest) -> Result<AgentInfo, String> {
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
if agents.contains_key(&req.name) {
return Err(format!("Agent '{}' already exists", req.name));
/// Start an agent for a story: load config, create worktree, spawn agent.
pub async fn start_agent(
&self,
project_root: &Path,
story_id: &str,
) -> Result<AgentInfo, String> {
// Check not already running
{
let agents = self.agents.lock().map_err(|e| e.to_string())?;
if let Some(agent) = agents.get(story_id)
&& (agent.status == AgentStatus::Running || agent.status == AgentStatus::Pending) {
return Err(format!(
"Agent for story '{story_id}' is already {}",
agent.status
));
}
}
let state = AgentState {
role: req.role.clone(),
cwd: req.cwd.clone(),
session_id: None,
message_count: 0,
};
let config = ProjectConfig::load(project_root)?;
let (tx, _) = broadcast::channel::<AgentEvent>(256);
let info = AgentInfo {
name: req.name.clone(),
role: req.role,
cwd: req.cwd,
session_id: None,
status: AgentStatus::Idle,
message_count: 0,
};
// Register as pending
{
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
agents.insert(
story_id.to_string(),
StoryAgent {
status: AgentStatus::Pending,
worktree_info: None,
config: config.clone(),
session_id: None,
tx: tx.clone(),
task_handle: None,
},
);
}
agents.insert(req.name, state);
Ok(info)
let _ = tx.send(AgentEvent::Status {
story_id: story_id.to_string(),
status: "pending".to_string(),
});
// Create worktree
let wt_info = worktree::create_worktree(project_root, story_id, &config).await?;
// Update with worktree info
{
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
if let Some(agent) = agents.get_mut(story_id) {
agent.worktree_info = Some(wt_info.clone());
}
}
// Spawn the agent process
let wt_path_str = wt_info.path.to_string_lossy().to_string();
let rendered = config.render_agent_args(&wt_path_str, story_id);
let (command, args, prompt) = rendered.ok_or_else(|| {
"No [agent] section in config — cannot spawn agent".to_string()
})?;
let sid = story_id.to_string();
let tx_clone = tx.clone();
let agents_ref = self.agents.clone();
let cwd = wt_path_str.clone();
let handle = tokio::spawn(async move {
let _ = tx_clone.send(AgentEvent::Status {
story_id: sid.clone(),
status: "running".to_string(),
});
match run_agent_pty_streaming(&sid, &command, &args, &prompt, &cwd, &tx_clone).await {
Ok(session_id) => {
// Mark completed in the pool
if let Ok(mut agents) = agents_ref.lock()
&& let Some(agent) = agents.get_mut(&sid) {
agent.status = AgentStatus::Completed;
agent.session_id = session_id.clone();
}
let _ = tx_clone.send(AgentEvent::Done {
story_id: sid.clone(),
session_id,
});
}
Err(e) => {
// Mark failed in the pool
if let Ok(mut agents) = agents_ref.lock()
&& let Some(agent) = agents.get_mut(&sid) {
agent.status = AgentStatus::Failed;
}
let _ = tx_clone.send(AgentEvent::Error {
story_id: sid.clone(),
message: e,
});
}
}
});
// Update status to running with task handle
{
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
if let Some(agent) = agents.get_mut(story_id) {
agent.status = AgentStatus::Running;
agent.task_handle = Some(handle);
}
}
Ok(AgentInfo {
story_id: story_id.to_string(),
status: AgentStatus::Running,
session_id: None,
worktree_path: Some(wt_path_str),
})
}
/// Stop a running agent and clean up its worktree.
///
/// Aborts the agent task, removes the worktree (warning on failure),
/// broadcasts a terminal `"stopped"` status event, and forgets the agent.
pub async fn stop_agent(&self, project_root: &Path, story_id: &str) -> Result<(), String> {
    // Clone/take everything we need inside one short lock scope so the
    // std Mutex guard is never held across the awaits below.
    let (worktree_info, config, task_handle, tx) = {
        let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
        let agent = agents
            .get_mut(story_id)
            .ok_or_else(|| format!("No agent for story '{story_id}'"))?;
        let wt = agent.worktree_info.clone();
        let cfg = agent.config.clone();
        let handle = agent.task_handle.take();
        let tx = agent.tx.clone();
        // NOTE(review): a user-initiated stop is recorded as Failed; the
        // entry is removed below, but a concurrent lister could briefly
        // observe "failed" for a deliberately stopped agent — confirm intent.
        agent.status = AgentStatus::Failed;
        (wt, cfg, handle, tx)
    };
    // Abort the task; awaiting the aborted handle lets it finish unwinding.
    if let Some(handle) = task_handle {
        handle.abort();
        let _ = handle.await;
    }
    // Remove worktree — best effort: log and continue so stop always succeeds.
    if let Some(ref wt) = worktree_info
        && let Err(e) = worktree::remove_worktree(project_root, wt, &config).await {
        eprintln!("[agents] Worktree cleanup warning for {story_id}: {e}");
    }
    // "stopped" is a terminal status for SSE subscribers (closes their stream).
    let _ = tx.send(AgentEvent::Status {
        story_id: story_id.to_string(),
        status: "stopped".to_string(),
    });
    // Remove from map
    {
        let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
        agents.remove(story_id);
    }
    Ok(())
}
/// List all agents with their status.
pub fn list_agents(&self) -> Result<Vec<AgentInfo>, String> {
let agents = self.agents.lock().map_err(|e| e.to_string())?;
Ok(agents
.iter()
.map(|(name, state)| AgentInfo {
name: name.clone(),
role: state.role.clone(),
cwd: state.cwd.clone(),
session_id: state.session_id.clone(),
status: AgentStatus::Idle,
message_count: state.message_count,
.map(|(story_id, agent)| AgentInfo {
story_id: story_id.clone(),
status: agent.status.clone(),
session_id: agent.session_id.clone(),
worktree_path: agent
.worktree_info
.as_ref()
.map(|wt| wt.path.to_string_lossy().to_string()),
})
.collect())
}
/// Send a message to an agent and wait for the complete response.
/// This spawns a `claude -p` process in a PTY, optionally resuming
/// a previous session for multi-turn conversations.
pub async fn send_message(
/// Subscribe to the live event stream of a story agent.
///
/// Returns an error if no agent is registered for `story_id`.
pub fn subscribe(&self, story_id: &str) -> Result<broadcast::Receiver<AgentEvent>, String> {
    self.agents
        .lock()
        .map_err(|e| e.to_string())?
        .get(story_id)
        .map(|agent| agent.tx.subscribe())
        .ok_or_else(|| format!("No agent for story '{story_id}'"))
}
/// Get project root helper.
pub fn get_project_root(
&self,
agent_name: &str,
message: &str,
) -> Result<AgentResponse, String> {
let (cwd, role, session_id) = {
let agents = self.agents.lock().map_err(|e| e.to_string())?;
let state = agents
.get(agent_name)
.ok_or_else(|| format!("Agent '{}' not found", agent_name))?;
(
state.cwd.clone(),
state.role.clone(),
state.session_id.clone(),
)
};
let agent = agent_name.to_string();
let msg = message.to_string();
let role_clone = role.clone();
let result = tokio::task::spawn_blocking(move || {
run_agent_pty(&agent, &msg, &cwd, &role_clone, session_id.as_deref())
})
.await
.map_err(|e| format!("Agent task panicked: {e}"))??;
// Update session_id for next message
if let Some(ref sid) = result.session_id {
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
if let Some(state) = agents.get_mut(agent_name) {
state.session_id = Some(sid.clone());
state.message_count += 1;
}
}
Ok(result)
state: &crate::state::SessionState,
) -> Result<PathBuf, String> {
state.get_project_root()
}
}
fn run_agent_pty(
agent_name: &str,
message: &str,
/// Spawn claude agent in a PTY and stream events through the broadcast channel.
async fn run_agent_pty_streaming(
story_id: &str,
command: &str,
args: &[String],
prompt: &str,
cwd: &str,
role: &str,
resume_session: Option<&str>,
) -> Result<AgentResponse, String> {
tx: &broadcast::Sender<AgentEvent>,
) -> Result<Option<String>, String> {
let sid = story_id.to_string();
let cmd = command.to_string();
let args = args.to_vec();
let prompt = prompt.to_string();
let cwd = cwd.to_string();
let tx = tx.clone();
tokio::task::spawn_blocking(move || {
run_agent_pty_blocking(&sid, &cmd, &args, &prompt, &cwd, &tx)
})
.await
.map_err(|e| format!("Agent task panicked: {e}"))?
}
fn run_agent_pty_blocking(
story_id: &str,
command: &str,
args: &[String],
prompt: &str,
cwd: &str,
tx: &broadcast::Sender<AgentEvent>,
) -> Result<Option<String>, String> {
let pty_system = native_pty_system();
let pair = pty_system
@@ -173,9 +311,17 @@ fn run_agent_pty(
})
.map_err(|e| format!("Failed to open PTY: {e}"))?;
let mut cmd = CommandBuilder::new("claude");
let mut cmd = CommandBuilder::new(command);
// -p <prompt> must come first
cmd.arg("-p");
cmd.arg(message);
cmd.arg(prompt);
// Add configured args (e.g., --directory /path/to/worktree)
for arg in args {
cmd.arg(arg);
}
cmd.arg("--output-format");
cmd.arg("stream-json");
cmd.arg("--verbose");
@@ -184,32 +330,15 @@ fn run_agent_pty(
cmd.arg("--permission-mode");
cmd.arg("bypassPermissions");
// Append role as system prompt context
cmd.arg("--append-system-prompt");
cmd.arg(format!(
"You are agent '{}' with role: {}. Work autonomously on the task given.",
agent_name, role
));
// Resume previous session if available
if let Some(session_id) = resume_session {
cmd.arg("--resume");
cmd.arg(session_id);
}
cmd.cwd(cwd);
cmd.env("NO_COLOR", "1");
eprintln!(
"[agent:{}] Spawning claude -p (session: {:?})",
agent_name,
resume_session.unwrap_or("new")
);
eprintln!("[agent:{story_id}] Spawning {command} in {cwd} with args: {args:?}");
let mut child = pair
.slave
.spawn_command(cmd)
.map_err(|e| format!("Failed to spawn claude for agent {agent_name}: {e}"))?;
.map_err(|e| format!("Failed to spawn agent for {story_id}: {e}"))?;
drop(pair.slave);
@@ -221,18 +350,7 @@ fn run_agent_pty(
drop(pair.master);
let buf_reader = BufReader::new(reader);
let mut response = AgentResponse {
agent: agent_name.to_string(),
text: String::new(),
session_id: None,
model: None,
api_key_source: None,
rate_limit_type: None,
cost_usd: None,
input_tokens: None,
output_tokens: None,
duration_ms: None,
};
let mut session_id: Option<String> = None;
for line in buf_reader.lines() {
let line = match line {
@@ -245,67 +363,57 @@ fn run_agent_pty(
continue;
}
// Try to parse as JSON
let json: serde_json::Value = match serde_json::from_str(trimmed) {
Ok(j) => j,
Err(_) => continue, // skip non-JSON (terminal escapes)
Err(_) => {
// Non-JSON output (terminal escapes etc.) — send as raw output
let _ = tx.send(AgentEvent::Output {
story_id: story_id.to_string(),
text: trimmed.to_string(),
});
continue;
}
};
let event_type = json.get("type").and_then(|t| t.as_str()).unwrap_or("");
match event_type {
"system" => {
response.session_id = json
session_id = json
.get("session_id")
.and_then(|s| s.as_str())
.map(|s| s.to_string());
response.model = json
.get("model")
.and_then(|s| s.as_str())
.map(|s| s.to_string());
response.api_key_source = json
.get("apiKeySource")
.and_then(|s| s.as_str())
.map(|s| s.to_string());
}
"rate_limit_event" => {
if let Some(info) = json.get("rate_limit_info") {
response.rate_limit_type = info
.get("rateLimitType")
.and_then(|s| s.as_str())
.map(|s| s.to_string());
}
}
"assistant" => {
if let Some(message) = json.get("message") {
if let Some(content) = message.get("content").and_then(|c| c.as_array()) {
if let Some(message) = json.get("message")
&& let Some(content) = message.get("content").and_then(|c| c.as_array()) {
for block in content {
if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
response.text.push_str(text);
let _ = tx.send(AgentEvent::Output {
story_id: story_id.to_string(),
text: text.to_string(),
});
}
}
}
}
}
"result" => {
response.cost_usd = json.get("total_cost_usd").and_then(|c| c.as_f64());
response.duration_ms = json.get("duration_ms").and_then(|d| d.as_u64());
if let Some(usage) = json.get("usage") {
response.input_tokens =
usage.get("input_tokens").and_then(|t| t.as_u64());
response.output_tokens =
usage.get("output_tokens").and_then(|t| t.as_u64());
}
}
_ => {}
}
// Forward all JSON events
let _ = tx.send(AgentEvent::AgentJson {
story_id: story_id.to_string(),
data: json,
});
}
let _ = child.kill();
eprintln!(
"[agent:{}] Done. Session: {:?}, tokens: {:?}/{:?}",
agent_name, response.session_id, response.input_tokens, response.output_tokens
"[agent:{story_id}] Done. Session: {:?}",
session_id
);
Ok(response)
Ok(session_id)
}

145
server/src/config.rs Normal file
View File

@@ -0,0 +1,145 @@
use serde::Deserialize;
use std::path::Path;
/// Parsed form of `.story_kit/project.toml`.
///
/// `component` maps to the `[[component]]` array-of-tables; `agent` to the
/// optional `[agent]` table.
#[derive(Debug, Clone, Deserialize)]
pub struct ProjectConfig {
    /// Components with per-worktree setup/teardown commands; empty if absent.
    #[serde(default)]
    pub component: Vec<ComponentConfig>,
    /// Agent launch configuration; `None` when the TOML omits `[agent]`.
    pub agent: Option<AgentConfig>,
}
/// One `[[component]]` entry: a sub-path of the project plus the shell
/// commands to run there when a worktree is created or removed.
#[derive(Debug, Clone, Deserialize)]
pub struct ComponentConfig {
    /// Component name, used in log messages.
    pub name: String,
    /// Path relative to the worktree root; defaults to ".".
    #[serde(default = "default_path")]
    pub path: String,
    /// Shell commands run in order after worktree creation.
    #[serde(default)]
    pub setup: Vec<String>,
    /// Shell commands run (best effort) before worktree removal.
    #[serde(default)]
    pub teardown: Vec<String>,
}
/// The `[agent]` table: how to launch the story agent.
///
/// `command`, each of `args`, and `prompt` may contain the template
/// variables `{{worktree_path}}` and `{{story_id}}`, substituted by
/// `ProjectConfig::render_agent_args`.
#[derive(Debug, Clone, Deserialize)]
pub struct AgentConfig {
    /// Executable to run; defaults to "claude".
    #[serde(default = "default_agent_command")]
    pub command: String,
    /// Extra CLI arguments; defaults to none.
    #[serde(default)]
    pub args: Vec<String>,
    /// Initial prompt; defaults to pointing the agent at .story_kit/README.md.
    #[serde(default = "default_agent_prompt")]
    pub prompt: String,
}
/// Serde default for `ComponentConfig::path`: the worktree root itself.
fn default_path() -> String {
    String::from(".")
}

/// Serde default for `AgentConfig::command`.
fn default_agent_command() -> String {
    String::from("claude")
}

/// Serde default for `AgentConfig::prompt`; `{{story_id}}` is substituted
/// at render time.
fn default_agent_prompt() -> String {
    String::from("Read .story_kit/README.md, then pick up story {{story_id}}")
}
impl Default for ProjectConfig {
    /// Fallback used when no project.toml exists: no components, and a
    /// stock `claude` agent with the default prompt and no extra args.
    fn default() -> Self {
        let agent = AgentConfig {
            command: default_agent_command(),
            args: Vec::new(),
            prompt: default_agent_prompt(),
        };
        Self {
            component: Vec::new(),
            agent: Some(agent),
        }
    }
}
impl ProjectConfig {
    /// Load from `.story_kit/project.toml` relative to the given root.
    /// Falls back to sensible defaults if the file doesn't exist.
    pub fn load(project_root: &Path) -> Result<Self, String> {
        let config_path = project_root.join(".story_kit/project.toml");
        // Read first and branch on NotFound rather than calling exists():
        // avoids the exists()/read TOCTOU race and saves a stat syscall.
        let content = match std::fs::read_to_string(&config_path) {
            Ok(content) => content,
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                return Ok(Self::default());
            }
            Err(e) => return Err(format!("Read config: {e}")),
        };
        toml::from_str(&content).map_err(|e| format!("Parse config: {e}"))
    }

    /// Render the `{{worktree_path}}` / `{{story_id}}` template variables in
    /// the agent command, args, and prompt.
    ///
    /// Returns `None` when no `[agent]` section is configured.
    pub fn render_agent_args(
        &self,
        worktree_path: &str,
        story_id: &str,
    ) -> Option<(String, Vec<String>, String)> {
        let agent = self.agent.as_ref()?;
        let render = |s: &str| {
            s.replace("{{worktree_path}}", worktree_path)
                .replace("{{story_id}}", story_id)
        };
        let command = render(&agent.command);
        let args: Vec<String> = agent.args.iter().map(|a| render(a)).collect();
        let prompt = render(&agent.prompt);
        Some((command, args, prompt))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;

    // A missing .story_kit/project.toml must fall back to Default:
    // stock agent present, no components.
    #[test]
    fn default_config_when_missing() {
        let tmp = tempfile::tempdir().unwrap();
        let config = ProjectConfig::load(tmp.path()).unwrap();
        assert!(config.agent.is_some());
        assert!(config.component.is_empty());
    }

    // Full round-trip: write a project.toml and check components and
    // [agent] are parsed with their declared values.
    #[test]
    fn parse_project_toml() {
        let tmp = tempfile::tempdir().unwrap();
        let sk = tmp.path().join(".story_kit");
        fs::create_dir_all(&sk).unwrap();
        fs::write(
            sk.join("project.toml"),
            r#"
[[component]]
name = "server"
path = "."
setup = ["cargo check"]
teardown = []
[[component]]
name = "frontend"
path = "frontend"
setup = ["pnpm install"]
[agent]
command = "claude"
args = ["--print", "--directory", "{{worktree_path}}"]
prompt = "Pick up story {{story_id}}"
"#,
        )
        .unwrap();
        let config = ProjectConfig::load(tmp.path()).unwrap();
        assert_eq!(config.component.len(), 2);
        assert_eq!(config.component[0].name, "server");
        assert_eq!(config.component[1].setup, vec!["pnpm install"]);
        let agent = config.agent.unwrap();
        assert_eq!(agent.command, "claude");
    }

    // Template substitution on the default config: command untouched,
    // no args, and the story id lands in the prompt.
    #[test]
    fn render_template_vars() {
        let config = ProjectConfig::default();
        let (cmd, args, prompt) = config.render_agent_args("/tmp/wt", "42_foo").unwrap();
        assert_eq!(cmd, "claude");
        assert!(args.is_empty());
        assert!(prompt.contains("42_foo"));
    }
}

View File

@@ -9,39 +9,16 @@ enum AgentsTags {
}
#[derive(Object)]
struct CreateAgentPayload {
name: String,
role: String,
cwd: String,
}
#[derive(Object)]
struct SendMessagePayload {
message: String,
struct StoryIdPayload {
story_id: String,
}
#[derive(Object, Serialize)]
struct AgentInfoResponse {
name: String,
role: String,
cwd: String,
session_id: Option<String>,
story_id: String,
status: String,
message_count: usize,
}
#[derive(Object, Serialize)]
struct AgentMessageResponse {
agent: String,
text: String,
session_id: Option<String>,
model: Option<String>,
api_key_source: Option<String>,
rate_limit_type: Option<String>,
cost_usd: Option<f64>,
input_tokens: Option<u64>,
output_tokens: Option<u64>,
duration_ms: Option<u64>,
worktree_path: Option<String>,
}
pub struct AgentsApi {
@@ -50,31 +27,52 @@ pub struct AgentsApi {
#[OpenApi(tag = "AgentsTags::Agents")]
impl AgentsApi {
/// Create a new agent with a name, role, and working directory.
#[oai(path = "/agents", method = "post")]
async fn create_agent(
/// Start an agent for a given story (creates worktree, runs setup, spawns agent).
#[oai(path = "/agents/start", method = "post")]
async fn start_agent(
&self,
payload: Json<CreateAgentPayload>,
payload: Json<StoryIdPayload>,
) -> OpenApiResult<Json<AgentInfoResponse>> {
let req = crate::agents::CreateAgentRequest {
name: payload.0.name,
role: payload.0.role,
cwd: payload.0.cwd,
};
let project_root = self
.ctx
.agents
.get_project_root(&self.ctx.state)
.map_err(bad_request)?;
let info = self.ctx.agents.create_agent(req).map_err(bad_request)?;
let info = self
.ctx
.agents
.start_agent(&project_root, &payload.0.story_id)
.await
.map_err(bad_request)?;
Ok(Json(AgentInfoResponse {
name: info.name,
role: info.role,
cwd: info.cwd,
story_id: info.story_id,
status: info.status.to_string(),
session_id: info.session_id,
status: "idle".to_string(),
message_count: info.message_count,
worktree_path: info.worktree_path,
}))
}
/// List all registered agents.
/// Stop a running agent and clean up its worktree.
#[oai(path = "/agents/stop", method = "post")]
async fn stop_agent(&self, payload: Json<StoryIdPayload>) -> OpenApiResult<Json<bool>> {
let project_root = self
.ctx
.agents
.get_project_root(&self.ctx.state)
.map_err(bad_request)?;
self.ctx
.agents
.stop_agent(&project_root, &payload.0.story_id)
.await
.map_err(bad_request)?;
Ok(Json(true))
}
/// List all agents with their status.
#[oai(path = "/agents", method = "get")]
async fn list_agents(&self) -> OpenApiResult<Json<Vec<AgentInfoResponse>>> {
let agents = self.ctx.agents.list_agents().map_err(bad_request)?;
@@ -83,45 +81,12 @@ impl AgentsApi {
agents
.into_iter()
.map(|info| AgentInfoResponse {
name: info.name,
role: info.role,
cwd: info.cwd,
story_id: info.story_id,
status: info.status.to_string(),
session_id: info.session_id,
status: match info.status {
crate::agents::AgentStatus::Idle => "idle".to_string(),
crate::agents::AgentStatus::Running => "running".to_string(),
},
message_count: info.message_count,
worktree_path: info.worktree_path,
})
.collect(),
))
}
/// Send a message to an agent and wait for its response.
#[oai(path = "/agents/:name/message", method = "post")]
async fn send_message(
&self,
name: poem_openapi::param::Path<String>,
payload: Json<SendMessagePayload>,
) -> OpenApiResult<Json<AgentMessageResponse>> {
let result = self
.ctx
.agents
.send_message(&name.0, &payload.0.message)
.await
.map_err(bad_request)?;
Ok(Json(AgentMessageResponse {
agent: result.agent,
text: result.text,
session_id: result.session_id,
model: result.model,
api_key_source: result.api_key_source,
rate_limit_type: result.rate_limit_type,
cost_usd: result.cost_usd,
input_tokens: result.input_tokens,
output_tokens: result.output_tokens,
duration_ms: result.duration_ms,
}))
}
}

View File

@@ -0,0 +1,58 @@
use crate::http::context::AppContext;
use poem::handler;
use poem::http::StatusCode;
use poem::web::{Data, Path};
use poem::{Body, IntoResponse, Response};
use std::sync::Arc;
/// SSE endpoint: `GET /agents/:story_id/stream`
///
/// Streams `AgentEvent`s as Server-Sent Events. Each event is JSON-encoded
/// with `data:` prefix and double newline terminator per the SSE spec.
/// Returns 404 if no agent is registered for the story. The stream closes
/// itself on terminal events (Done, Error, or a "stopped" status).
#[handler]
pub async fn agent_stream(
    Path(story_id): Path<String>,
    ctx: Data<&Arc<AppContext>>,
) -> impl IntoResponse {
    let mut rx = match ctx.agents.subscribe(&story_id) {
        Ok(rx) => rx,
        Err(e) => {
            return Response::builder()
                .status(StatusCode::NOT_FOUND)
                .body(Body::from_string(e));
        }
    };
    let stream = async_stream::stream! {
        loop {
            match rx.recv().await {
                Ok(event) => {
                    // Events that fail to serialize are silently dropped
                    // (only the `data:` frame is skipped, not the stream).
                    if let Ok(json) = serde_json::to_string(&event) {
                        yield Ok::<_, std::io::Error>(format!("data: {json}\n\n"));
                    }
                    // Check for terminal events
                    match &event {
                        crate::agents::AgentEvent::Done { .. }
                        | crate::agents::AgentEvent::Error { .. } => break,
                        crate::agents::AgentEvent::Status { status, .. }
                            if status == "stopped" => break,
                        _ => {}
                    }
                }
                // Slow client fell behind the broadcast buffer: emit a
                // hand-built JSON warning frame and keep streaming.
                Err(tokio::sync::broadcast::error::RecvError::Lagged(n)) => {
                    let msg = format!("{{\"type\":\"warning\",\"message\":\"Skipped {n} events\"}}");
                    yield Ok::<_, std::io::Error>(format!("data: {msg}\n\n"));
                }
                // All senders dropped (agent removed) — end the stream.
                Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
            }
        }
    };
    Response::builder()
        .header("Content-Type", "text/event-stream")
        .header("Cache-Control", "no-cache")
        .header("Connection", "keep-alive")
        .body(Body::from_bytes_stream(
            futures::StreamExt::map(stream, |r| r.map(bytes::Bytes::from)),
        ))
}

View File

@@ -1,4 +1,5 @@
pub mod agents;
pub mod agents_sse;
pub mod anthropic;
pub mod assets;
pub mod chat;
@@ -33,6 +34,10 @@ pub fn build_routes(ctx: AppContext) -> impl poem::Endpoint {
.nest("/api", api_service)
.nest("/docs", docs_service.swagger_ui())
.at("/ws", get(ws::ws_handler))
.at(
"/agents/:story_id/stream",
get(agents_sse::agent_stream),
)
.at("/health", get(health::health))
.at("/assets/*path", get(assets::embedded_asset))
.at("/", get(assets::embedded_index))

View File

@@ -158,8 +158,8 @@ fn run_pty_session(
eprintln!("[pty-debug] processing: {}...", &trimmed[..trimmed.len().min(120)]);
// Try to parse as JSON
if let Ok(json) = serde_json::from_str::<serde_json::Value>(trimmed) {
if let Some(event_type) = json.get("type").and_then(|t| t.as_str()) {
if let Ok(json) = serde_json::from_str::<serde_json::Value>(trimmed)
&& let Some(event_type) = json.get("type").and_then(|t| t.as_str()) {
match event_type {
// Streaming deltas (when --include-partial-messages is used)
"stream_event" => {
@@ -169,15 +169,14 @@ fn run_pty_session(
}
// Complete assistant message
"assistant" => {
if let Some(message) = json.get("message") {
if let Some(content) = message.get("content").and_then(|c| c.as_array()) {
if let Some(message) = json.get("message")
&& let Some(content) = message.get("content").and_then(|c| c.as_array()) {
for block in content {
if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
let _ = token_tx.send(text.to_string());
}
}
}
}
}
// Final result with usage stats
"result" => {
@@ -209,7 +208,6 @@ fn run_pty_session(
_ => {}
}
}
}
// Ignore non-JSON lines (terminal escape sequences)
if got_result {
@@ -223,15 +221,14 @@ fn run_pty_session(
// Drain remaining lines
while let Ok(Some(line)) = line_rx.try_recv() {
let trimmed = line.trim();
if let Ok(json) = serde_json::from_str::<serde_json::Value>(trimmed) {
if let Some(event) = json
if let Ok(json) = serde_json::from_str::<serde_json::Value>(trimmed)
&& let Some(event) = json
.get("type")
.filter(|t| t.as_str() == Some("stream_event"))
.and_then(|_| json.get("event"))
{
handle_stream_event(event, &token_tx);
}
}
}
break;
}

View File

@@ -1,10 +1,12 @@
mod agents;
mod config;
mod http;
mod io;
mod llm;
mod state;
mod store;
mod workflow;
mod worktree;
use crate::agents::AgentPool;
use crate::http::build_routes;

197
server/src/worktree.rs Normal file
View File

@@ -0,0 +1,197 @@
use crate::config::ProjectConfig;
use std::path::{Path, PathBuf};
use std::process::Command;
/// Identifies a story's git worktree: its checkout path and branch name.
#[derive(Debug, Clone)]
pub struct WorktreeInfo {
    /// Filesystem path of the worktree checkout (sibling of the project root).
    pub path: PathBuf,
    /// Branch checked out in the worktree (`feature/story-{id}`).
    pub branch: String,
}
/// Worktree path as a sibling of the project root: `{project_root}-story-{id}`.
/// E.g. `/path/to/story-kit-app` → `/path/to/story-kit-app-story-42_foo`.
fn worktree_path(project_root: &Path, story_id: &str) -> PathBuf {
    // "project" is the fallback when the root has no final component
    // (e.g. a filesystem root).
    let dir_name = match project_root.file_name() {
        Some(name) => name.to_string_lossy().to_string(),
        None => "project".to_string(),
    };
    project_root
        .parent()
        .unwrap_or(project_root)
        .join(format!("{dir_name}-story-{story_id}"))
}
/// Git branch used for a story's worktree: `feature/story-{id}`.
fn branch_name(story_id: &str) -> String {
    ["feature/story-", story_id].concat()
}
/// Create a git worktree for the given story.
///
/// - Creates the worktree at `{project_root}-story-{story_id}` (sibling directory)
///   on branch `feature/story-{story_id}`.
/// - Runs setup commands from the config for each component.
/// - If the worktree/branch already exists, reuses rather than errors.
pub async fn create_worktree(
    project_root: &Path,
    story_id: &str,
    config: &ProjectConfig,
) -> Result<WorktreeInfo, String> {
    let wt_path = worktree_path(project_root, story_id);
    let branch = branch_name(story_id);
    let root = project_root.to_path_buf();
    // Already exists — reuse
    // NOTE(review): only checks that the directory exists, not that it is a
    // valid git worktree on the expected branch — confirm that's acceptable.
    if wt_path.exists() {
        // Setup commands are re-run on every reuse.
        run_setup_commands(&wt_path, config).await?;
        return Ok(WorktreeInfo {
            path: wt_path,
            branch,
        });
    }
    // git commands are blocking Command spawns — run them off the async runtime.
    let wt = wt_path.clone();
    let br = branch.clone();
    tokio::task::spawn_blocking(move || create_worktree_sync(&root, &wt, &br))
        .await
        .map_err(|e| format!("spawn_blocking: {e}"))??;
    run_setup_commands(&wt_path, config).await?;
    Ok(WorktreeInfo {
        path: wt_path,
        branch,
    })
}
/// Blocking half of worktree creation: create the branch (best effort)
/// and run `git worktree add`.
fn create_worktree_sync(
    project_root: &Path,
    wt_path: &Path,
    branch: &str,
) -> Result<(), String> {
    // Ensure the parent directory exists
    if let Some(parent) = wt_path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Create worktree dir: {e}"))?;
    }
    // Try to create branch. If it already exists that's fine.
    let _ = Command::new("git")
        .args(["branch", branch])
        .current_dir(project_root)
        .output();
    // Create worktree
    let output = Command::new("git")
        .args([
            "worktree",
            "add",
            &wt_path.to_string_lossy(),
            branch,
        ])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("git worktree add: {e}"))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        // If it says already checked out, that's fine
        // NOTE(review): matching git's stderr text is brittle across git
        // versions/locales — consider `git worktree list --porcelain` instead.
        if stderr.contains("already checked out") || stderr.contains("already exists") {
            return Ok(());
        }
        return Err(format!("git worktree add failed: {stderr}"));
    }
    Ok(())
}
/// Remove a git worktree and its branch.
///
/// Runs teardown commands first (failures are fatal only per `run_teardown_commands`'
/// own policy), then removes the worktree and deletes its branch on the
/// blocking thread pool.
pub async fn remove_worktree(
    project_root: &Path,
    info: &WorktreeInfo,
    config: &ProjectConfig,
) -> Result<(), String> {
    run_teardown_commands(&info.path, config).await?;
    // Clone owned copies so the blocking closure can be 'static.
    let root = project_root.to_path_buf();
    let wt_path = info.path.clone();
    let branch = info.branch.clone();
    tokio::task::spawn_blocking(move || remove_worktree_sync(&root, &wt_path, &branch))
        .await
        .map_err(|e| format!("spawn_blocking: {e}"))?
}
/// Blocking half of removal: `git worktree remove --force`, then a
/// best-effort branch delete.
fn remove_worktree_sync(
    project_root: &Path,
    wt_path: &Path,
    branch: &str,
) -> Result<(), String> {
    // Remove worktree
    let output = Command::new("git")
        .args([
            "worktree",
            "remove",
            "--force",
            &wt_path.to_string_lossy(),
        ])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("git worktree remove: {e}"))?;
    // A failed removal is logged, not fatal — the worktree may already be gone.
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        eprintln!("[worktree] remove warning: {stderr}");
    }
    // Delete branch (best effort)
    // NOTE(review): `-d` refuses to delete unmerged branches, so story
    // branches with unmerged commits survive — presumably intentional; confirm.
    let _ = Command::new("git")
        .args(["branch", "-d", branch])
        .current_dir(project_root)
        .output();
    Ok(())
}
/// Run each component's setup commands inside the worktree, in config order.
/// Fails fast on the first failing command.
async fn run_setup_commands(wt_path: &Path, config: &ProjectConfig) -> Result<(), String> {
    for component in &config.component {
        // Commands run from the component's sub-directory of the worktree.
        let cmd_dir = wt_path.join(&component.path);
        for cmd in &component.setup {
            run_shell_command(cmd, &cmd_dir).await?;
        }
    }
    Ok(())
}
/// Run each component's teardown commands inside the worktree.
/// Individual failures are logged and skipped so teardown always completes.
async fn run_teardown_commands(wt_path: &Path, config: &ProjectConfig) -> Result<(), String> {
    for component in &config.component {
        // Commands run from the component's sub-directory of the worktree.
        let cmd_dir = wt_path.join(&component.path);
        for cmd in &component.teardown {
            // Best effort — don't fail teardown
            if let Err(e) = run_shell_command(cmd, &cmd_dir).await {
                eprintln!("[worktree] teardown warning for {}: {e}", component.name);
            }
        }
    }
    Ok(())
}
/// Run `cmd` through `sh -c` in `cwd` on the blocking thread pool.
///
/// Returns `Err` with the command, its exit status, and captured stderr when
/// the command fails (the exit status was previously omitted, which made
/// signal-killed vs. nonzero-exit failures indistinguishable in logs).
async fn run_shell_command(cmd: &str, cwd: &Path) -> Result<(), String> {
    // Own the data so the blocking closure is 'static.
    let cmd = cmd.to_string();
    let cwd = cwd.to_path_buf();
    tokio::task::spawn_blocking(move || {
        eprintln!("[worktree] Running: {cmd} in {}", cwd.display());
        let output = Command::new("sh")
            .args(["-c", &cmd])
            .current_dir(&cwd)
            .output()
            .map_err(|e| format!("Run '{cmd}': {e}"))?;
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            return Err(format!(
                "Command '{cmd}' failed ({}): {stderr}",
                output.status
            ));
        }
        Ok(())
    })
    .await
    .map_err(|e| format!("spawn_blocking: {e}"))?
}