2026-02-19 17:58:53 +00:00
|
|
|
use crate::config::ProjectConfig;
|
|
|
|
|
use crate::worktree::{self, WorktreeInfo};
|
2026-02-19 15:25:22 +00:00
|
|
|
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
|
2026-02-19 17:58:53 +00:00
|
|
|
use serde::Serialize;
|
2026-02-19 15:25:22 +00:00
|
|
|
use std::collections::HashMap;
|
|
|
|
|
use std::io::{BufRead, BufReader};
|
2026-02-19 17:58:53 +00:00
|
|
|
use std::path::{Path, PathBuf};
|
2026-02-20 15:02:34 +00:00
|
|
|
use std::process::Command;
|
2026-02-19 17:58:53 +00:00
|
|
|
use std::sync::{Arc, Mutex};
|
|
|
|
|
use tokio::sync::broadcast;
|
|
|
|
|
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
/// Build the composite key used to track agents in the pool.
fn composite_key(story_id: &str, agent_name: &str) -> String {
    // Pre-size the buffer: story id + ':' separator + agent name.
    let mut key = String::with_capacity(story_id.len() + 1 + agent_name.len());
    key.push_str(story_id);
    key.push(':');
    key.push_str(agent_name);
    key
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Events streamed from a running agent to SSE clients.
///
/// Serialized with an external `type` tag and snake_case names, so consumers
/// see e.g. `{"type": "status", "story_id": ..., ...}`.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AgentEvent {
    /// Agent status changed.
    Status {
        story_id: String,
        agent_name: String,
        /// New status as a lowercase string (e.g. "pending", "running").
        status: String,
    },
    /// Raw text output from the agent process.
    Output {
        story_id: String,
        agent_name: String,
        text: String,
    },
    /// Agent produced a JSON event from `--output-format stream-json`.
    AgentJson {
        story_id: String,
        agent_name: String,
        /// The parsed JSON payload, forwarded as-is.
        data: serde_json::Value,
    },
    /// Agent finished.
    Done {
        story_id: String,
        agent_name: String,
        /// Session id reported by the agent, if any was captured.
        session_id: Option<String>,
    },
    /// Agent errored.
    Error {
        story_id: String,
        agent_name: String,
        /// Human-readable description of the failure.
        message: String,
    },
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
#[derive(Debug, Clone, Serialize, PartialEq)]
|
2026-02-19 15:25:22 +00:00
|
|
|
#[serde(rename_all = "snake_case")]
|
|
|
|
|
pub enum AgentStatus {
|
2026-02-19 17:58:53 +00:00
|
|
|
Pending,
|
2026-02-19 15:25:22 +00:00
|
|
|
Running,
|
2026-02-19 17:58:53 +00:00
|
|
|
Completed,
|
|
|
|
|
Failed,
|
2026-02-19 15:25:22 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
impl std::fmt::Display for AgentStatus {
|
|
|
|
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
|
|
|
match self {
|
|
|
|
|
Self::Pending => write!(f, "pending"),
|
|
|
|
|
Self::Running => write!(f, "running"),
|
|
|
|
|
Self::Completed => write!(f, "completed"),
|
|
|
|
|
Self::Failed => write!(f, "failed"),
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-02-19 15:25:22 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-23 13:13:41 +00:00
|
|
|
/// Pipeline stages for automatic story advancement.
#[derive(Debug, Clone, PartialEq)]
pub enum PipelineStage {
    /// Coding agents (coder-1, coder-2, etc.)
    Coder,
    /// QA review agent
    Qa,
    /// Mergemaster agent
    Mergemaster,
    /// Supervisors and unknown agents — no automatic advancement.
    Other,
}

/// Determine the pipeline stage from an agent name.
///
/// Exact names ("qa", "mergemaster") take precedence; any name beginning
/// with "coder" maps to the coding stage; everything else is `Other`.
pub fn pipeline_stage(agent_name: &str) -> PipelineStage {
    if agent_name == "qa" {
        PipelineStage::Qa
    } else if agent_name == "mergemaster" {
        PipelineStage::Mergemaster
    } else if agent_name.starts_with("coder") {
        PipelineStage::Coder
    } else {
        PipelineStage::Other
    }
}
|
|
|
|
|
|
2026-02-20 15:02:34 +00:00
|
|
|
/// Report produced by an agent calling `report_completion`.
#[derive(Debug, Serialize, Clone)]
pub struct CompletionReport {
    /// Agent-supplied summary of the work performed.
    pub summary: String,
    /// Whether the reported gates passed.
    pub gates_passed: bool,
    /// Raw output captured from the gate run.
    pub gate_output: String,
}
|
|
|
|
|
|
2026-02-20 13:16:04 +00:00
|
|
|
/// Serializable snapshot of a pooled agent's state
/// (built from a `StoryAgent` by `agent_info_from_entry`).
#[derive(Debug, Serialize, Clone)]
pub struct AgentInfo {
    pub story_id: String,
    /// Name of the configured agent (e.g. "coder-1", "qa").
    pub agent_name: String,
    pub status: AgentStatus,
    /// Session id reported by the agent, if any was captured.
    pub session_id: Option<String>,
    /// Filesystem path of the agent's worktree, once one exists.
    pub worktree_path: Option<String>,
    /// Base branch the worktree was created from, once one exists.
    pub base_branch: Option<String>,
    /// Present once the agent has called `report_completion`.
    pub completion: Option<CompletionReport>,
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Internal per-agent state tracked by the pool, keyed by the
/// `"story_id:agent_name"` composite key.
struct StoryAgent {
    /// Name of the configured agent (e.g. "coder-1", "qa").
    agent_name: String,
    /// Current lifecycle status.
    status: AgentStatus,
    /// Worktree assigned to this agent, set once it has been created.
    worktree_info: Option<WorktreeInfo>,
    /// Session id reported by the agent, if any was captured.
    session_id: Option<String>,
    /// Broadcast channel fanning `AgentEvent`s out to subscribers.
    tx: broadcast::Sender<AgentEvent>,
    /// Handle of the spawned task driving the agent process, if running.
    task_handle: Option<tokio::task::JoinHandle<()>>,
    /// Accumulated events for polling via get_agent_output.
    event_log: Arc<Mutex<Vec<AgentEvent>>>,
    /// Set when the agent calls report_completion.
    completion: Option<CompletionReport>,
    /// Project root, stored for pipeline advancement after completion.
    project_root: Option<PathBuf>,
}
|
|
|
|
|
|
2026-02-20 13:16:04 +00:00
|
|
|
/// Build an `AgentInfo` snapshot from a `StoryAgent` map entry.
|
|
|
|
|
fn agent_info_from_entry(story_id: &str, agent: &StoryAgent) -> AgentInfo {
|
|
|
|
|
AgentInfo {
|
|
|
|
|
story_id: story_id.to_string(),
|
|
|
|
|
agent_name: agent.agent_name.clone(),
|
|
|
|
|
status: agent.status.clone(),
|
|
|
|
|
session_id: agent.session_id.clone(),
|
|
|
|
|
worktree_path: agent
|
|
|
|
|
.worktree_info
|
|
|
|
|
.as_ref()
|
|
|
|
|
.map(|wt| wt.path.to_string_lossy().to_string()),
|
|
|
|
|
base_branch: agent
|
|
|
|
|
.worktree_info
|
|
|
|
|
.as_ref()
|
|
|
|
|
.map(|wt| wt.base_branch.clone()),
|
2026-02-20 15:02:34 +00:00
|
|
|
completion: agent.completion.clone(),
|
2026-02-20 13:16:04 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Manages concurrent story agents, each in its own worktree.
pub struct AgentPool {
    /// Live agents keyed by the `"story_id:agent_name"` composite key.
    agents: Arc<Mutex<HashMap<String, StoryAgent>>>,
    /// Local server port, forwarded to `worktree::create_worktree` when
    /// starting an agent.
    port: u16,
}
|
|
|
|
|
|
|
|
|
|
impl AgentPool {
|
2026-02-20 13:24:35 +00:00
|
|
|
pub fn new(port: u16) -> Self {
|
2026-02-19 15:25:22 +00:00
|
|
|
Self {
|
2026-02-19 17:58:53 +00:00
|
|
|
agents: Arc::new(Mutex::new(HashMap::new())),
|
2026-02-20 13:24:35 +00:00
|
|
|
port,
|
2026-02-19 15:25:22 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Start an agent for a story: load config, create worktree, spawn agent.
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
/// If `agent_name` is None, defaults to the first configured agent.
|
2026-02-23 13:13:41 +00:00
|
|
|
/// If `resume_context` is provided, it is appended to the rendered prompt
|
|
|
|
|
/// so the agent can pick up from a previous failed attempt.
|
2026-02-19 17:58:53 +00:00
|
|
|
pub async fn start_agent(
|
|
|
|
|
&self,
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: Option<&str>,
|
2026-02-23 13:13:41 +00:00
|
|
|
resume_context: Option<&str>,
|
2026-02-19 17:58:53 +00:00
|
|
|
) -> Result<AgentInfo, String> {
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
let config = ProjectConfig::load(project_root)?;
|
|
|
|
|
|
|
|
|
|
// Resolve agent name from config
|
|
|
|
|
let resolved_name = match agent_name {
|
|
|
|
|
Some(name) => {
|
|
|
|
|
config
|
|
|
|
|
.find_agent(name)
|
|
|
|
|
.ok_or_else(|| format!("No agent named '{name}' in config"))?;
|
|
|
|
|
name.to_string()
|
|
|
|
|
}
|
|
|
|
|
None => config
|
|
|
|
|
.default_agent()
|
|
|
|
|
.ok_or_else(|| "No agents configured".to_string())?
|
|
|
|
|
.name
|
|
|
|
|
.clone(),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let key = composite_key(story_id, &resolved_name);
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
// Check not already running
|
|
|
|
|
{
|
|
|
|
|
let agents = self.agents.lock().map_err(|e| e.to_string())?;
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
if let Some(agent) = agents.get(&key)
|
|
|
|
|
&& (agent.status == AgentStatus::Running || agent.status == AgentStatus::Pending)
|
|
|
|
|
{
|
|
|
|
|
return Err(format!(
|
|
|
|
|
"Agent '{resolved_name}' for story '{story_id}' is already {}",
|
|
|
|
|
agent.status
|
|
|
|
|
));
|
|
|
|
|
}
|
2026-02-19 17:58:53 +00:00
|
|
|
}
|
2026-02-19 15:25:22 +00:00
|
|
|
|
2026-02-20 11:57:25 +00:00
|
|
|
let (tx, _) = broadcast::channel::<AgentEvent>(1024);
|
|
|
|
|
|
|
|
|
|
let event_log: Arc<Mutex<Vec<AgentEvent>>> = Arc::new(Mutex::new(Vec::new()));
|
2026-02-19 17:58:53 +00:00
|
|
|
|
|
|
|
|
// Register as pending
|
|
|
|
|
{
|
|
|
|
|
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
|
|
|
|
|
agents.insert(
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
key.clone(),
|
2026-02-19 17:58:53 +00:00
|
|
|
StoryAgent {
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: resolved_name.clone(),
|
2026-02-19 17:58:53 +00:00
|
|
|
status: AgentStatus::Pending,
|
|
|
|
|
worktree_info: None,
|
|
|
|
|
session_id: None,
|
|
|
|
|
tx: tx.clone(),
|
|
|
|
|
task_handle: None,
|
2026-02-20 11:57:25 +00:00
|
|
|
event_log: event_log.clone(),
|
2026-02-20 15:02:34 +00:00
|
|
|
completion: None,
|
2026-02-23 13:13:41 +00:00
|
|
|
project_root: Some(project_root.to_path_buf()),
|
2026-02-19 17:58:53 +00:00
|
|
|
},
|
|
|
|
|
);
|
2026-02-19 15:25:22 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
let _ = tx.send(AgentEvent::Status {
|
|
|
|
|
story_id: story_id.to_string(),
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: resolved_name.clone(),
|
2026-02-19 17:58:53 +00:00
|
|
|
status: "pending".to_string(),
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-20 15:31:13 +00:00
|
|
|
// Move story from upcoming/ to current/ and auto-commit before creating the worktree.
|
|
|
|
|
move_story_to_current(project_root, story_id)?;
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
// Create worktree
|
2026-02-20 13:24:35 +00:00
|
|
|
let wt_info = worktree::create_worktree(project_root, story_id, &config, self.port).await?;
|
2026-02-19 15:25:22 +00:00
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
// Update with worktree info
|
|
|
|
|
{
|
|
|
|
|
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
if let Some(agent) = agents.get_mut(&key) {
|
2026-02-19 17:58:53 +00:00
|
|
|
agent.worktree_info = Some(wt_info.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Spawn the agent process
|
|
|
|
|
let wt_path_str = wt_info.path.to_string_lossy().to_string();
|
2026-02-23 13:13:41 +00:00
|
|
|
let (command, args, mut prompt) =
|
2026-02-20 12:48:50 +00:00
|
|
|
config.render_agent_args(&wt_path_str, story_id, Some(&resolved_name), Some(&wt_info.base_branch))?;
|
2026-02-19 17:58:53 +00:00
|
|
|
|
2026-02-23 13:13:41 +00:00
|
|
|
// Append resume context if this is a restart with failure information.
|
|
|
|
|
if let Some(ctx) = resume_context {
|
|
|
|
|
prompt.push_str(ctx);
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
let sid = story_id.to_string();
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
let aname = resolved_name.clone();
|
2026-02-19 17:58:53 +00:00
|
|
|
let tx_clone = tx.clone();
|
|
|
|
|
let agents_ref = self.agents.clone();
|
|
|
|
|
let cwd = wt_path_str.clone();
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
let key_clone = key.clone();
|
2026-02-20 11:57:25 +00:00
|
|
|
let log_clone = event_log.clone();
|
2026-02-19 17:58:53 +00:00
|
|
|
|
|
|
|
|
let handle = tokio::spawn(async move {
|
|
|
|
|
let _ = tx_clone.send(AgentEvent::Status {
|
|
|
|
|
story_id: sid.clone(),
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: aname.clone(),
|
2026-02-19 17:58:53 +00:00
|
|
|
status: "running".to_string(),
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-20 11:57:25 +00:00
|
|
|
match run_agent_pty_streaming(
|
|
|
|
|
&sid, &aname, &command, &args, &prompt, &cwd, &tx_clone, &log_clone,
|
|
|
|
|
)
|
|
|
|
|
.await
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
{
|
2026-02-19 17:58:53 +00:00
|
|
|
Ok(session_id) => {
|
|
|
|
|
if let Ok(mut agents) = agents_ref.lock()
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
&& let Some(agent) = agents.get_mut(&key_clone)
|
|
|
|
|
{
|
|
|
|
|
agent.status = AgentStatus::Completed;
|
|
|
|
|
agent.session_id = session_id.clone();
|
|
|
|
|
}
|
2026-02-19 17:58:53 +00:00
|
|
|
let _ = tx_clone.send(AgentEvent::Done {
|
|
|
|
|
story_id: sid.clone(),
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: aname.clone(),
|
2026-02-19 17:58:53 +00:00
|
|
|
session_id,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
Err(e) => {
|
|
|
|
|
if let Ok(mut agents) = agents_ref.lock()
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
&& let Some(agent) = agents.get_mut(&key_clone)
|
|
|
|
|
{
|
|
|
|
|
agent.status = AgentStatus::Failed;
|
|
|
|
|
}
|
2026-02-19 17:58:53 +00:00
|
|
|
let _ = tx_clone.send(AgentEvent::Error {
|
|
|
|
|
story_id: sid.clone(),
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: aname.clone(),
|
2026-02-19 17:58:53 +00:00
|
|
|
message: e,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Update status to running with task handle
|
|
|
|
|
{
|
|
|
|
|
let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
if let Some(agent) = agents.get_mut(&key) {
|
2026-02-19 17:58:53 +00:00
|
|
|
agent.status = AgentStatus::Running;
|
|
|
|
|
agent.task_handle = Some(handle);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(AgentInfo {
|
|
|
|
|
story_id: story_id.to_string(),
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: resolved_name,
|
2026-02-19 17:58:53 +00:00
|
|
|
status: AgentStatus::Running,
|
2026-02-19 15:25:22 +00:00
|
|
|
session_id: None,
|
2026-02-19 17:58:53 +00:00
|
|
|
worktree_path: Some(wt_path_str),
|
2026-02-20 12:48:50 +00:00
|
|
|
base_branch: Some(wt_info.base_branch.clone()),
|
2026-02-20 15:02:34 +00:00
|
|
|
completion: None,
|
2026-02-19 17:58:53 +00:00
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 11:57:25 +00:00
|
|
|
    /// Stop a running agent. Worktree is preserved for inspection.
    ///
    /// Sequence (order matters — see inline notes):
    /// 1. Under the pool lock: take the task handle, clone the worktree info
    ///    and event sender, and mark the agent `Failed`.
    /// 2. Outside the lock: abort the tokio task and await its teardown.
    /// 3. Broadcast a `"stopped"` status event to subscribers.
    /// 4. Remove the agent entry from the pool map.
    ///
    /// # Errors
    /// Returns `Err` if no agent with `agent_name` exists for `story_id`,
    /// or if a pool/agent mutex is poisoned.
    pub async fn stop_agent(
        &self,
        _project_root: &Path,
        story_id: &str,
        agent_name: &str,
    ) -> Result<(), String> {
        let key = composite_key(story_id, agent_name);

        // Extract everything we need inside a block scope so the std::sync
        // MutexGuard is dropped before the `.await` below — holding it across
        // an await point would block the executor thread.
        let (worktree_info, task_handle, tx) = {
            let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
            let agent = agents
                .get_mut(&key)
                .ok_or_else(|| format!("No agent '{agent_name}' for story '{story_id}'"))?;

            let wt = agent.worktree_info.clone();
            // `take()` the handle so a concurrent stop cannot abort twice.
            let handle = agent.task_handle.take();
            let tx = agent.tx.clone();
            // NOTE(review): a manual stop is recorded as `Failed` — there is
            // apparently no dedicated `Stopped` variant; confirm this is the
            // intended status for stopped agents.
            agent.status = AgentStatus::Failed;
            (wt, handle, tx)
        };

        // Abort the task
        if let Some(handle) = task_handle {
            handle.abort();
            // Await so teardown completes before we report "stopped";
            // the JoinError from abortion is expected and ignored.
            let _ = handle.await;
        }

        // Preserve worktree for inspection — don't destroy agent's work on stop.
        if let Some(ref wt) = worktree_info {
            eprintln!(
                "[agents] Worktree preserved for {story_id}:{agent_name}: {}",
                wt.path.display()
            );
        }

        // Broadcast the terminal "stopped" event BEFORE removing the entry:
        // wait_for_agent treats this event as terminal and has a fallback for
        // the agent already being gone from the map. Send errors (no
        // subscribers) are ignored.
        let _ = tx.send(AgentEvent::Status {
            story_id: story_id.to_string(),
            agent_name: agent_name.to_string(),
            status: "stopped".to_string(),
        });

        // Remove from map
        {
            let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
            agents.remove(&key);
        }

        Ok(())
    }
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// List all agents with their status.
|
2026-02-19 15:25:22 +00:00
|
|
|
pub fn list_agents(&self) -> Result<Vec<AgentInfo>, String> {
|
|
|
|
|
let agents = self.agents.lock().map_err(|e| e.to_string())?;
|
|
|
|
|
Ok(agents
|
|
|
|
|
.iter()
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
.map(|(key, agent)| {
|
|
|
|
|
// Extract story_id from composite key "story_id:agent_name"
|
|
|
|
|
let story_id = key
|
|
|
|
|
.rsplit_once(':')
|
|
|
|
|
.map(|(sid, _)| sid.to_string())
|
|
|
|
|
.unwrap_or_else(|| key.clone());
|
2026-02-20 13:16:04 +00:00
|
|
|
agent_info_from_entry(&story_id, agent)
|
2026-02-19 15:25:22 +00:00
|
|
|
})
|
|
|
|
|
.collect())
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Subscribe to events for a story agent.
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
pub fn subscribe(
|
|
|
|
|
&self,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
agent_name: &str,
|
|
|
|
|
) -> Result<broadcast::Receiver<AgentEvent>, String> {
|
|
|
|
|
let key = composite_key(story_id, agent_name);
|
2026-02-19 17:58:53 +00:00
|
|
|
let agents = self.agents.lock().map_err(|e| e.to_string())?;
|
|
|
|
|
let agent = agents
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
.get(&key)
|
|
|
|
|
.ok_or_else(|| format!("No agent '{agent_name}' for story '{story_id}'"))?;
|
2026-02-19 17:58:53 +00:00
|
|
|
Ok(agent.tx.subscribe())
|
|
|
|
|
}
|
2026-02-19 15:25:22 +00:00
|
|
|
|
2026-02-20 11:57:25 +00:00
|
|
|
/// Drain accumulated events for polling. Returns all events since the last drain.
|
|
|
|
|
pub fn drain_events(
|
|
|
|
|
&self,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
agent_name: &str,
|
|
|
|
|
) -> Result<Vec<AgentEvent>, String> {
|
|
|
|
|
let key = composite_key(story_id, agent_name);
|
|
|
|
|
let agents = self.agents.lock().map_err(|e| e.to_string())?;
|
|
|
|
|
let agent = agents
|
|
|
|
|
.get(&key)
|
|
|
|
|
.ok_or_else(|| format!("No agent '{agent_name}' for story '{story_id}'"))?;
|
|
|
|
|
let mut log = agent.event_log.lock().map_err(|e| e.to_string())?;
|
|
|
|
|
Ok(log.drain(..).collect())
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 13:16:04 +00:00
|
|
|
    /// Block until the agent reaches a terminal state (completed, failed, stopped).
    /// Returns the agent's final `AgentInfo`.
    /// `timeout_ms` caps how long to wait; returns an error if the deadline passes.
    ///
    /// Race handling:
    /// - Subscribes to the event stream BEFORE reading current status, so a
    ///   terminal event landing between the two steps cannot be missed.
    /// - If the agent entry has already been removed (stop_agent removes it
    ///   after broadcasting "stopped"), a synthetic `AgentInfo` is built from
    ///   the terminal event itself.
    /// - `Lagged` receivers re-check the stored status instead of failing.
    ///
    /// All `std::sync::Mutex` guards here are scoped so none is held across
    /// an `.await` point.
    pub async fn wait_for_agent(
        &self,
        story_id: &str,
        agent_name: &str,
        timeout_ms: u64,
    ) -> Result<AgentInfo, String> {
        // Subscribe before checking status so we don't miss the terminal event
        // if the agent completes in the window between the two operations.
        let mut rx = self.subscribe(story_id, agent_name)?;

        // Return immediately if already in a terminal state.
        {
            let agents = self.agents.lock().map_err(|e| e.to_string())?;
            let key = composite_key(story_id, agent_name);
            if let Some(agent) = agents.get(&key)
                && matches!(agent.status, AgentStatus::Completed | AgentStatus::Failed)
            {
                return Ok(agent_info_from_entry(story_id, agent));
            }
        }

        // Absolute deadline; each recv() below waits only for the remainder.
        let deadline =
            tokio::time::Instant::now() + std::time::Duration::from_millis(timeout_ms);

        loop {
            // saturating_duration_since avoids a panic if the deadline passed.
            let remaining = deadline.saturating_duration_since(tokio::time::Instant::now());
            if remaining.is_zero() {
                return Err(format!(
                    "Timed out after {timeout_ms}ms waiting for agent '{agent_name}' on story '{story_id}'"
                ));
            }

            match tokio::time::timeout(remaining, rx.recv()).await {
                Ok(Ok(event)) => {
                    // Terminal events: Done, Error, or a Status with the
                    // literal "stopped" payload (sent by stop_agent).
                    let is_terminal = match &event {
                        AgentEvent::Done { .. } | AgentEvent::Error { .. } => true,
                        AgentEvent::Status { status, .. } if status == "stopped" => true,
                        _ => false,
                    };
                    if is_terminal {
                        // Prefer the authoritative map entry when it still exists.
                        let agents = self.agents.lock().map_err(|e| e.to_string())?;
                        let key = composite_key(story_id, agent_name);
                        return Ok(if let Some(agent) = agents.get(&key) {
                            agent_info_from_entry(story_id, agent)
                        } else {
                            // Agent was removed from map (e.g. stop_agent removes it after
                            // the "stopped" status event is sent).
                            // Reconstruct a minimal AgentInfo from the event:
                            // Done => Completed (with its session id); anything
                            // else (Error / "stopped") => Failed.
                            let (status, session_id) = match event {
                                AgentEvent::Done { session_id, .. } => {
                                    (AgentStatus::Completed, session_id)
                                }
                                _ => (AgentStatus::Failed, None),
                            };
                            AgentInfo {
                                story_id: story_id.to_string(),
                                agent_name: agent_name.to_string(),
                                status,
                                session_id,
                                worktree_path: None,
                                base_branch: None,
                                completion: None,
                            }
                        });
                    }
                }
                Ok(Err(broadcast::error::RecvError::Lagged(_))) => {
                    // Missed some buffered events — check current status before resuming.
                    let agents = self.agents.lock().map_err(|e| e.to_string())?;
                    let key = composite_key(story_id, agent_name);
                    if let Some(agent) = agents.get(&key)
                        && matches!(agent.status, AgentStatus::Completed | AgentStatus::Failed)
                    {
                        return Ok(agent_info_from_entry(story_id, agent));
                    }
                    // Still running — continue the loop.
                }
                Ok(Err(broadcast::error::RecvError::Closed)) => {
                    // Channel closed: no more events will arrive. Return current state.
                    let agents = self.agents.lock().map_err(|e| e.to_string())?;
                    let key = composite_key(story_id, agent_name);
                    if let Some(agent) = agents.get(&key) {
                        return Ok(agent_info_from_entry(story_id, agent));
                    }
                    return Err(format!(
                        "Agent '{agent_name}' for story '{story_id}' channel closed unexpectedly"
                    ));
                }
                Err(_) => {
                    // tokio timeout elapsed before any event arrived.
                    return Err(format!(
                        "Timed out after {timeout_ms}ms waiting for agent '{agent_name}' on story '{story_id}'"
                    ));
                }
            }
        }
    }
|
|
|
|
|
|
2026-02-20 14:09:52 +00:00
|
|
|
/// Create a worktree for the given story using the server port (writes .mcp.json).
|
|
|
|
|
pub async fn create_worktree(
|
|
|
|
|
&self,
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
) -> Result<worktree::WorktreeInfo, String> {
|
|
|
|
|
let config = ProjectConfig::load(project_root)?;
|
|
|
|
|
worktree::create_worktree(project_root, story_id, &config, self.port).await
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-23 13:13:41 +00:00
|
|
|
/// Advance the pipeline after an agent completes.
|
|
|
|
|
///
|
|
|
|
|
/// Called internally by `report_completion` as a background task.
|
|
|
|
|
/// Reads the stored completion report and project_root from the agent,
|
|
|
|
|
/// then drives the next pipeline stage based on the agent's role:
|
|
|
|
|
///
|
|
|
|
|
/// - **Coder** + gates passed → move story to `work/3_qa/`, start `qa` agent.
|
|
|
|
|
/// - **Coder** + gates failed → restart the same coder agent with failure context.
|
2026-02-23 13:40:12 +00:00
|
|
|
/// - **QA** + gates passed + coverage passed → move story to `work/4_merge/`, start `mergemaster` agent.
|
|
|
|
|
/// - **QA** + gates passed + coverage failed → restart `qa` with coverage failure context.
|
2026-02-23 13:13:41 +00:00
|
|
|
/// - **QA** + gates failed → restart `qa` with failure context.
|
|
|
|
|
/// - **Mergemaster** → run `script/test` on master; if pass: archive + cleanup worktree;
|
|
|
|
|
/// if fail: restart `mergemaster` with failure context.
|
|
|
|
|
/// - **Other** (supervisor, unknown) → no automatic advancement.
|
|
|
|
|
    async fn run_pipeline_advance_for_completed_agent(&self, story_id: &str, agent_name: &str) {
        let key = composite_key(story_id, agent_name);

        // Snapshot everything needed from the agent record inside a short-lived
        // lock scope, so the std::sync::Mutex guard is dropped before any `.await`.
        let (completion, project_root, worktree_path) = {
            let agents = match self.agents.lock() {
                Ok(a) => a,
                Err(e) => {
                    // Poisoned lock: log and bail rather than panic this background task.
                    eprintln!("[pipeline] Failed to lock agents for '{story_id}:{agent_name}': {e}");
                    return;
                }
            };
            let agent = match agents.get(&key) {
                Some(a) => a,
                // Agent entry disappeared (e.g. stopped) — nothing to advance.
                None => return,
            };
            let wt_path = agent
                .worktree_info
                .as_ref()
                .map(|wt| wt.path.clone());
            (agent.completion.clone(), agent.project_root.clone(), wt_path)
        };

        // A completion report and project root are both prerequisites for advancement.
        let completion = match completion {
            Some(c) => c,
            None => {
                eprintln!("[pipeline] No completion report for '{story_id}:{agent_name}'");
                return;
            }
        };
        let project_root = match project_root {
            Some(p) => p,
            None => {
                eprintln!("[pipeline] No project_root for '{story_id}:{agent_name}'");
                return;
            }
        };

        // Map the agent's name onto its pipeline stage (coder / qa / mergemaster / other).
        let stage = pipeline_stage(agent_name);

        match stage {
            PipelineStage::Other => {
                // Supervisors and unknown agents do not advance the pipeline.
            }
            PipelineStage::Coder => {
                // Coder done: gates passed → hand off to QA; gates failed → restart
                // the same coder with the gate failure output appended as context.
                if completion.gates_passed {
                    eprintln!(
                        "[pipeline] Coder '{agent_name}' passed gates for '{story_id}'. Moving to QA."
                    );
                    if let Err(e) = move_story_to_qa(&project_root, story_id) {
                        eprintln!("[pipeline] Failed to move '{story_id}' to 3_qa/: {e}");
                        return;
                    }
                    if let Err(e) = self
                        .start_agent(&project_root, story_id, Some("qa"), None)
                        .await
                    {
                        eprintln!("[pipeline] Failed to start qa agent for '{story_id}': {e}");
                    }
                } else {
                    eprintln!(
                        "[pipeline] Coder '{agent_name}' failed gates for '{story_id}'. Restarting."
                    );
                    let context = format!(
                        "\n\n---\n## Previous Attempt Failed\n\
                        The acceptance gates failed with the following output:\n{}\n\n\
                        Please review the failures above, fix the issues, and try again.",
                        completion.gate_output
                    );
                    if let Err(e) = self
                        .start_agent(&project_root, story_id, Some(agent_name), Some(&context))
                        .await
                    {
                        eprintln!(
                            "[pipeline] Failed to restart coder '{agent_name}' for '{story_id}': {e}"
                        );
                    }
                }
            }
            PipelineStage::Qa => {
                if completion.gates_passed {
                    // Run coverage gate in the QA worktree before advancing to merge.
                    // Falls back to the project root when no worktree is recorded.
                    let coverage_path = worktree_path.clone().unwrap_or_else(|| project_root.clone());
                    let cp = coverage_path.clone();
                    // Coverage runs external commands; keep it off the async runtime.
                    let coverage_result =
                        tokio::task::spawn_blocking(move || run_coverage_gate(&cp))
                            .await
                            .unwrap_or_else(|e| {
                                // Join error (task panic): treat as a failed coverage gate.
                                eprintln!("[pipeline] Coverage gate task panicked: {e}");
                                Ok((false, format!("Coverage gate task panicked: {e}")))
                            });
                    let (coverage_passed, coverage_output) = match coverage_result {
                        Ok(pair) => pair,
                        Err(e) => (false, e),
                    };

                    if coverage_passed {
                        eprintln!(
                            "[pipeline] QA passed gates and coverage for '{story_id}'. Moving to merge."
                        );
                        if let Err(e) = move_story_to_merge(&project_root, story_id) {
                            eprintln!("[pipeline] Failed to move '{story_id}' to 4_merge/: {e}");
                            return;
                        }
                        if let Err(e) = self
                            .start_agent(&project_root, story_id, Some("mergemaster"), None)
                            .await
                        {
                            eprintln!("[pipeline] Failed to start mergemaster for '{story_id}': {e}");
                        }
                    } else {
                        // Gates passed but coverage did not: restart QA with the
                        // coverage failure output appended as context.
                        eprintln!(
                            "[pipeline] QA coverage gate failed for '{story_id}'. Restarting QA."
                        );
                        let context = format!(
                            "\n\n---\n## Coverage Gate Failed\n\
                            The coverage gate (script/test_coverage) failed with the following output:\n{}\n\n\
                            Please improve test coverage until the coverage gate passes.",
                            coverage_output
                        );
                        if let Err(e) = self
                            .start_agent(&project_root, story_id, Some("qa"), Some(&context))
                            .await
                        {
                            eprintln!("[pipeline] Failed to restart qa for '{story_id}': {e}");
                        }
                    }
                } else {
                    // QA's own acceptance gates failed: restart QA with the failure output.
                    eprintln!(
                        "[pipeline] QA failed gates for '{story_id}'. Restarting."
                    );
                    let context = format!(
                        "\n\n---\n## Previous QA Attempt Failed\n\
                        The acceptance gates failed with the following output:\n{}\n\n\
                        Please re-run and fix the issues.",
                        completion.gate_output
                    );
                    if let Err(e) = self
                        .start_agent(&project_root, story_id, Some("qa"), Some(&context))
                        .await
                    {
                        eprintln!("[pipeline] Failed to restart qa for '{story_id}': {e}");
                    }
                }
            }
            PipelineStage::Mergemaster => {
                // Run script/test on master (project_root) as the post-merge verification.
                eprintln!(
                    "[pipeline] Mergemaster completed for '{story_id}'. Running post-merge tests on master."
                );
                let root = project_root.clone();
                let test_result = tokio::task::spawn_blocking(move || run_project_tests(&root))
                    .await
                    .unwrap_or_else(|e| {
                        // Join error (task panic): treat as failed tests.
                        eprintln!("[pipeline] Post-merge test task panicked: {e}");
                        Ok((false, format!("Test task panicked: {e}")))
                    });
                let (passed, output) = match test_result {
                    Ok(pair) => pair,
                    Err(e) => (false, e),
                };

                if passed {
                    // Tests green on master: archive the story and tear down its worktree.
                    // Archival/cleanup failures are logged but do not retrigger the pipeline.
                    eprintln!(
                        "[pipeline] Post-merge tests passed for '{story_id}'. Archiving."
                    );
                    if let Err(e) = move_story_to_archived(&project_root, story_id) {
                        eprintln!("[pipeline] Failed to archive '{story_id}': {e}");
                    }
                    let config =
                        crate::config::ProjectConfig::load(&project_root).unwrap_or_default();
                    if let Err(e) =
                        worktree::remove_worktree_by_story_id(&project_root, story_id, &config)
                            .await
                    {
                        eprintln!(
                            "[pipeline] Failed to remove worktree for '{story_id}': {e}"
                        );
                    }
                    eprintln!(
                        "[pipeline] Story '{story_id}' archived and worktree cleaned up."
                    );
                } else {
                    // Tests red on master: restart mergemaster with the failure output.
                    eprintln!(
                        "[pipeline] Post-merge tests failed for '{story_id}'. Restarting mergemaster."
                    );
                    let context = format!(
                        "\n\n---\n## Post-Merge Test Failed\n\
                        The tests on master failed with the following output:\n{}\n\n\
                        Please investigate and resolve the failures, then call merge_agent_work again.",
                        output
                    );
                    if let Err(e) = self
                        .start_agent(&project_root, story_id, Some("mergemaster"), Some(&context))
                        .await
                    {
                        eprintln!(
                            "[pipeline] Failed to restart mergemaster for '{story_id}': {e}"
                        );
                    }
                }
            }
        }
    }
|
|
|
|
|
|
2026-02-20 15:02:34 +00:00
|
|
|
    /// Report that an agent has finished work on a story.
    ///
    /// - Rejects with an error if the worktree has uncommitted changes.
    /// - Runs acceptance gates (cargo clippy + cargo nextest run / cargo test).
    /// - Stores the `CompletionReport` on the agent record.
    /// - Transitions status to `Completed` (gates passed) or `Failed` (gates failed).
    /// - Emits a `Done` event so `wait_for_agent` unblocks.
    /// - Spawns a background task that advances the pipeline state machine.
    pub async fn report_completion(
        &self,
        story_id: &str,
        agent_name: &str,
        summary: &str,
    ) -> Result<CompletionReport, String> {
        let key = composite_key(story_id, agent_name);

        // Verify agent exists, is Running, and grab its worktree path.
        // Scoped so the mutex guard drops before the `.await` below.
        let worktree_path = {
            let agents = self.agents.lock().map_err(|e| e.to_string())?;
            let agent = agents
                .get(&key)
                .ok_or_else(|| format!("No agent '{agent_name}' for story '{story_id}'"))?;

            if agent.status != AgentStatus::Running {
                return Err(format!(
                    "Agent '{agent_name}' for story '{story_id}' is not running (status: {}). \
                    report_completion can only be called by a running agent.",
                    agent.status
                ));
            }

            agent
                .worktree_info
                .as_ref()
                .map(|wt| wt.path.clone())
                .ok_or_else(|| {
                    format!(
                        "Agent '{agent_name}' for story '{story_id}' has no worktree. \
                        Cannot run acceptance gates."
                    )
                })?
        };

        let path = worktree_path.clone();

        // Run gate checks in a blocking thread to avoid stalling the async runtime.
        let (gates_passed, gate_output) = tokio::task::spawn_blocking(move || {
            // Step 1: Reject if worktree is dirty.
            check_uncommitted_changes(&path)?;
            // Step 2: Run clippy + tests and return (passed, output).
            run_acceptance_gates(&path)
        })
        .await
        .map_err(|e| format!("Gate check task panicked: {e}"))??;

        let report = CompletionReport {
            summary: summary.to_string(),
            gates_passed,
            gate_output,
        };

        // Store the completion report and advance status.
        // The agent may have been removed while gates ran, hence the re-lookup.
        let (tx, session_id) = {
            let mut agents = self.agents.lock().map_err(|e| e.to_string())?;
            let agent = agents.get_mut(&key).ok_or_else(|| {
                format!("Agent '{agent_name}' for story '{story_id}' disappeared during gate check")
            })?;
            agent.completion = Some(report.clone());
            agent.status = if gates_passed {
                AgentStatus::Completed
            } else {
                AgentStatus::Failed
            };
            (agent.tx.clone(), agent.session_id.clone())
        };

        // Emit Done so wait_for_agent unblocks. Send errors (no subscribers) are ignored.
        let _ = tx.send(AgentEvent::Done {
            story_id: story_id.to_string(),
            agent_name: agent_name.to_string(),
            session_id,
        });

        // Advance the pipeline state machine in a background task.
        // NOTE(review): this spawns unconditionally — the pipeline handler runs for
        // both passed and failed gates, and decides itself whether to advance the
        // story or restart the agent with failure context.
        let pool_clone = Self {
            agents: Arc::clone(&self.agents),
            port: self.port,
        };
        let sid = story_id.to_string();
        let aname = agent_name.to_string();
        tokio::spawn(async move {
            pool_clone
                .run_pipeline_advance_for_completed_agent(&sid, &aname)
                .await;
        });

        Ok(report)
    }
|
|
|
|
|
|
2026-02-20 17:36:35 +00:00
|
|
|
/// Run the full mergemaster pipeline for a completed story:
|
|
|
|
|
///
|
|
|
|
|
/// 1. Squash-merge the story's feature branch into the current branch (master).
|
|
|
|
|
/// 2. If conflicts are found: abort the merge and report them.
|
|
|
|
|
/// 3. If the merge succeeds: run quality gates (cargo clippy + tests + pnpm).
|
|
|
|
|
/// 4. If all gates pass: archive the story and clean up the worktree.
|
|
|
|
|
///
|
|
|
|
|
/// Returns a `MergeReport` with full details of what happened.
|
|
|
|
|
pub async fn merge_agent_work(
|
|
|
|
|
&self,
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
) -> Result<MergeReport, String> {
|
|
|
|
|
let branch = format!("feature/story-{story_id}");
|
|
|
|
|
let wt_path = worktree::worktree_path(project_root, story_id);
|
|
|
|
|
let root = project_root.to_path_buf();
|
|
|
|
|
let sid = story_id.to_string();
|
|
|
|
|
let br = branch.clone();
|
|
|
|
|
|
|
|
|
|
// Run blocking operations (git + cargo) off the async runtime.
|
|
|
|
|
let (merge_success, had_conflicts, conflict_details, merge_output) =
|
|
|
|
|
tokio::task::spawn_blocking(move || run_squash_merge(&root, &br, &sid))
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("Merge task panicked: {e}"))??;
|
|
|
|
|
|
|
|
|
|
if !merge_success {
|
|
|
|
|
return Ok(MergeReport {
|
|
|
|
|
story_id: story_id.to_string(),
|
|
|
|
|
success: false,
|
|
|
|
|
had_conflicts,
|
|
|
|
|
conflict_details,
|
|
|
|
|
gates_passed: false,
|
|
|
|
|
gate_output: merge_output,
|
|
|
|
|
worktree_cleaned_up: false,
|
|
|
|
|
story_archived: false,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Merge succeeded — run quality gates in the project root.
|
|
|
|
|
let root2 = project_root.to_path_buf();
|
|
|
|
|
let (gates_passed, gate_output) =
|
|
|
|
|
tokio::task::spawn_blocking(move || run_merge_quality_gates(&root2))
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("Gate check task panicked: {e}"))??;
|
|
|
|
|
|
|
|
|
|
if !gates_passed {
|
|
|
|
|
return Ok(MergeReport {
|
|
|
|
|
story_id: story_id.to_string(),
|
|
|
|
|
success: true,
|
|
|
|
|
had_conflicts: false,
|
|
|
|
|
conflict_details: None,
|
|
|
|
|
gates_passed: false,
|
|
|
|
|
gate_output,
|
|
|
|
|
worktree_cleaned_up: false,
|
|
|
|
|
story_archived: false,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Gates passed — archive the story.
|
|
|
|
|
let story_archived = move_story_to_archived(project_root, story_id).is_ok();
|
|
|
|
|
|
|
|
|
|
// Clean up the worktree if it exists.
|
|
|
|
|
let worktree_cleaned_up = if wt_path.exists() {
|
|
|
|
|
let config = crate::config::ProjectConfig::load(project_root)
|
|
|
|
|
.unwrap_or_default();
|
|
|
|
|
worktree::remove_worktree_by_story_id(project_root, story_id, &config)
|
|
|
|
|
.await
|
|
|
|
|
.is_ok()
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
Ok(MergeReport {
|
|
|
|
|
story_id: story_id.to_string(),
|
|
|
|
|
success: true,
|
|
|
|
|
had_conflicts: false,
|
|
|
|
|
conflict_details: None,
|
|
|
|
|
gates_passed: true,
|
|
|
|
|
gate_output,
|
|
|
|
|
worktree_cleaned_up,
|
|
|
|
|
story_archived,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 14:09:52 +00:00
|
|
|
/// Return the port this server is running on.
|
2026-02-20 15:31:13 +00:00
|
|
|
#[allow(dead_code)]
|
2026-02-20 14:09:52 +00:00
|
|
|
pub fn port(&self) -> u16 {
|
|
|
|
|
self.port
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Get project root helper.
|
|
|
|
|
pub fn get_project_root(
|
|
|
|
|
&self,
|
|
|
|
|
state: &crate::state::SessionState,
|
|
|
|
|
) -> Result<PathBuf, String> {
|
|
|
|
|
state.get_project_root()
|
2026-02-19 15:25:22 +00:00
|
|
|
}
|
2026-02-20 13:16:04 +00:00
|
|
|
|
|
|
|
|
/// Test helper: inject a pre-built agent entry so unit tests can exercise
|
|
|
|
|
/// wait/subscribe logic without spawning a real process.
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
pub fn inject_test_agent(
|
|
|
|
|
&self,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
agent_name: &str,
|
|
|
|
|
status: AgentStatus,
|
|
|
|
|
) -> broadcast::Sender<AgentEvent> {
|
|
|
|
|
let (tx, _) = broadcast::channel::<AgentEvent>(64);
|
|
|
|
|
let key = composite_key(story_id, agent_name);
|
|
|
|
|
let mut agents = self.agents.lock().unwrap();
|
|
|
|
|
agents.insert(
|
|
|
|
|
key,
|
|
|
|
|
StoryAgent {
|
|
|
|
|
agent_name: agent_name.to_string(),
|
|
|
|
|
status,
|
|
|
|
|
worktree_info: None,
|
|
|
|
|
session_id: None,
|
|
|
|
|
tx: tx.clone(),
|
|
|
|
|
task_handle: None,
|
|
|
|
|
event_log: Arc::new(Mutex::new(Vec::new())),
|
2026-02-20 15:02:34 +00:00
|
|
|
completion: None,
|
2026-02-23 13:13:41 +00:00
|
|
|
project_root: None,
|
2026-02-20 15:02:34 +00:00
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
tx
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Test helper: inject an agent with a specific worktree path for testing
|
|
|
|
|
/// gate-related logic.
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
pub fn inject_test_agent_with_path(
|
|
|
|
|
&self,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
agent_name: &str,
|
|
|
|
|
status: AgentStatus,
|
|
|
|
|
worktree_path: PathBuf,
|
|
|
|
|
) -> broadcast::Sender<AgentEvent> {
|
|
|
|
|
let (tx, _) = broadcast::channel::<AgentEvent>(64);
|
|
|
|
|
let key = composite_key(story_id, agent_name);
|
|
|
|
|
let mut agents = self.agents.lock().unwrap();
|
|
|
|
|
agents.insert(
|
|
|
|
|
key,
|
|
|
|
|
StoryAgent {
|
|
|
|
|
agent_name: agent_name.to_string(),
|
|
|
|
|
status,
|
|
|
|
|
worktree_info: Some(WorktreeInfo {
|
|
|
|
|
path: worktree_path,
|
|
|
|
|
branch: format!("feature/story-{story_id}"),
|
|
|
|
|
base_branch: "master".to_string(),
|
|
|
|
|
}),
|
|
|
|
|
session_id: None,
|
|
|
|
|
tx: tx.clone(),
|
|
|
|
|
task_handle: None,
|
|
|
|
|
event_log: Arc::new(Mutex::new(Vec::new())),
|
|
|
|
|
completion: None,
|
2026-02-23 13:13:41 +00:00
|
|
|
project_root: None,
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
tx
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Test helper: inject an agent with a completion report and project_root
|
|
|
|
|
/// for testing pipeline advance logic without spawning real agents.
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
pub fn inject_test_agent_with_completion(
|
|
|
|
|
&self,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
agent_name: &str,
|
|
|
|
|
status: AgentStatus,
|
|
|
|
|
project_root: PathBuf,
|
|
|
|
|
completion: CompletionReport,
|
|
|
|
|
) -> broadcast::Sender<AgentEvent> {
|
|
|
|
|
let (tx, _) = broadcast::channel::<AgentEvent>(64);
|
|
|
|
|
let key = composite_key(story_id, agent_name);
|
|
|
|
|
let mut agents = self.agents.lock().unwrap();
|
|
|
|
|
agents.insert(
|
|
|
|
|
key,
|
|
|
|
|
StoryAgent {
|
|
|
|
|
agent_name: agent_name.to_string(),
|
|
|
|
|
status,
|
|
|
|
|
worktree_info: None,
|
|
|
|
|
session_id: None,
|
|
|
|
|
tx: tx.clone(),
|
|
|
|
|
task_handle: None,
|
|
|
|
|
event_log: Arc::new(Mutex::new(Vec::new())),
|
|
|
|
|
completion: Some(completion),
|
|
|
|
|
project_root: Some(project_root),
|
2026-02-20 13:16:04 +00:00
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
tx
|
|
|
|
|
}
|
2026-02-19 15:25:22 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-20 17:36:35 +00:00
|
|
|
/// Result of a mergemaster merge operation.
#[derive(Debug, Serialize, Clone)]
pub struct MergeReport {
    /// ID of the work item whose feature branch was merged.
    pub story_id: String,
    /// Whether the squash-merge itself succeeded.
    pub success: bool,
    /// Whether the merge ran into conflicts.
    pub had_conflicts: bool,
    /// Conflict description, populated when `had_conflicts` is true.
    pub conflict_details: Option<String>,
    /// Whether the post-merge quality gates passed.
    pub gates_passed: bool,
    /// Combined output from the merge and/or quality-gate commands.
    pub gate_output: String,
    /// Whether the story's worktree was removed after a fully successful merge.
    pub worktree_cleaned_up: bool,
    /// Whether the story file was moved to `work/5_archived/`.
    pub story_archived: bool,
}
|
|
|
|
|
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Determine the work item type from its ID (new naming: `{N}_{type}_{slug}`).
/// Returns "bug", "spike", or "story".
#[allow(dead_code)]
fn item_type_from_id(item_id: &str) -> &'static str {
    // Strip the leading numeric prefix, then dispatch on the `_{type}_` marker.
    // Anything without a recognized marker defaults to "story".
    let tail = item_id.trim_start_matches(|c: char| c.is_ascii_digit());
    match tail {
        t if t.starts_with("_bug_") => "bug",
        t if t.starts_with("_spike_") => "spike",
        _ => "story",
    }
}
|
|
|
|
|
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Return the source directory path for a work item (always work/1_upcoming/).
fn item_source_dir(project_root: &Path, _item_id: &str) -> PathBuf {
    // Build the fixed path segment by segment off the project root.
    [".story_kit", "work", "1_upcoming"]
        .iter()
        .fold(project_root.to_path_buf(), |acc, seg| acc.join(seg))
}
|
|
|
|
|
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Return the archive directory path for a work item (always work/5_archived/).
fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf {
    // Build the fixed path segment by segment off the project root.
    [".story_kit", "work", "5_archived"]
        .iter()
        .fold(project_root.to_path_buf(), |acc, seg| acc.join(seg))
}
|
|
|
|
|
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Move a work item (story, bug, or spike) from `work/1_upcoming/` to `work/2_current/`.
|
2026-02-20 15:31:13 +00:00
|
|
|
///
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Idempotent: if the item is already in `2_current/`, returns Ok without committing.
|
|
|
|
|
/// If the item is not found in `1_upcoming/`, logs a warning and returns Ok.
|
2026-02-20 15:31:13 +00:00
|
|
|
pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(), String> {
|
2026-02-20 17:16:48 +00:00
|
|
|
let sk = project_root.join(".story_kit").join("work");
|
|
|
|
|
let current_dir = sk.join("2_current");
|
2026-02-20 16:21:30 +00:00
|
|
|
let current_path = current_dir.join(format!("{story_id}.md"));
|
2026-02-20 15:31:13 +00:00
|
|
|
|
|
|
|
|
if current_path.exists() {
|
2026-02-20 17:16:48 +00:00
|
|
|
// Already in 2_current/ — idempotent, nothing to do.
|
2026-02-20 15:31:13 +00:00
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 16:21:30 +00:00
|
|
|
let source_dir = item_source_dir(project_root, story_id);
|
|
|
|
|
let source_path = source_dir.join(format!("{story_id}.md"));
|
|
|
|
|
|
|
|
|
|
if !source_path.exists() {
|
2026-02-20 15:31:13 +00:00
|
|
|
eprintln!(
|
2026-02-20 17:16:48 +00:00
|
|
|
"[lifecycle] Work item '{story_id}' not found in {}; skipping move to 2_current/",
|
2026-02-20 16:21:30 +00:00
|
|
|
source_dir.display()
|
2026-02-20 15:31:13 +00:00
|
|
|
);
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::fs::create_dir_all(¤t_dir)
|
2026-02-20 17:16:48 +00:00
|
|
|
.map_err(|e| format!("Failed to create work/2_current/ directory: {e}"))?;
|
2026-02-20 15:31:13 +00:00
|
|
|
|
2026-02-20 16:21:30 +00:00
|
|
|
std::fs::rename(&source_path, ¤t_path)
|
2026-02-20 17:16:48 +00:00
|
|
|
.map_err(|e| format!("Failed to move '{story_id}' to 2_current/: {e}"))?;
|
2026-02-20 15:31:13 +00:00
|
|
|
|
2026-02-20 16:21:30 +00:00
|
|
|
eprintln!(
|
2026-02-20 17:16:48 +00:00
|
|
|
"[lifecycle] Moved '{story_id}' from {} to work/2_current/",
|
2026-02-20 16:21:30 +00:00
|
|
|
source_dir.display()
|
|
|
|
|
);
|
2026-02-20 15:31:13 +00:00
|
|
|
|
2026-02-20 19:39:19 +00:00
|
|
|
Ok(())
|
2026-02-20 15:31:13 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Move a story from `work/2_current/` or `work/4_merge/` to `work/5_archived/`.
///
/// * If the story is in `2_current/`, it is moved to `5_archived/`.
/// * If the story is in `4_merge/`, it is moved to `5_archived/`.
/// * If the story is already in `5_archived/`, this is a no-op (idempotent).
/// * Otherwise an error is returned.
pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(), String> {
    let sk = project_root.join(".story_kit").join("work");
    let current_path = sk.join("2_current").join(format!("{story_id}.md"));
    let merge_path = sk.join("4_merge").join(format!("{story_id}.md"));
    let archived_dir = sk.join("5_archived");
    let archived_path = archived_dir.join(format!("{story_id}.md"));

    if archived_path.exists() {
        // Already archived — idempotent, nothing to do.
        return Ok(());
    }

    // Check 2_current/ first, then 4_merge/.
    let source_path = if current_path.exists() {
        current_path.clone()
    } else if merge_path.exists() {
        merge_path.clone()
    } else {
        // FIX: the message previously said "Cannot accept story." — a leftover from
        // an earlier "accept" flow. This function archives, so say so.
        return Err(format!(
            "Story '{story_id}' not found in work/2_current/ or work/4_merge/. Cannot archive story."
        ));
    };

    std::fs::create_dir_all(&archived_dir)
        .map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?;
    std::fs::rename(&source_path, &archived_path)
        .map_err(|e| format!("Failed to move story '{story_id}' to 5_archived/: {e}"))?;

    let from_dir = if source_path == current_path {
        "work/2_current/"
    } else {
        "work/4_merge/"
    };
    eprintln!("[lifecycle] Moved story '{story_id}' from {from_dir} to work/5_archived/");

    Ok(())
}
|
|
|
|
|
|
2026-02-23 13:13:41 +00:00
|
|
|
/// Move a story/bug from `work/2_current/` or `work/3_qa/` to `work/4_merge/`.
///
/// Stages a work item as ready for the mergemaster to pick up and merge into master.
/// Idempotent: if the item is already in `4_merge/`, this is a no-op.
pub fn move_story_to_merge(project_root: &Path, story_id: &str) -> Result<(), String> {
    let work = project_root.join(".story_kit").join("work");
    let file_name = format!("{story_id}.md");
    let merge_dir = work.join("4_merge");
    let dest = merge_dir.join(&file_name);

    // Already staged for merge — nothing to do.
    if dest.exists() {
        return Ok(());
    }

    // Accept from 2_current/ (manual trigger) or 3_qa/ (pipeline advancement from QA),
    // in that order of precedence.
    let candidates = [
        (work.join("2_current").join(&file_name), "work/2_current/"),
        (work.join("3_qa").join(&file_name), "work/3_qa/"),
    ];
    let Some((src, from_dir)) = candidates.iter().find(|(p, _)| p.exists()) else {
        return Err(format!(
            "Work item '{story_id}' not found in work/2_current/ or work/3_qa/. Cannot move to 4_merge/."
        ));
    };

    std::fs::create_dir_all(&merge_dir)
        .map_err(|e| format!("Failed to create work/4_merge/ directory: {e}"))?;
    std::fs::rename(src, &dest)
        .map_err(|e| format!("Failed to move '{story_id}' to 4_merge/: {e}"))?;

    eprintln!("[lifecycle] Moved '{story_id}' from {from_dir} to work/4_merge/");

    Ok(())
}
|
|
|
|
|
|
2026-02-20 17:44:06 +00:00
|
|
|
/// Move a story/bug from `work/2_current/` to `work/3_qa/`.
///
/// Stages a work item for QA review before merging to master.
/// Idempotent: if the item is already in `3_qa/`, this is a no-op.
pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), String> {
    let work = project_root.join(".story_kit").join("work");
    let file_name = format!("{story_id}.md");
    let qa_dir = work.join("3_qa");
    let dest = qa_dir.join(&file_name);

    // Already staged for QA — nothing to do.
    if dest.exists() {
        return Ok(());
    }

    let src = work.join("2_current").join(&file_name);
    if !src.exists() {
        return Err(format!(
            "Work item '{story_id}' not found in work/2_current/. Cannot move to 3_qa/."
        ));
    }

    std::fs::create_dir_all(&qa_dir)
        .map_err(|e| format!("Failed to create work/3_qa/ directory: {e}"))?;
    std::fs::rename(&src, &dest)
        .map_err(|e| format!("Failed to move '{story_id}' to 3_qa/: {e}"))?;

    eprintln!("[lifecycle] Moved '{story_id}' from work/2_current/ to work/3_qa/");

    Ok(())
}
|
|
|
|
|
|
2026-02-20 17:16:48 +00:00
|
|
|
/// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_archived/` and auto-commit.
|
2026-02-20 16:21:30 +00:00
|
|
|
///
|
2026-02-20 17:16:48 +00:00
|
|
|
/// * If the bug is in `2_current/`, it is moved to `5_archived/` and committed.
|
|
|
|
|
/// * If the bug is still in `1_upcoming/` (never started), it is moved directly to `5_archived/`.
|
|
|
|
|
/// * If the bug is already in `5_archived/`, this is a no-op (idempotent).
|
2026-02-20 16:21:30 +00:00
|
|
|
/// * If the bug is not found anywhere, an error is returned.
|
|
|
|
|
pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> {
|
2026-02-20 17:16:48 +00:00
|
|
|
let sk = project_root.join(".story_kit").join("work");
|
|
|
|
|
let current_path = sk.join("2_current").join(format!("{bug_id}.md"));
|
|
|
|
|
let upcoming_path = sk.join("1_upcoming").join(format!("{bug_id}.md"));
|
2026-02-20 16:34:32 +00:00
|
|
|
let archive_dir = item_archive_dir(project_root, bug_id);
|
2026-02-20 16:21:30 +00:00
|
|
|
let archive_path = archive_dir.join(format!("{bug_id}.md"));
|
|
|
|
|
|
|
|
|
|
if archive_path.exists() {
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let source_path = if current_path.exists() {
|
|
|
|
|
current_path.clone()
|
2026-02-20 17:16:48 +00:00
|
|
|
} else if upcoming_path.exists() {
|
|
|
|
|
upcoming_path.clone()
|
2026-02-20 16:21:30 +00:00
|
|
|
} else {
|
|
|
|
|
return Err(format!(
|
2026-02-20 17:16:48 +00:00
|
|
|
"Bug '{bug_id}' not found in work/2_current/ or work/1_upcoming/. Cannot close bug."
|
2026-02-20 16:21:30 +00:00
|
|
|
));
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
std::fs::create_dir_all(&archive_dir)
|
2026-02-20 17:16:48 +00:00
|
|
|
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?;
|
2026-02-20 16:21:30 +00:00
|
|
|
std::fs::rename(&source_path, &archive_path)
|
2026-02-20 17:16:48 +00:00
|
|
|
.map_err(|e| format!("Failed to move bug '{bug_id}' to 5_archived/: {e}"))?;
|
2026-02-20 16:21:30 +00:00
|
|
|
|
|
|
|
|
eprintln!(
|
2026-02-20 17:16:48 +00:00
|
|
|
"[lifecycle] Closed bug '{bug_id}' → work/5_archived/"
|
2026-02-20 16:21:30 +00:00
|
|
|
);
|
|
|
|
|
|
2026-02-20 19:39:19 +00:00
|
|
|
Ok(())
|
2026-02-20 16:21:30 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-20 15:02:34 +00:00
|
|
|
// ── Acceptance-gate helpers ───────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// Check whether the given directory has any uncommitted git changes.
/// Returns `Err` with a descriptive message if there are any, if `git status`
/// cannot be run, or if it exits unsuccessfully.
fn check_uncommitted_changes(path: &Path) -> Result<(), String> {
    let output = Command::new("git")
        .args(["status", "--porcelain"])
        .current_dir(path)
        .output()
        .map_err(|e| format!("Failed to run git status: {e}"))?;

    // FIX: a failed `git status` (e.g. the path is not a git repository) produces
    // empty stdout; previously that was silently treated as a clean worktree.
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!(
            "git status failed in {}: {}",
            path.display(),
            stderr.trim()
        ));
    }

    // Porcelain output is empty iff the worktree is clean.
    let stdout = String::from_utf8_lossy(&output.stdout);
    if !stdout.trim().is_empty() {
        return Err(format!(
            "Worktree has uncommitted changes. Commit your work before calling \
            report_completion:\n{stdout}"
        ));
    }
    Ok(())
}
|
|
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
/// Run the project's test suite.
///
/// Uses `script/test` if present, treating it as the canonical single test entry point.
/// Falls back to `cargo nextest run` / `cargo test` when `script/test` is absent.
/// Returns `(tests_passed, output)` where `output` carries combined stdout+stderr
/// under a header identifying which runner was used.
fn run_project_tests(path: &Path) -> Result<(bool, String), String> {
    // Prefer the project's own test script when it exists.
    let script_test = path.join("script").join("test");
    if script_test.exists() {
        let mut output = String::from("=== script/test ===\n");
        // Runs the script directly; spawn failure (e.g. missing exec bit) is an Err.
        let result = Command::new(&script_test)
            .current_dir(path)
            .output()
            .map_err(|e| format!("Failed to run script/test: {e}"))?;
        let out = format!(
            "{}{}",
            String::from_utf8_lossy(&result.stdout),
            String::from_utf8_lossy(&result.stderr)
        );
        output.push_str(&out);
        output.push('\n');
        return Ok((result.status.success(), output));
    }

    // Fallback: cargo nextest run / cargo test
    let mut output = String::from("=== tests ===\n");
    let (success, test_out) = match Command::new("cargo")
        .args(["nextest", "run"])
        .current_dir(path)
        .output()
    {
        Ok(o) => {
            let combined = format!(
                "{}{}",
                String::from_utf8_lossy(&o.stdout),
                String::from_utf8_lossy(&o.stderr)
            );
            (o.status.success(), combined)
        }
        Err(_) => {
            // nextest not available — fall back to cargo test
            // NOTE(review): this Err arm fires when spawning `cargo` itself fails;
            // a missing nextest *subcommand* would instead surface as a non-zero
            // exit status in the Ok arm. Verify the intended fallback condition.
            let o = Command::new("cargo")
                .args(["test"])
                .current_dir(path)
                .output()
                .map_err(|e| format!("Failed to run cargo test: {e}"))?;
            let combined = format!(
                "{}{}",
                String::from_utf8_lossy(&o.stdout),
                String::from_utf8_lossy(&o.stderr)
            );
            (o.status.success(), combined)
        }
    };
    output.push_str(&test_out);
    output.push('\n');
    Ok((success, output))
}
|
2026-02-20 15:02:34 +00:00
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
/// Run `cargo clippy` and the project test suite (via `script/test` if present,
|
|
|
|
|
/// otherwise `cargo nextest run` / `cargo test`) in the given directory.
|
|
|
|
|
/// Returns `(gates_passed, combined_output)`.
|
|
|
|
|
fn run_acceptance_gates(path: &Path) -> Result<(bool, String), String> {
|
|
|
|
|
let mut all_output = String::new();
|
|
|
|
|
let mut all_passed = true;
|
|
|
|
|
|
|
|
|
|
// ── cargo clippy ──────────────────────────────────────────────
|
|
|
|
|
let clippy = Command::new("cargo")
|
|
|
|
|
.args(["clippy", "--all-targets", "--all-features"])
|
|
|
|
|
.current_dir(path)
|
|
|
|
|
.output()
|
|
|
|
|
.map_err(|e| format!("Failed to run cargo clippy: {e}"))?;
|
|
|
|
|
|
|
|
|
|
all_output.push_str("=== cargo clippy ===\n");
|
|
|
|
|
let clippy_stdout = String::from_utf8_lossy(&clippy.stdout);
|
|
|
|
|
let clippy_stderr = String::from_utf8_lossy(&clippy.stderr);
|
|
|
|
|
if !clippy_stdout.is_empty() {
|
|
|
|
|
all_output.push_str(&clippy_stdout);
|
|
|
|
|
}
|
|
|
|
|
if !clippy_stderr.is_empty() {
|
|
|
|
|
all_output.push_str(&clippy_stderr);
|
|
|
|
|
}
|
2026-02-20 15:02:34 +00:00
|
|
|
all_output.push('\n');
|
|
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
if !clippy.status.success() {
|
|
|
|
|
all_passed = false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ── tests (script/test if available, else cargo nextest/test) ─
|
|
|
|
|
let (test_success, test_out) = run_project_tests(path)?;
|
|
|
|
|
all_output.push_str(&test_out);
|
2026-02-20 15:02:34 +00:00
|
|
|
if !test_success {
|
|
|
|
|
all_passed = false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok((all_passed, all_output))
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-23 13:40:12 +00:00
|
|
|
/// Run `script/test_coverage` in the given directory if the script exists.
///
/// Used as a QA gate before advancing a story from `3_qa/` to `4_merge/`.
/// Returns `(passed, output)`. When the script is absent the gate is a
/// no-op and reports success.
fn run_coverage_gate(path: &Path) -> Result<(bool, String), String> {
    let script = path.join("script").join("test_coverage");
    if !script.exists() {
        let skipped = "script/test_coverage not found; coverage gate skipped.\n".to_string();
        return Ok((true, skipped));
    }

    let result = Command::new(&script)
        .current_dir(path)
        .output()
        .map_err(|e| format!("Failed to run script/test_coverage: {e}"))?;

    // Interleave nothing: stdout first, then stderr, matching the gate report style.
    let mut report = String::from("=== script/test_coverage ===\n");
    report.push_str(&String::from_utf8_lossy(&result.stdout));
    report.push_str(&String::from_utf8_lossy(&result.stderr));
    report.push('\n');

    Ok((result.status.success(), report))
}
|
|
|
|
|
|
2026-02-20 17:36:35 +00:00
|
|
|
// ── Mergemaster helpers ───────────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// Squash-merge a feature branch into the current branch in the project root.
///
/// Returns `(success, had_conflicts, conflict_details, output)`.
fn run_squash_merge(
    project_root: &Path,
    branch: &str,
    story_id: &str,
) -> Result<(bool, bool, Option<String>, String), String> {
    let mut log = String::new();

    // ── git merge --squash ────────────────────────────────────────
    log.push_str(&format!("=== git merge --squash {branch} ===\n"));
    let merge = Command::new("git")
        .args(["merge", "--squash", branch])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("Failed to run git merge: {e}"))?;

    let merge_stdout = String::from_utf8_lossy(&merge.stdout).to_string();
    let merge_stderr = String::from_utf8_lossy(&merge.stderr).to_string();
    log.push_str(&merge_stdout);
    log.push_str(&merge_stderr);
    log.push('\n');

    if !merge.status.success() {
        // Conflicts detected — capture the details for the caller.
        let details =
            format!("Merge conflicts in branch '{branch}':\n{merge_stdout}{merge_stderr}");

        // Best-effort abort to restore a clean working tree.
        let _ = Command::new("git")
            .args(["merge", "--abort"])
            .current_dir(project_root)
            .output();

        log.push_str("=== Merge aborted due to conflicts ===\n");
        return Ok((false, true, Some(details), log));
    }

    // ── git commit ─────────────────────────────────────────────
    log.push_str("=== git commit ===\n");
    let message = format!("story-kit: merge {story_id}");
    let commit = Command::new("git")
        .args(["commit", "-m", &message])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("Failed to run git commit: {e}"))?;

    let commit_stdout = String::from_utf8_lossy(&commit.stdout).to_string();
    let commit_stderr = String::from_utf8_lossy(&commit.stderr).to_string();
    log.push_str(&commit_stdout);
    log.push_str(&commit_stderr);
    log.push('\n');

    if commit.status.success() {
        return Ok((true, false, None, log));
    }

    // A failed commit with "nothing to commit" (empty squash diff) still
    // counts as a successful merge; any other commit failure is a real one.
    let empty_diff = commit_stderr.contains("nothing to commit")
        || commit_stdout.contains("nothing to commit");
    Ok((empty_diff, false, None, log))
}
|
|
|
|
|
|
|
|
|
|
/// Run quality gates in the project root after a successful merge.
|
|
|
|
|
///
|
|
|
|
|
/// Runs: cargo clippy, cargo nextest run / cargo test, and pnpm gates if frontend/ exists.
|
|
|
|
|
/// Returns `(gates_passed, combined_output)`.
|
|
|
|
|
fn run_merge_quality_gates(project_root: &Path) -> Result<(bool, String), String> {
|
|
|
|
|
let mut all_output = String::new();
|
|
|
|
|
let mut all_passed = true;
|
|
|
|
|
|
|
|
|
|
// ── cargo clippy ──────────────────────────────────────────────
|
|
|
|
|
let clippy = Command::new("cargo")
|
|
|
|
|
.args(["clippy", "--all-targets", "--all-features"])
|
|
|
|
|
.current_dir(project_root)
|
|
|
|
|
.output()
|
|
|
|
|
.map_err(|e| format!("Failed to run cargo clippy: {e}"))?;
|
|
|
|
|
|
|
|
|
|
all_output.push_str("=== cargo clippy ===\n");
|
|
|
|
|
let clippy_out = format!(
|
|
|
|
|
"{}{}",
|
|
|
|
|
String::from_utf8_lossy(&clippy.stdout),
|
|
|
|
|
String::from_utf8_lossy(&clippy.stderr)
|
|
|
|
|
);
|
|
|
|
|
all_output.push_str(&clippy_out);
|
|
|
|
|
all_output.push('\n');
|
|
|
|
|
|
|
|
|
|
if !clippy.status.success() {
|
|
|
|
|
all_passed = false;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
// ── tests (script/test if available, else cargo nextest/test) ─
|
|
|
|
|
let (test_success, test_out) = run_project_tests(project_root)?;
|
2026-02-20 17:36:35 +00:00
|
|
|
all_output.push_str(&test_out);
|
|
|
|
|
if !test_success {
|
|
|
|
|
all_passed = false;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
// ── pnpm build (if frontend/ directory exists) ────────────────
|
|
|
|
|
// pnpm test is handled by script/test when present; only run it here as
|
|
|
|
|
// a standalone fallback when there is no script/test.
|
2026-02-20 17:36:35 +00:00
|
|
|
let frontend_dir = project_root.join("frontend");
|
|
|
|
|
if frontend_dir.exists() {
|
|
|
|
|
all_output.push_str("=== pnpm build ===\n");
|
|
|
|
|
let pnpm_build = Command::new("pnpm")
|
|
|
|
|
.args(["run", "build"])
|
|
|
|
|
.current_dir(&frontend_dir)
|
|
|
|
|
.output()
|
|
|
|
|
.map_err(|e| format!("Failed to run pnpm build: {e}"))?;
|
|
|
|
|
|
|
|
|
|
let build_out = format!(
|
|
|
|
|
"{}{}",
|
|
|
|
|
String::from_utf8_lossy(&pnpm_build.stdout),
|
|
|
|
|
String::from_utf8_lossy(&pnpm_build.stderr)
|
|
|
|
|
);
|
|
|
|
|
all_output.push_str(&build_out);
|
|
|
|
|
all_output.push('\n');
|
|
|
|
|
|
|
|
|
|
if !pnpm_build.status.success() {
|
|
|
|
|
all_passed = false;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
// Only run pnpm test separately when script/test is absent (it would
|
|
|
|
|
// already cover frontend tests in that case).
|
|
|
|
|
let script_test = project_root.join("script").join("test");
|
|
|
|
|
if !script_test.exists() {
|
|
|
|
|
all_output.push_str("=== pnpm test ===\n");
|
|
|
|
|
let pnpm_test = Command::new("pnpm")
|
|
|
|
|
.args(["test", "--run"])
|
|
|
|
|
.current_dir(&frontend_dir)
|
|
|
|
|
.output()
|
|
|
|
|
.map_err(|e| format!("Failed to run pnpm test: {e}"))?;
|
2026-02-20 17:36:35 +00:00
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
let pnpm_test_out = format!(
|
|
|
|
|
"{}{}",
|
|
|
|
|
String::from_utf8_lossy(&pnpm_test.stdout),
|
|
|
|
|
String::from_utf8_lossy(&pnpm_test.stderr)
|
|
|
|
|
);
|
|
|
|
|
all_output.push_str(&pnpm_test_out);
|
|
|
|
|
all_output.push('\n');
|
2026-02-20 17:36:35 +00:00
|
|
|
|
2026-02-23 12:59:55 +00:00
|
|
|
if !pnpm_test.status.success() {
|
|
|
|
|
all_passed = false;
|
|
|
|
|
}
|
2026-02-20 17:36:35 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok((all_passed, all_output))
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Spawn claude agent in a PTY and stream events through the broadcast channel.
|
2026-02-20 11:57:25 +00:00
|
|
|
#[allow(clippy::too_many_arguments)]
|
2026-02-19 17:58:53 +00:00
|
|
|
async fn run_agent_pty_streaming(
|
|
|
|
|
story_id: &str,
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
agent_name: &str,
|
2026-02-19 17:58:53 +00:00
|
|
|
command: &str,
|
|
|
|
|
args: &[String],
|
|
|
|
|
prompt: &str,
|
2026-02-19 15:25:22 +00:00
|
|
|
cwd: &str,
|
2026-02-19 17:58:53 +00:00
|
|
|
tx: &broadcast::Sender<AgentEvent>,
|
2026-02-20 11:57:25 +00:00
|
|
|
event_log: &Arc<Mutex<Vec<AgentEvent>>>,
|
2026-02-19 17:58:53 +00:00
|
|
|
) -> Result<Option<String>, String> {
|
|
|
|
|
let sid = story_id.to_string();
|
Accept story 34: Per-Project Agent Configuration and Role Definitions
Replace single [agent] config with multi-agent [[agent]] roster system.
Each agent has name, role, model, allowed_tools, max_turns, max_budget_usd,
and system_prompt fields that map to Claude CLI flags at spawn time.
- AgentConfig expanded with structured fields, validated at startup (panics
on duplicate names, empty names, non-positive budgets/turns)
- Backwards-compatible: legacy [agent] format auto-wraps with deprecation warning
- AgentPool uses composite "story_id:agent_name" keys for concurrent agents
- agent_name added to AgentEvent variants, AgentInfo, start/stop/subscribe APIs
- GET /agents/config returns roster, POST /agents/config/reload hot-reloads
- POST /agents/start accepts optional agent_name, /agents/stop requires it
- SSE route updated to /agents/:story_id/:agent_name/stream
- Frontend: roster badges, agent selector dropdown, composite-key state
- Project root initialized to cwd at startup so config endpoints work immediately
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 18:46:14 +00:00
|
|
|
let aname = agent_name.to_string();
|
2026-02-19 17:58:53 +00:00
|
|
|
let cmd = command.to_string();
|
|
|
|
|
let args = args.to_vec();
|
|
|
|
|
let prompt = prompt.to_string();
|
|
|
|
|
let cwd = cwd.to_string();
|
|
|
|
|
let tx = tx.clone();
|
2026-02-20 11:57:25 +00:00
|
|
|
let event_log = event_log.clone();
|
2026-02-19 17:58:53 +00:00
|
|
|
|
|
|
|
|
tokio::task::spawn_blocking(move || {
|
2026-02-20 11:57:25 +00:00
|
|
|
run_agent_pty_blocking(&sid, &aname, &cmd, &args, &prompt, &cwd, &tx, &event_log)
|
2026-02-19 17:58:53 +00:00
|
|
|
})
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("Agent task panicked: {e}"))?
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 11:57:25 +00:00
|
|
|
/// Helper to send an event to both broadcast and event log.
|
|
|
|
|
fn emit_event(
|
|
|
|
|
event: AgentEvent,
|
|
|
|
|
tx: &broadcast::Sender<AgentEvent>,
|
|
|
|
|
event_log: &Mutex<Vec<AgentEvent>>,
|
|
|
|
|
) {
|
|
|
|
|
if let Ok(mut log) = event_log.lock() {
|
|
|
|
|
log.push(event.clone());
|
|
|
|
|
}
|
|
|
|
|
let _ = tx.send(event);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Blocking driver: spawn the agent CLI in a PTY, parse its stream-json
/// output line by line, and forward events via `emit_event`.
///
/// Returns the agent session id if one was reported in a "system" event.
/// Called from `run_agent_pty_streaming` inside `spawn_blocking`.
#[allow(clippy::too_many_arguments)]
fn run_agent_pty_blocking(
    story_id: &str,
    agent_name: &str,
    command: &str,
    args: &[String],
    prompt: &str,
    cwd: &str,
    tx: &broadcast::Sender<AgentEvent>,
    event_log: &Mutex<Vec<AgentEvent>>,
) -> Result<Option<String>, String> {
    let pty_system = native_pty_system();

    // Fixed size; the agent's output is parsed, not rendered, so the exact
    // dimensions only need to be wide enough to avoid aggressive wrapping.
    let pair = pty_system
        .openpty(PtySize {
            rows: 50,
            cols: 200,
            pixel_width: 0,
            pixel_height: 0,
        })
        .map_err(|e| format!("Failed to open PTY: {e}"))?;

    let mut cmd = CommandBuilder::new(command);

    // -p <prompt> must come first
    cmd.arg("-p");
    cmd.arg(prompt);

    // Add configured args (e.g., --directory /path/to/worktree, --model, etc.)
    for arg in args {
        cmd.arg(arg);
    }

    // One JSON object per line on stdout; --verbose includes system events
    // (which carry the session id we capture below).
    cmd.arg("--output-format");
    cmd.arg("stream-json");
    cmd.arg("--verbose");

    // Supervised agents don't need interactive permission prompts
    cmd.arg("--permission-mode");
    cmd.arg("bypassPermissions");

    cmd.cwd(cwd);
    cmd.env("NO_COLOR", "1");

    // Allow spawning Claude Code from within a Claude Code session
    cmd.env_remove("CLAUDECODE");
    cmd.env_remove("CLAUDE_CODE_ENTRYPOINT");

    eprintln!("[agent:{story_id}:{agent_name}] Spawning {command} in {cwd} with args: {args:?}");

    let mut child = pair
        .slave
        .spawn_command(cmd)
        .map_err(|e| format!("Failed to spawn agent for {story_id}:{agent_name}: {e}"))?;

    // Drop our copy of the slave side so EOF propagates when the child exits.
    drop(pair.slave);

    let reader = pair
        .master
        .try_clone_reader()
        .map_err(|e| format!("Failed to clone PTY reader: {e}"))?;

    // Drop the master too — the cloned reader keeps the read side alive.
    drop(pair.master);

    let buf_reader = BufReader::new(reader);
    let mut session_id: Option<String> = None;

    // Read until EOF (child exit) or a read error.
    for line in buf_reader.lines() {
        let line = match line {
            Ok(l) => l,
            Err(_) => break,
        };

        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }

        // Try to parse as JSON
        let json: serde_json::Value = match serde_json::from_str(trimmed) {
            Ok(j) => j,
            Err(_) => {
                // Non-JSON output (terminal escapes etc.) — send as raw output
                emit_event(
                    AgentEvent::Output {
                        story_id: story_id.to_string(),
                        agent_name: agent_name.to_string(),
                        text: trimmed.to_string(),
                    },
                    tx,
                    event_log,
                );
                continue;
            }
        };

        let event_type = json.get("type").and_then(|t| t.as_str()).unwrap_or("");

        match event_type {
            // "system" events carry the session id.
            "system" => {
                session_id = json
                    .get("session_id")
                    .and_then(|s| s.as_str())
                    .map(|s| s.to_string());
            }
            // "assistant" events carry content blocks; surface text blocks
            // as human-readable Output events.
            "assistant" => {
                if let Some(message) = json.get("message")
                    && let Some(content) = message.get("content").and_then(|c| c.as_array())
                {
                    for block in content {
                        if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
                            emit_event(
                                AgentEvent::Output {
                                    story_id: story_id.to_string(),
                                    agent_name: agent_name.to_string(),
                                    text: text.to_string(),
                                },
                                tx,
                                event_log,
                            );
                        }
                    }
                }
            }
            _ => {}
        }

        // Forward all JSON events
        emit_event(
            AgentEvent::AgentJson {
                story_id: story_id.to_string(),
                agent_name: agent_name.to_string(),
                data: json,
            },
            tx,
            event_log,
        );
    }

    // EOF reached: make sure the child is gone and reaped (kill is a no-op
    // if it already exited; wait avoids a zombie).
    let _ = child.kill();
    let _ = child.wait();

    eprintln!(
        "[agent:{story_id}:{agent_name}] Done. Session: {:?}",
        session_id
    );

    Ok(session_id)
}
|
2026-02-20 13:16:04 +00:00
|
|
|
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
mod tests {
|
|
|
|
|
use super::*;
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn wait_for_agent_returns_immediately_if_completed() {
|
2026-02-20 13:24:35 +00:00
|
|
|
let pool = AgentPool::new(3001);
|
2026-02-20 13:16:04 +00:00
|
|
|
pool.inject_test_agent("s1", "bot", AgentStatus::Completed);
|
|
|
|
|
|
|
|
|
|
let info = pool.wait_for_agent("s1", "bot", 1000).await.unwrap();
|
|
|
|
|
assert_eq!(info.status, AgentStatus::Completed);
|
|
|
|
|
assert_eq!(info.story_id, "s1");
|
|
|
|
|
assert_eq!(info.agent_name, "bot");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn wait_for_agent_returns_immediately_if_failed() {
|
2026-02-20 13:24:35 +00:00
|
|
|
let pool = AgentPool::new(3001);
|
2026-02-20 13:16:04 +00:00
|
|
|
pool.inject_test_agent("s2", "bot", AgentStatus::Failed);
|
|
|
|
|
|
|
|
|
|
let info = pool.wait_for_agent("s2", "bot", 1000).await.unwrap();
|
|
|
|
|
assert_eq!(info.status, AgentStatus::Failed);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn wait_for_agent_completes_on_done_event() {
|
2026-02-20 13:24:35 +00:00
|
|
|
let pool = AgentPool::new(3001);
|
2026-02-20 13:16:04 +00:00
|
|
|
let tx = pool.inject_test_agent("s3", "bot", AgentStatus::Running);
|
|
|
|
|
|
|
|
|
|
// Send Done event after a short delay
|
|
|
|
|
let tx_clone = tx.clone();
|
|
|
|
|
tokio::spawn(async move {
|
|
|
|
|
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
|
|
|
|
// Mark status via event; real code also updates the map, but for
|
|
|
|
|
// this unit test the map entry stays Running — we verify the
|
|
|
|
|
// wait loop reacts to the event.
|
|
|
|
|
let _ = tx_clone.send(AgentEvent::Done {
|
|
|
|
|
story_id: "s3".to_string(),
|
|
|
|
|
agent_name: "bot".to_string(),
|
|
|
|
|
session_id: Some("sess-abc".to_string()),
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let info = pool.wait_for_agent("s3", "bot", 2000).await.unwrap();
|
|
|
|
|
// Status comes from the map entry (Running in this unit test)
|
|
|
|
|
// — the important thing is that wait_for_agent returned without timing out.
|
|
|
|
|
assert_eq!(info.story_id, "s3");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn wait_for_agent_times_out() {
|
2026-02-20 13:24:35 +00:00
|
|
|
let pool = AgentPool::new(3001);
|
2026-02-20 13:16:04 +00:00
|
|
|
pool.inject_test_agent("s4", "bot", AgentStatus::Running);
|
|
|
|
|
|
|
|
|
|
let result = pool.wait_for_agent("s4", "bot", 50).await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
let msg = result.unwrap_err();
|
|
|
|
|
assert!(msg.contains("Timed out"), "unexpected message: {msg}");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn wait_for_agent_errors_for_nonexistent() {
|
2026-02-20 13:24:35 +00:00
|
|
|
let pool = AgentPool::new(3001);
|
2026-02-20 13:16:04 +00:00
|
|
|
let result = pool.wait_for_agent("no_story", "no_bot", 100).await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn wait_for_agent_completes_on_stopped_status_event() {
|
2026-02-20 13:24:35 +00:00
|
|
|
let pool = AgentPool::new(3001);
|
2026-02-20 13:16:04 +00:00
|
|
|
let tx = pool.inject_test_agent("s5", "bot", AgentStatus::Running);
|
|
|
|
|
|
|
|
|
|
let tx_clone = tx.clone();
|
|
|
|
|
tokio::spawn(async move {
|
|
|
|
|
tokio::time::sleep(std::time::Duration::from_millis(30)).await;
|
|
|
|
|
let _ = tx_clone.send(AgentEvent::Status {
|
|
|
|
|
story_id: "s5".to_string(),
|
|
|
|
|
agent_name: "bot".to_string(),
|
|
|
|
|
status: "stopped".to_string(),
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let info = pool.wait_for_agent("s5", "bot", 2000).await.unwrap();
|
|
|
|
|
assert_eq!(info.story_id, "s5");
|
|
|
|
|
}
|
2026-02-20 15:02:34 +00:00
|
|
|
|
|
|
|
|
// ── report_completion tests ────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn report_completion_rejects_nonexistent_agent() {
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
let result = pool
|
|
|
|
|
.report_completion("no_story", "no_bot", "done")
|
|
|
|
|
.await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
let msg = result.unwrap_err();
|
|
|
|
|
assert!(msg.contains("No agent"), "unexpected: {msg}");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn report_completion_rejects_non_running_agent() {
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
pool.inject_test_agent("s6", "bot", AgentStatus::Completed);
|
|
|
|
|
|
|
|
|
|
let result = pool.report_completion("s6", "bot", "done").await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
let msg = result.unwrap_err();
|
|
|
|
|
assert!(
|
|
|
|
|
msg.contains("not running"),
|
|
|
|
|
"expected 'not running' in: {msg}"
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn report_completion_rejects_dirty_worktree() {
|
|
|
|
|
use std::fs;
|
|
|
|
|
use tempfile::tempdir;
|
|
|
|
|
|
|
|
|
|
let tmp = tempdir().unwrap();
|
|
|
|
|
let repo = tmp.path();
|
|
|
|
|
|
|
|
|
|
// Init a real git repo and make an initial commit
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["init"])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["commit", "--allow-empty", "-m", "init"])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
// Write an uncommitted file
|
|
|
|
|
fs::write(repo.join("dirty.txt"), "not committed").unwrap();
|
|
|
|
|
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
pool.inject_test_agent_with_path("s7", "bot", AgentStatus::Running, repo.to_path_buf());
|
|
|
|
|
|
|
|
|
|
let result = pool.report_completion("s7", "bot", "done").await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
let msg = result.unwrap_err();
|
|
|
|
|
assert!(
|
|
|
|
|
msg.contains("uncommitted"),
|
|
|
|
|
"expected 'uncommitted' in: {msg}"
|
|
|
|
|
);
|
|
|
|
|
}
|
2026-02-20 15:31:13 +00:00
|
|
|
|
|
|
|
|
// ── move_story_to_current tests ────────────────────────────────────────────
|
2026-02-20 19:39:19 +00:00
|
|
|
// No git repo needed: the watcher handles commits asynchronously.
|
2026-02-20 15:31:13 +00:00
|
|
|
|
|
|
|
|
/// Create a throwaway git repository at `repo` with a configured identity,
/// an initial empty commit, and a deterministic branch name (`master`).
///
/// Pinning the branch name keeps tests that later reference `master`
/// independent of the host's `init.defaultBranch` setting, and each git
/// step is asserted so a broken fixture fails loudly instead of producing
/// confusing downstream assertion failures.
fn init_git_repo(repo: &std::path::Path) {
    let git = |args: &[&str]| {
        let out = Command::new("git")
            .args(args)
            .current_dir(repo)
            .output()
            .unwrap();
        assert!(
            out.status.success(),
            "git {args:?} failed: {}",
            String::from_utf8_lossy(&out.stderr)
        );
    };
    git(&["init"]);
    git(&["config", "user.email", "test@test.com"]);
    git(&["config", "user.name", "Test"]);
    git(&["commit", "--allow-empty", "-m", "init"]);
    // Force-rename whatever the default branch is to `master` (idempotent).
    git(&["branch", "-M", "master"]);
}
|
|
|
|
|
|
|
|
|
|
/// Moving a story out of 1_upcoming/ lands it in 2_current/.
#[test]
fn move_story_to_current_moves_file() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let upcoming = root.join(".story_kit/work/1_upcoming");
    let current = root.join(".story_kit/work/2_current");
    for dir in [&upcoming, &current] {
        fs::create_dir_all(dir).unwrap();
    }
    fs::write(upcoming.join("10_story_foo.md"), "test").unwrap();

    move_story_to_current(root, "10_story_foo").unwrap();

    assert!(!upcoming.join("10_story_foo.md").exists());
    assert!(current.join("10_story_foo.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// Moving a story that already sits in 2_current/ is harmless: it stays put.
#[test]
fn move_story_to_current_is_idempotent_when_already_current() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let current = tmp.path().join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("11_story_foo.md"), "test").unwrap();

    move_story_to_current(tmp.path(), "11_story_foo").unwrap();

    assert!(current.join("11_story_foo.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// A story that exists nowhere under the work tree is a no-op, not an error.
#[test]
fn move_story_to_current_noop_when_not_in_upcoming() {
    let tmp = tempfile::tempdir().unwrap();
    let result = move_story_to_current(tmp.path(), "99_missing");
    assert!(result.is_ok());
}
|
|
|
|
|
|
2026-02-20 16:21:30 +00:00
|
|
|
/// Bugs ride the same pipeline as stories: an upcoming bug file moves to 2_current/.
#[test]
fn move_bug_to_current_moves_from_upcoming() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let upcoming = root.join(".story_kit/work/1_upcoming");
    let current = root.join(".story_kit/work/2_current");
    for dir in [&upcoming, &current] {
        fs::create_dir_all(dir).unwrap();
    }
    fs::write(upcoming.join("1_bug_test.md"), "# Bug 1\n").unwrap();

    move_story_to_current(root, "1_bug_test").unwrap();

    assert!(!upcoming.join("1_bug_test.md").exists());
    assert!(current.join("1_bug_test.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// Closing an in-progress bug archives it out of 2_current/ into 5_archived/.
#[test]
fn close_bug_moves_from_current_to_archive() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let current = root.join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("2_bug_test.md"), "# Bug 2\n").unwrap();

    close_bug_to_archive(root, "2_bug_test").unwrap();

    assert!(!current.join("2_bug_test.md").exists());
    assert!(root.join(".story_kit/work/5_archived/2_bug_test.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// A bug that was never started (still in 1_upcoming/) can also be closed
/// straight into 5_archived/.
#[test]
fn close_bug_moves_from_upcoming_when_not_started() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let upcoming = root.join(".story_kit/work/1_upcoming");
    fs::create_dir_all(&upcoming).unwrap();
    fs::write(upcoming.join("3_bug_test.md"), "# Bug 3\n").unwrap();

    close_bug_to_archive(root, "3_bug_test").unwrap();

    assert!(!upcoming.join("3_bug_test.md").exists());
    assert!(root.join(".story_kit/work/5_archived/3_bug_test.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// The item type is encoded in the id as `<number>_<type>_<slug>`.
#[test]
fn item_type_from_id_detects_types() {
    let cases = [
        ("1_bug_test", "bug"),
        ("1_spike_research", "spike"),
        ("50_story_my_story", "story"),
        ("1_story_simple", "story"),
    ];
    for (id, expected) in cases {
        assert_eq!(item_type_from_id(id), expected);
    }
}
|
|
|
|
|
|
2026-02-23 13:13:41 +00:00
|
|
|
// ── pipeline_stage tests ──────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// Every `coder-N` agent name maps to the Coder stage.
#[test]
fn pipeline_stage_detects_coders() {
    for name in ["coder-1", "coder-2", "coder-3"] {
        assert_eq!(pipeline_stage(name), PipelineStage::Coder);
    }
}
|
|
|
|
|
|
|
|
|
|
/// The `qa` agent maps to the Qa stage.
#[test]
fn pipeline_stage_detects_qa() {
    assert_eq!(pipeline_stage("qa"), PipelineStage::Qa);
}
|
|
|
|
|
|
|
|
|
|
/// The `mergemaster` agent maps to the Mergemaster stage.
#[test]
fn pipeline_stage_detects_mergemaster() {
    assert_eq!(pipeline_stage("mergemaster"), PipelineStage::Mergemaster);
}
|
|
|
|
|
|
|
|
|
|
/// Agent names outside the coder/qa/mergemaster pipeline fall into Other.
#[test]
fn pipeline_stage_supervisor_is_other() {
    for name in ["supervisor", "default", "unknown"] {
        assert_eq!(pipeline_stage(name), PipelineStage::Other);
    }
}
|
|
|
|
|
|
|
|
|
|
// ── pipeline advance tests ────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn pipeline_advance_coder_gates_pass_moves_story_to_qa() {
|
|
|
|
|
use std::fs;
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path();
|
|
|
|
|
|
|
|
|
|
// Set up story in 2_current/
|
|
|
|
|
let current = root.join(".story_kit/work/2_current");
|
|
|
|
|
fs::create_dir_all(¤t).unwrap();
|
|
|
|
|
fs::write(current.join("50_story_test.md"), "test").unwrap();
|
|
|
|
|
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
pool.inject_test_agent_with_completion(
|
|
|
|
|
"50_story_test",
|
|
|
|
|
"coder-1",
|
|
|
|
|
AgentStatus::Completed,
|
|
|
|
|
root.to_path_buf(),
|
|
|
|
|
CompletionReport {
|
|
|
|
|
summary: "done".to_string(),
|
|
|
|
|
gates_passed: true,
|
|
|
|
|
gate_output: String::new(),
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Call pipeline advance directly (bypasses background spawn for testing).
|
|
|
|
|
pool.run_pipeline_advance_for_completed_agent("50_story_test", "coder-1")
|
|
|
|
|
.await;
|
|
|
|
|
|
|
|
|
|
// Story should have moved to 3_qa/ (start_agent for qa will fail in tests but
|
|
|
|
|
// the file move happens before that).
|
|
|
|
|
assert!(
|
|
|
|
|
root.join(".story_kit/work/3_qa/50_story_test.md").exists(),
|
|
|
|
|
"story should be in 3_qa/"
|
|
|
|
|
);
|
|
|
|
|
assert!(
|
|
|
|
|
!current.join("50_story_test.md").exists(),
|
|
|
|
|
"story should not still be in 2_current/"
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn pipeline_advance_qa_gates_pass_moves_story_to_merge() {
|
|
|
|
|
use std::fs;
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path();
|
|
|
|
|
|
|
|
|
|
// Set up story in 3_qa/
|
|
|
|
|
let qa_dir = root.join(".story_kit/work/3_qa");
|
|
|
|
|
fs::create_dir_all(&qa_dir).unwrap();
|
|
|
|
|
fs::write(qa_dir.join("51_story_test.md"), "test").unwrap();
|
|
|
|
|
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
pool.inject_test_agent_with_completion(
|
|
|
|
|
"51_story_test",
|
|
|
|
|
"qa",
|
|
|
|
|
AgentStatus::Completed,
|
|
|
|
|
root.to_path_buf(),
|
|
|
|
|
CompletionReport {
|
|
|
|
|
summary: "QA done".to_string(),
|
|
|
|
|
gates_passed: true,
|
|
|
|
|
gate_output: String::new(),
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
pool.run_pipeline_advance_for_completed_agent("51_story_test", "qa")
|
|
|
|
|
.await;
|
|
|
|
|
|
|
|
|
|
// Story should have moved to 4_merge/
|
|
|
|
|
assert!(
|
|
|
|
|
root.join(".story_kit/work/4_merge/51_story_test.md").exists(),
|
|
|
|
|
"story should be in 4_merge/"
|
|
|
|
|
);
|
|
|
|
|
assert!(
|
|
|
|
|
!qa_dir.join("51_story_test.md").exists(),
|
|
|
|
|
"story should not still be in 3_qa/"
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn pipeline_advance_supervisor_does_not_advance() {
|
|
|
|
|
use std::fs;
|
|
|
|
|
let tmp = tempfile::tempdir().unwrap();
|
|
|
|
|
let root = tmp.path();
|
|
|
|
|
|
|
|
|
|
let current = root.join(".story_kit/work/2_current");
|
|
|
|
|
fs::create_dir_all(¤t).unwrap();
|
|
|
|
|
fs::write(current.join("52_story_test.md"), "test").unwrap();
|
|
|
|
|
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
pool.inject_test_agent_with_completion(
|
|
|
|
|
"52_story_test",
|
|
|
|
|
"supervisor",
|
|
|
|
|
AgentStatus::Completed,
|
|
|
|
|
root.to_path_buf(),
|
|
|
|
|
CompletionReport {
|
|
|
|
|
summary: "supervised".to_string(),
|
|
|
|
|
gates_passed: true,
|
|
|
|
|
gate_output: String::new(),
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
pool.run_pipeline_advance_for_completed_agent("52_story_test", "supervisor")
|
|
|
|
|
.await;
|
|
|
|
|
|
|
|
|
|
// Story should NOT have moved (supervisors don't advance pipeline)
|
|
|
|
|
assert!(
|
|
|
|
|
current.join("52_story_test.md").exists(),
|
|
|
|
|
"story should still be in 2_current/ for supervisor"
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 17:36:35 +00:00
|
|
|
// ── move_story_to_merge tests ──────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// A story in 2_current/ can be moved straight to 4_merge/.
#[test]
fn move_story_to_merge_moves_file() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let current = root.join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("20_story_foo.md"), "test").unwrap();

    move_story_to_merge(root, "20_story_foo").unwrap();

    assert!(!current.join("20_story_foo.md").exists());
    assert!(root.join(".story_kit/work/4_merge/20_story_foo.md").exists());
}
|
|
|
|
|
|
2026-02-23 13:13:41 +00:00
|
|
|
/// A story already promoted to 3_qa/ is also accepted and moved to 4_merge/.
#[test]
fn move_story_to_merge_from_qa_dir() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let qa_dir = root.join(".story_kit/work/3_qa");
    fs::create_dir_all(&qa_dir).unwrap();
    fs::write(qa_dir.join("40_story_test.md"), "test").unwrap();

    move_story_to_merge(root, "40_story_test").unwrap();

    assert!(!qa_dir.join("40_story_test.md").exists());
    assert!(root.join(".story_kit/work/4_merge/40_story_test.md").exists());
}
|
|
|
|
|
|
2026-02-20 17:36:35 +00:00
|
|
|
/// Moving a story that is already in 4_merge/ is a safe no-op.
#[test]
fn move_story_to_merge_idempotent_when_already_in_merge() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let merge_dir = tmp.path().join(".story_kit/work/4_merge");
    fs::create_dir_all(&merge_dir).unwrap();
    fs::write(merge_dir.join("21_story_test.md"), "test").unwrap();

    move_story_to_merge(tmp.path(), "21_story_test").unwrap();

    assert!(merge_dir.join("21_story_test.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// Unknown stories are a hard error naming the directories that were searched.
#[test]
fn move_story_to_merge_errors_when_not_in_current_or_qa() {
    let tmp = tempfile::tempdir().unwrap();
    let err = move_story_to_merge(tmp.path(), "99_nonexistent").unwrap_err();
    assert!(err.contains("not found in work/2_current/ or work/3_qa/"));
}
|
|
|
|
|
|
2026-02-20 17:44:06 +00:00
|
|
|
// ── move_story_to_qa tests ────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// A story in 2_current/ moves to 3_qa/.
#[test]
fn move_story_to_qa_moves_file() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let current = root.join(".story_kit/work/2_current");
    fs::create_dir_all(&current).unwrap();
    fs::write(current.join("30_story_qa.md"), "test").unwrap();

    move_story_to_qa(root, "30_story_qa").unwrap();

    assert!(!current.join("30_story_qa.md").exists());
    assert!(root.join(".story_kit/work/3_qa/30_story_qa.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// Moving a story that is already in 3_qa/ is a safe no-op.
#[test]
fn move_story_to_qa_idempotent_when_already_in_qa() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let qa_dir = tmp.path().join(".story_kit/work/3_qa");
    fs::create_dir_all(&qa_dir).unwrap();
    fs::write(qa_dir.join("31_story_test.md"), "test").unwrap();

    move_story_to_qa(tmp.path(), "31_story_test").unwrap();

    assert!(qa_dir.join("31_story_test.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// A story absent from 2_current/ produces a descriptive error.
#[test]
fn move_story_to_qa_errors_when_not_in_current() {
    let tmp = tempfile::tempdir().unwrap();
    let err = move_story_to_qa(tmp.path(), "99_nonexistent").unwrap_err();
    assert!(err.contains("not found in work/2_current/"));
}
|
|
|
|
|
|
2026-02-20 19:39:19 +00:00
|
|
|
// ── move_story_to_archived tests ──────────────────────────────────────────
|
2026-02-20 17:36:35 +00:00
|
|
|
|
|
|
|
|
/// Archival finds a story sitting in 4_merge/ and moves it to 5_archived/.
#[test]
fn move_story_to_archived_finds_in_merge_dir() {
    use std::fs;

    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path();
    let merge_dir = root.join(".story_kit/work/4_merge");
    fs::create_dir_all(&merge_dir).unwrap();
    fs::write(merge_dir.join("22_story_test.md"), "test").unwrap();

    move_story_to_archived(root, "22_story_test").unwrap();

    assert!(!merge_dir.join("22_story_test.md").exists());
    assert!(root.join(".story_kit/work/5_archived/22_story_test.md").exists());
}
|
|
|
|
|
|
|
|
|
|
/// Archiving an unknown story errors, and the message mentions 4_merge.
#[test]
fn move_story_to_archived_error_when_not_in_current_or_merge() {
    let tmp = tempfile::tempdir().unwrap();
    let err = move_story_to_archived(tmp.path(), "99_nonexistent").unwrap_err();
    assert!(err.contains("4_merge"));
}
|
|
|
|
|
|
|
|
|
|
// ── merge_agent_work tests ────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn merge_agent_work_returns_error_when_branch_not_found() {
|
|
|
|
|
use tempfile::tempdir;
|
|
|
|
|
|
|
|
|
|
let tmp = tempdir().unwrap();
|
|
|
|
|
let repo = tmp.path();
|
|
|
|
|
init_git_repo(repo);
|
|
|
|
|
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
// branch feature/story-99_nonexistent does not exist
|
|
|
|
|
let result = pool
|
|
|
|
|
.merge_agent_work(repo, "99_nonexistent")
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
// Should fail (no branch) — not panic
|
|
|
|
|
assert!(!result.success, "should fail when branch missing");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn merge_agent_work_succeeds_on_clean_branch() {
|
|
|
|
|
use std::fs;
|
|
|
|
|
use tempfile::tempdir;
|
|
|
|
|
|
|
|
|
|
let tmp = tempdir().unwrap();
|
|
|
|
|
let repo = tmp.path();
|
|
|
|
|
init_git_repo(repo);
|
|
|
|
|
|
|
|
|
|
// Create a feature branch with a commit
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["checkout", "-b", "feature/story-23_test"])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
fs::write(repo.join("feature.txt"), "feature content").unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["add", "."])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["commit", "-m", "add feature"])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
// Switch back to master (initial branch)
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["checkout", "master"])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
// Create the story file in 4_merge/ so we can test archival
|
|
|
|
|
let merge_dir = repo.join(".story_kit/work/4_merge");
|
|
|
|
|
fs::create_dir_all(&merge_dir).unwrap();
|
|
|
|
|
let story_file = merge_dir.join("23_test.md");
|
|
|
|
|
fs::write(&story_file, "---\nname: Test\ntest_plan: approved\n---\n").unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["add", "."])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["commit", "-m", "add story in merge"])
|
|
|
|
|
.current_dir(repo)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let pool = AgentPool::new(3001);
|
|
|
|
|
let report = pool.merge_agent_work(repo, "23_test").await.unwrap();
|
|
|
|
|
|
|
|
|
|
// Merge should succeed (gates will run but cargo/pnpm results will depend on env)
|
|
|
|
|
// At minimum the merge itself should succeed
|
|
|
|
|
assert!(!report.had_conflicts, "should have no conflicts");
|
|
|
|
|
// Note: gates_passed may be false in test env without Rust project, that's OK
|
|
|
|
|
// The important thing is the merge itself ran
|
|
|
|
|
assert!(
|
|
|
|
|
report.success || report.gate_output.contains("Failed to run") || !report.gates_passed,
|
|
|
|
|
"report should be coherent: {report:?}"
|
|
|
|
|
);
|
|
|
|
|
// Story should be archived if gates passed
|
|
|
|
|
if report.story_archived {
|
|
|
|
|
let archived = repo.join(".story_kit/work/5_archived/23_test.md");
|
|
|
|
|
assert!(archived.exists(), "archived file should exist");
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-02-23 12:59:55 +00:00
|
|
|
|
|
|
|
|
// ── run_project_tests tests ───────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// When an executable `script/test` exists and exits 0, the test gate passes
/// and the output names the script.
#[cfg(unix)]
#[test]
fn run_project_tests_uses_script_test_when_present_and_passes() {
    use std::fs;
    use std::os::unix::fs::PermissionsExt;
    use tempfile::tempdir;

    let tmp = tempdir().unwrap();
    let path = tmp.path();
    let script_test = path.join("script").join("test");
    fs::create_dir_all(script_test.parent().unwrap()).unwrap();
    fs::write(&script_test, "#!/usr/bin/env bash\necho 'all tests passed'\nexit 0\n").unwrap();

    // Mark the script executable.
    let mut perms = fs::metadata(&script_test).unwrap().permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&script_test, perms).unwrap();

    let (passed, output) = run_project_tests(path).unwrap();
    assert!(passed, "script/test exiting 0 should pass");
    assert!(output.contains("script/test"), "output should mention script/test");
}
|
|
|
|
|
|
|
|
|
|
/// A nonzero exit from `script/test` is reported as a gate failure.
#[cfg(unix)]
#[test]
fn run_project_tests_reports_failure_when_script_test_exits_nonzero() {
    use std::fs;
    use std::os::unix::fs::PermissionsExt;
    use tempfile::tempdir;

    let tmp = tempdir().unwrap();
    let path = tmp.path();
    let script_test = path.join("script").join("test");
    fs::create_dir_all(script_test.parent().unwrap()).unwrap();
    fs::write(&script_test, "#!/usr/bin/env bash\nexit 1\n").unwrap();

    // Mark the script executable.
    let mut perms = fs::metadata(&script_test).unwrap().permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&script_test, perms).unwrap();

    let (passed, output) = run_project_tests(path).unwrap();
    assert!(!passed, "script/test exiting 1 should fail");
    assert!(output.contains("script/test"), "output should mention script/test");
}
|
2026-02-23 13:40:12 +00:00
|
|
|
|
|
|
|
|
// ── run_coverage_gate tests ───────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
/// With no `script/test_coverage` present, the coverage gate passes and the
/// output says the script was not found.
#[cfg(unix)]
#[test]
fn coverage_gate_passes_when_script_absent() {
    use tempfile::tempdir;

    let tmp = tempdir().unwrap();
    let (passed, output) = run_coverage_gate(tmp.path()).unwrap();
    assert!(passed, "coverage gate should pass when script is absent");
    assert!(output.contains("not found"), "output should mention script not found");
}
|
|
|
|
|
|
|
|
|
|
/// An executable `script/test_coverage` that exits 0 makes the gate pass.
#[cfg(unix)]
#[test]
fn coverage_gate_passes_when_script_exits_zero() {
    use std::fs;
    use std::os::unix::fs::PermissionsExt;
    use tempfile::tempdir;

    let tmp = tempdir().unwrap();
    let path = tmp.path();
    let script = path.join("script").join("test_coverage");
    fs::create_dir_all(script.parent().unwrap()).unwrap();
    fs::write(
        &script,
        "#!/usr/bin/env bash\necho 'Rust line coverage: 85%'\necho 'PASS: Coverage 85% meets threshold 0%'\nexit 0\n",
    )
    .unwrap();

    // Mark the script executable.
    let mut perms = fs::metadata(&script).unwrap().permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&script, perms).unwrap();

    let (passed, output) = run_coverage_gate(path).unwrap();
    assert!(passed, "coverage gate should pass when script exits 0");
    assert!(
        output.contains("script/test_coverage"),
        "output should mention script/test_coverage"
    );
}
|
|
|
|
|
|
|
|
|
|
/// A nonzero exit from `script/test_coverage` makes the gate fail.
#[cfg(unix)]
#[test]
fn coverage_gate_fails_when_script_exits_nonzero() {
    use std::fs;
    use std::os::unix::fs::PermissionsExt;
    use tempfile::tempdir;

    let tmp = tempdir().unwrap();
    let path = tmp.path();
    let script = path.join("script").join("test_coverage");
    fs::create_dir_all(script.parent().unwrap()).unwrap();
    fs::write(
        &script,
        "#!/usr/bin/env bash\necho 'FAIL: Coverage 40% is below threshold 80%'\nexit 1\n",
    )
    .unwrap();

    // Mark the script executable.
    let mut perms = fs::metadata(&script).unwrap().permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&script, perms).unwrap();

    let (passed, output) = run_coverage_gate(path).unwrap();
    assert!(!passed, "coverage gate should fail when script exits 1");
    assert!(
        output.contains("script/test_coverage"),
        "output should mention script/test_coverage"
    );
}
|
2026-02-20 13:16:04 +00:00
|
|
|
}
|