2026-02-19 17:58:53 +00:00
|
|
|
use crate::config::ProjectConfig;
|
2026-03-20 12:26:02 +00:00
|
|
|
use crate::slog;
|
2026-02-19 17:58:53 +00:00
|
|
|
use std::path::{Path, PathBuf};
|
|
|
|
|
use std::process::Command;
|
|
|
|
|
|
2026-02-20 13:24:35 +00:00
|
|
|
/// Write a `.mcp.json` file in the given directory pointing to the MCP server
/// at the given port.
///
/// Errors with a human-readable message if the file cannot be written.
pub fn write_mcp_json(dir: &Path, port: u16) -> Result<(), String> {
    // The document is tiny and fixed-shape, so it is templated directly
    // rather than built through a JSON serializer.
    let body = format!(
        "{{\n  \"mcpServers\": {{\n    \"storkit\": {{\n      \"type\": \"http\",\n      \"url\": \"http://localhost:{port}/mcp\"\n    }}\n  }}\n}}\n"
    );
    let target = dir.join(".mcp.json");
    std::fs::write(&target, body).map_err(|e| format!("Write .mcp.json: {e}"))
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Metadata describing a story's git worktree.
#[derive(Debug, Clone)]
pub struct WorktreeInfo {
    /// Path to the worktree directory (`{project_root}/.storkit/worktrees/{story_id}`).
    pub path: PathBuf,
    /// Feature branch checked out in the worktree (`feature/story-{story_id}`).
    pub branch: String,
    /// Branch the worktree forked from — the project root's current branch
    /// at creation time (falls back to "master" if detection fails).
    pub base_branch: String,
}
|
|
|
|
|
|
2026-02-20 14:09:52 +00:00
|
|
|
/// One entry returned by `list_worktrees`.
#[derive(Debug, Clone)]
pub struct WorktreeListEntry {
    /// Story ID, taken verbatim from the worktree directory's name.
    pub story_id: String,
    /// Path to the worktree directory.
    pub path: PathBuf,
}
|
|
|
|
|
|
2026-03-20 11:34:53 +00:00
|
|
|
/// Worktree path inside the project: `{project_root}/.storkit/worktrees/{story_id}`.
pub fn worktree_path(project_root: &Path, story_id: &str) -> PathBuf {
    let mut path = project_root.to_path_buf();
    path.push(".storkit");
    path.push("worktrees");
    path.push(story_id);
    path
}
|
|
|
|
|
|
|
|
|
|
/// Feature branch name for a story: `feature/story-{story_id}`.
fn branch_name(story_id: &str) -> String {
    let mut name = String::from("feature/story-");
    name.push_str(story_id);
    name
}
|
|
|
|
|
|
2026-02-20 12:48:50 +00:00
|
|
|
/// Detect the current branch of the project root (the base branch worktrees fork from).
///
/// Falls back to `"master"` when git cannot be spawned or the directory is
/// not a git repository.
fn detect_base_branch(project_root: &Path) -> String {
    let result = Command::new("git")
        .args(["rev-parse", "--abbrev-ref", "HEAD"])
        .current_dir(project_root)
        .output();
    match result {
        Ok(out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).trim().to_string()
        }
        // git missing, command failed, or not a repo — use the fallback.
        _ => "master".to_string(),
    }
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Create a git worktree for the given story.
|
|
|
|
|
///
|
2026-03-20 11:34:53 +00:00
|
|
|
/// - Creates the worktree at `{project_root}/.storkit/worktrees/{story_id}`
|
2026-02-19 17:58:53 +00:00
|
|
|
/// on branch `feature/story-{story_id}`.
|
2026-02-20 13:24:35 +00:00
|
|
|
/// - Writes `.mcp.json` in the worktree pointing to the MCP server at `port`.
|
2026-02-19 17:58:53 +00:00
|
|
|
/// - Runs setup commands from the config for each component.
|
|
|
|
|
/// - If the worktree/branch already exists, reuses rather than errors.
|
|
|
|
|
pub async fn create_worktree(
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
config: &ProjectConfig,
|
2026-02-20 13:24:35 +00:00
|
|
|
port: u16,
|
2026-02-19 17:58:53 +00:00
|
|
|
) -> Result<WorktreeInfo, String> {
|
|
|
|
|
let wt_path = worktree_path(project_root, story_id);
|
|
|
|
|
let branch = branch_name(story_id);
|
2026-02-20 12:48:50 +00:00
|
|
|
let base_branch = detect_base_branch(project_root);
|
2026-02-19 17:58:53 +00:00
|
|
|
let root = project_root.to_path_buf();
|
|
|
|
|
|
2026-02-23 16:36:15 +00:00
|
|
|
// Already exists — reuse (ensure sparse checkout is configured)
|
2026-02-19 17:58:53 +00:00
|
|
|
if wt_path.exists() {
|
2026-02-23 16:36:15 +00:00
|
|
|
let wt_clone = wt_path.clone();
|
|
|
|
|
tokio::task::spawn_blocking(move || configure_sparse_checkout(&wt_clone))
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("spawn_blocking: {e}"))??;
|
2026-02-20 13:24:35 +00:00
|
|
|
write_mcp_json(&wt_path, port)?;
|
2026-02-24 23:29:56 +00:00
|
|
|
run_setup_commands(&wt_path, config).await;
|
2026-02-19 17:58:53 +00:00
|
|
|
return Ok(WorktreeInfo {
|
|
|
|
|
path: wt_path,
|
|
|
|
|
branch,
|
2026-02-20 12:48:50 +00:00
|
|
|
base_branch,
|
2026-02-19 17:58:53 +00:00
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let wt = wt_path.clone();
|
|
|
|
|
let br = branch.clone();
|
|
|
|
|
|
|
|
|
|
tokio::task::spawn_blocking(move || create_worktree_sync(&root, &wt, &br))
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("spawn_blocking: {e}"))??;
|
|
|
|
|
|
2026-02-20 13:24:35 +00:00
|
|
|
write_mcp_json(&wt_path, port)?;
|
2026-02-24 23:29:56 +00:00
|
|
|
run_setup_commands(&wt_path, config).await;
|
2026-02-19 17:58:53 +00:00
|
|
|
|
|
|
|
|
Ok(WorktreeInfo {
|
|
|
|
|
path: wt_path,
|
|
|
|
|
branch,
|
2026-02-20 12:48:50 +00:00
|
|
|
base_branch,
|
2026-02-19 17:58:53 +00:00
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-20 12:26:02 +00:00
|
|
|
/// Blocking implementation of worktree creation.
///
/// Steps, in order:
/// 1. Ensure the parent directory for `wt_path` exists.
/// 2. `git worktree prune` (best effort) to drop stale references.
/// 3. `git branch {branch}` (best effort — an existing branch is fine).
/// 4. `git worktree add {wt_path} {branch}`; stderr containing
///    "already checked out" or "already exists" is treated as success (reuse).
///
/// # Errors
/// Returns `Err` if directory creation fails, git cannot be spawned for
/// `worktree add`, or `worktree add` fails for any non-"already exists" reason.
fn create_worktree_sync(project_root: &Path, wt_path: &Path, branch: &str) -> Result<(), String> {
    // Ensure the parent directory exists
    if let Some(parent) = wt_path.parent() {
        std::fs::create_dir_all(parent).map_err(|e| format!("Create worktree dir: {e}"))?;
    }

    // Prune stale worktree references (e.g. directories deleted externally)
    let _ = Command::new("git")
        .args(["worktree", "prune"])
        .current_dir(project_root)
        .output();

    // Try to create branch. If it already exists that's fine.
    let _ = Command::new("git")
        .args(["branch", branch])
        .current_dir(project_root)
        .output();

    // Create worktree
    let output = Command::new("git")
        .args(["worktree", "add", &wt_path.to_string_lossy(), branch])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("git worktree add: {e}"))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        // If it says already checked out, that's fine
        if stderr.contains("already checked out") || stderr.contains("already exists") {
            return Ok(());
        }
        return Err(format!("git worktree add failed: {stderr}"));
    }

    // Configure worktree isolation for .storkit/work/. Currently a no-op —
    // see `configure_sparse_checkout` for why earlier sparse-checkout
    // approaches were abandoned.
    configure_sparse_checkout(wt_path)?;

    Ok(())
}
|
|
|
|
|
|
2026-03-20 11:34:53 +00:00
|
|
|
/// Placeholder for worktree isolation of `.storkit/work/`.
///
/// Previous approaches (sparse checkout, skip-worktree) all leaked state
/// from worktrees back to the main checkout's config/index. For now this
/// is a no-op — merge conflicts from pipeline file moves are handled at
/// merge time by the mergemaster (squash merge ignores work/ diffs).
///
/// NOTE(review): kept fallible and in the call graph — presumably so a
/// future isolation strategy can slot in without touching call sites;
/// confirm before removing.
fn configure_sparse_checkout(_wt_path: &Path) -> Result<(), String> {
    Ok(())
}
|
|
|
|
|
|
2026-02-26 14:58:52 +00:00
|
|
|
/// Remove the git worktree for a story if it exists, deriving the path and
|
|
|
|
|
/// branch name deterministically from `project_root` and `story_id`.
|
|
|
|
|
///
|
|
|
|
|
/// Returns `Ok(())` if the worktree was removed or did not exist.
|
|
|
|
|
/// Removal is best-effort: `remove_worktree_sync` logs failures internally
|
|
|
|
|
/// but always returns `Ok`.
|
|
|
|
|
pub fn prune_worktree_sync(project_root: &Path, story_id: &str) -> Result<(), String> {
|
|
|
|
|
let wt_path = worktree_path(project_root, story_id);
|
|
|
|
|
if !wt_path.exists() {
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
let branch = branch_name(story_id);
|
|
|
|
|
remove_worktree_sync(project_root, &wt_path, &branch)
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-19 17:58:53 +00:00
|
|
|
/// Remove a git worktree and its branch.
|
|
|
|
|
pub async fn remove_worktree(
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
info: &WorktreeInfo,
|
|
|
|
|
config: &ProjectConfig,
|
|
|
|
|
) -> Result<(), String> {
|
|
|
|
|
run_teardown_commands(&info.path, config).await?;
|
|
|
|
|
|
|
|
|
|
let root = project_root.to_path_buf();
|
|
|
|
|
let wt_path = info.path.clone();
|
|
|
|
|
let branch = info.branch.clone();
|
|
|
|
|
|
|
|
|
|
tokio::task::spawn_blocking(move || remove_worktree_sync(&root, &wt_path, &branch))
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("spawn_blocking: {e}"))?
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 14:09:52 +00:00
|
|
|
/// Remove a git worktree by story ID, deriving the path and branch deterministically.
|
|
|
|
|
pub async fn remove_worktree_by_story_id(
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
config: &ProjectConfig,
|
|
|
|
|
) -> Result<(), String> {
|
|
|
|
|
let path = worktree_path(project_root, story_id);
|
|
|
|
|
if !path.exists() {
|
|
|
|
|
return Err(format!("Worktree not found for story: {story_id}"));
|
|
|
|
|
}
|
|
|
|
|
let branch = branch_name(story_id);
|
|
|
|
|
let base_branch = detect_base_branch(project_root);
|
|
|
|
|
let info = WorktreeInfo {
|
|
|
|
|
path,
|
|
|
|
|
branch,
|
|
|
|
|
base_branch,
|
|
|
|
|
};
|
|
|
|
|
remove_worktree(project_root, &info, config).await
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-20 11:34:53 +00:00
|
|
|
/// Find the worktree path for a given story ID, if it exists.
///
/// Returns `Some({project_root}/.storkit/worktrees/{story_id})` when that
/// directory exists, `None` otherwise.
pub fn find_worktree_path(project_root: &Path, story_id: &str) -> Option<PathBuf> {
    let wt_path = project_root
        .join(".storkit")
        .join("worktrees")
        .join(story_id);
    if wt_path.is_dir() {
        Some(wt_path)
    } else {
        None
    }
}
|
|
|
|
|
|
2026-02-20 14:09:52 +00:00
|
|
|
pub fn list_worktrees(project_root: &Path) -> Result<Vec<WorktreeListEntry>, String> {
|
2026-03-20 11:34:53 +00:00
|
|
|
let worktrees_dir = project_root.join(".storkit").join("worktrees");
|
2026-02-20 14:09:52 +00:00
|
|
|
if !worktrees_dir.exists() {
|
|
|
|
|
return Ok(Vec::new());
|
|
|
|
|
}
|
|
|
|
|
let mut entries = Vec::new();
|
2026-03-20 12:26:02 +00:00
|
|
|
for entry in std::fs::read_dir(&worktrees_dir).map_err(|e| format!("list worktrees: {e}"))? {
|
2026-02-20 14:09:52 +00:00
|
|
|
let entry = entry.map_err(|e| format!("list worktrees entry: {e}"))?;
|
|
|
|
|
let path = entry.path();
|
|
|
|
|
if path.is_dir() {
|
|
|
|
|
let story_id = path
|
|
|
|
|
.file_name()
|
|
|
|
|
.map(|n| n.to_string_lossy().to_string())
|
|
|
|
|
.unwrap_or_default();
|
|
|
|
|
entries.push(WorktreeListEntry { story_id, path });
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
entries.sort_by(|a, b| a.story_id.cmp(&b.story_id));
|
|
|
|
|
Ok(entries)
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-20 12:26:02 +00:00
|
|
|
fn remove_worktree_sync(project_root: &Path, wt_path: &Path, branch: &str) -> Result<(), String> {
|
2026-02-19 17:58:53 +00:00
|
|
|
// Remove worktree
|
|
|
|
|
let output = Command::new("git")
|
2026-03-20 12:26:02 +00:00
|
|
|
.args(["worktree", "remove", "--force", &wt_path.to_string_lossy()])
|
2026-02-19 17:58:53 +00:00
|
|
|
.current_dir(project_root)
|
|
|
|
|
.output()
|
|
|
|
|
.map_err(|e| format!("git worktree remove: {e}"))?;
|
|
|
|
|
|
|
|
|
|
if !output.status.success() {
|
|
|
|
|
let stderr = String::from_utf8_lossy(&output.stderr);
|
feat(story-93): expose server logs to agents via get_server_logs MCP tool
- Add log_buffer module: bounded 1000-line ring buffer with push/get_recent API
- Add slog! macro: drop-in for eprintln! that also captures to ring buffer
- Replace all eprintln! calls across agents, watcher, search, chat, worktree, claude_code with slog!
- Add get_server_logs MCP tool: accepts count (1-500) and optional filter params
- 5 unit tests for log_buffer covering push/retrieve, eviction, filtering, count limits, empty buffer
- 262 tests passing, clippy clean
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-02-23 20:38:19 +00:00
|
|
|
slog!("[worktree] remove warning: {stderr}");
|
2026-02-19 17:58:53 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Delete branch (best effort)
|
|
|
|
|
let _ = Command::new("git")
|
|
|
|
|
.args(["branch", "-d", branch])
|
|
|
|
|
.current_dir(project_root)
|
|
|
|
|
.output();
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-24 23:29:56 +00:00
|
|
|
async fn run_setup_commands(wt_path: &Path, config: &ProjectConfig) {
|
2026-02-19 17:58:53 +00:00
|
|
|
for component in &config.component {
|
|
|
|
|
let cmd_dir = wt_path.join(&component.path);
|
|
|
|
|
for cmd in &component.setup {
|
2026-02-24 23:29:56 +00:00
|
|
|
if let Err(e) = run_shell_command(cmd, &cmd_dir).await {
|
|
|
|
|
slog!("[worktree] setup warning for {}: {e}", component.name);
|
|
|
|
|
}
|
2026-02-19 17:58:53 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async fn run_teardown_commands(wt_path: &Path, config: &ProjectConfig) -> Result<(), String> {
|
|
|
|
|
for component in &config.component {
|
|
|
|
|
let cmd_dir = wt_path.join(&component.path);
|
|
|
|
|
for cmd in &component.teardown {
|
|
|
|
|
// Best effort — don't fail teardown
|
|
|
|
|
if let Err(e) = run_shell_command(cmd, &cmd_dir).await {
|
feat(story-93): expose server logs to agents via get_server_logs MCP tool
- Add log_buffer module: bounded 1000-line ring buffer with push/get_recent API
- Add slog! macro: drop-in for eprintln! that also captures to ring buffer
- Replace all eprintln! calls across agents, watcher, search, chat, worktree, claude_code with slog!
- Add get_server_logs MCP tool: accepts count (1-500) and optional filter params
- 5 unit tests for log_buffer covering push/retrieve, eviction, filtering, count limits, empty buffer
- 262 tests passing, clippy clean
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-02-23 20:38:19 +00:00
|
|
|
slog!("[worktree] teardown warning for {}: {e}", component.name);
|
2026-02-19 17:58:53 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 13:24:35 +00:00
|
|
|
async fn run_shell_command(cmd: &str, cwd: &Path) -> Result<(), String> {
|
|
|
|
|
let cmd = cmd.to_string();
|
|
|
|
|
let cwd = cwd.to_path_buf();
|
|
|
|
|
|
|
|
|
|
tokio::task::spawn_blocking(move || {
|
feat(story-93): expose server logs to agents via get_server_logs MCP tool
- Add log_buffer module: bounded 1000-line ring buffer with push/get_recent API
- Add slog! macro: drop-in for eprintln! that also captures to ring buffer
- Replace all eprintln! calls across agents, watcher, search, chat, worktree, claude_code with slog!
- Add get_server_logs MCP tool: accepts count (1-500) and optional filter params
- 5 unit tests for log_buffer covering push/retrieve, eviction, filtering, count limits, empty buffer
- 262 tests passing, clippy clean
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-02-23 20:38:19 +00:00
|
|
|
slog!("[worktree] Running: {cmd} in {}", cwd.display());
|
2026-02-20 13:24:35 +00:00
|
|
|
let output = Command::new("sh")
|
|
|
|
|
.args(["-c", &cmd])
|
|
|
|
|
.current_dir(&cwd)
|
|
|
|
|
.output()
|
|
|
|
|
.map_err(|e| format!("Run '{cmd}': {e}"))?;
|
|
|
|
|
|
|
|
|
|
if !output.status.success() {
|
|
|
|
|
let stderr = String::from_utf8_lossy(&output.stderr);
|
|
|
|
|
return Err(format!("Command '{cmd}' failed: {stderr}"));
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
})
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| format!("spawn_blocking: {e}"))?
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 13:17:20 +00:00
|
|
|
#[cfg(test)]
|
|
|
|
|
mod tests {
|
|
|
|
|
use super::*;
|
2026-02-26 12:16:07 +00:00
|
|
|
use crate::config::{ComponentConfig, WatcherConfig};
|
2026-02-20 13:17:20 +00:00
|
|
|
use std::fs;
|
|
|
|
|
use tempfile::TempDir;
|
|
|
|
|
|
|
|
|
|
/// Initialise a bare-minimum git repo so worktree operations work.
|
|
|
|
|
fn init_git_repo(dir: &Path) {
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["init"])
|
|
|
|
|
.current_dir(dir)
|
|
|
|
|
.output()
|
|
|
|
|
.expect("git init");
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["commit", "--allow-empty", "-m", "init"])
|
|
|
|
|
.current_dir(dir)
|
|
|
|
|
.output()
|
|
|
|
|
.expect("git commit");
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 13:24:35 +00:00
|
|
|
#[test]
|
|
|
|
|
fn write_mcp_json_uses_given_port() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
write_mcp_json(tmp.path(), 4242).unwrap();
|
|
|
|
|
let content = std::fs::read_to_string(tmp.path().join(".mcp.json")).unwrap();
|
|
|
|
|
assert!(content.contains("http://localhost:4242/mcp"));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn write_mcp_json_default_port() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
write_mcp_json(tmp.path(), 3001).unwrap();
|
|
|
|
|
let content = std::fs::read_to_string(tmp.path().join(".mcp.json")).unwrap();
|
|
|
|
|
assert!(content.contains("http://localhost:3001/mcp"));
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 14:09:52 +00:00
|
|
|
#[test]
|
|
|
|
|
fn worktree_path_is_inside_project() {
|
|
|
|
|
let project_root = Path::new("/home/user/my-project");
|
|
|
|
|
let path = worktree_path(project_root, "42_my_story");
|
|
|
|
|
assert_eq!(
|
|
|
|
|
path,
|
2026-03-20 11:34:53 +00:00
|
|
|
Path::new("/home/user/my-project/.storkit/worktrees/42_my_story")
|
2026-02-20 14:09:52 +00:00
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn list_worktrees_empty_when_no_dir() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let entries = list_worktrees(tmp.path()).unwrap();
|
|
|
|
|
assert!(entries.is_empty());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn list_worktrees_returns_subdirs() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
2026-03-20 11:34:53 +00:00
|
|
|
let worktrees_dir = tmp.path().join(".storkit").join("worktrees");
|
2026-02-20 14:09:52 +00:00
|
|
|
fs::create_dir_all(worktrees_dir.join("42_story_a")).unwrap();
|
|
|
|
|
fs::create_dir_all(worktrees_dir.join("43_story_b")).unwrap();
|
|
|
|
|
// A file (not dir) — should be ignored
|
|
|
|
|
fs::write(worktrees_dir.join("readme.txt"), "").unwrap();
|
|
|
|
|
|
|
|
|
|
let entries = list_worktrees(tmp.path()).unwrap();
|
|
|
|
|
assert_eq!(entries.len(), 2);
|
|
|
|
|
assert_eq!(entries[0].story_id, "42_story_a");
|
|
|
|
|
assert_eq!(entries[1].story_id, "43_story_b");
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-20 13:17:20 +00:00
|
|
|
#[test]
|
|
|
|
|
fn create_worktree_after_stale_reference() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let wt_path = tmp.path().join("my-worktree");
|
|
|
|
|
let branch = "feature/test-stale";
|
|
|
|
|
|
|
|
|
|
// First creation should succeed
|
|
|
|
|
create_worktree_sync(&project_root, &wt_path, branch).unwrap();
|
|
|
|
|
assert!(wt_path.exists());
|
|
|
|
|
|
|
|
|
|
// Simulate external deletion (e.g., rm -rf by another agent)
|
|
|
|
|
fs::remove_dir_all(&wt_path).unwrap();
|
|
|
|
|
assert!(!wt_path.exists());
|
|
|
|
|
|
|
|
|
|
// Second creation should succeed despite stale git reference.
|
|
|
|
|
// Without `git worktree prune`, this fails with "already checked out"
|
|
|
|
|
// or "already exists".
|
|
|
|
|
let result = create_worktree_sync(&project_root, &wt_path, branch);
|
|
|
|
|
assert!(
|
|
|
|
|
result.is_ok(),
|
|
|
|
|
"Expected worktree creation to succeed after stale reference, got: {:?}",
|
|
|
|
|
result.err()
|
|
|
|
|
);
|
|
|
|
|
assert!(wt_path.exists());
|
|
|
|
|
}
|
2026-02-23 16:36:15 +00:00
|
|
|
|
|
|
|
|
#[test]
|
2026-02-23 17:47:34 +00:00
|
|
|
fn worktree_has_all_files_including_work() {
|
2026-02-23 16:36:15 +00:00
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
2026-03-20 11:34:53 +00:00
|
|
|
// Create a tracked file under .storkit/work/ on the initial branch
|
|
|
|
|
let work_dir = project_root.join(".storkit").join("work");
|
2026-02-23 16:36:15 +00:00
|
|
|
fs::create_dir_all(&work_dir).unwrap();
|
|
|
|
|
fs::write(work_dir.join("test_story.md"), "# Test").unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["add", "."])
|
|
|
|
|
.current_dir(&project_root)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
Command::new("git")
|
|
|
|
|
.args(["commit", "-m", "add work file"])
|
|
|
|
|
.current_dir(&project_root)
|
|
|
|
|
.output()
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let wt_path = tmp.path().join("my-worktree");
|
|
|
|
|
let branch = "feature/test-sparse";
|
|
|
|
|
create_worktree_sync(&project_root, &wt_path, branch).unwrap();
|
|
|
|
|
|
2026-03-20 11:34:53 +00:00
|
|
|
// Worktree should have all files including .storkit/work/
|
|
|
|
|
assert!(wt_path.join(".storkit").join("work").exists());
|
2026-02-23 17:41:07 +00:00
|
|
|
assert!(wt_path.join(".git").exists());
|
|
|
|
|
|
2026-02-23 17:47:34 +00:00
|
|
|
// Main checkout must NOT be affected by worktree creation.
|
2026-02-23 16:55:56 +00:00
|
|
|
assert!(
|
|
|
|
|
work_dir.exists(),
|
2026-03-20 11:34:53 +00:00
|
|
|
".storkit/work/ must still exist in the main checkout"
|
2026-02-23 16:55:56 +00:00
|
|
|
);
|
2026-02-23 16:36:15 +00:00
|
|
|
}
|
2026-02-24 00:26:49 +00:00
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn branch_name_format() {
|
|
|
|
|
assert_eq!(branch_name("42_my_story"), "feature/story-42_my_story");
|
|
|
|
|
assert_eq!(branch_name("1_test"), "feature/story-1_test");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn detect_base_branch_returns_branch_in_git_repo() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let branch = detect_base_branch(&project_root);
|
|
|
|
|
assert!(!branch.is_empty());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn detect_base_branch_falls_back_to_master_for_non_git_dir() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let branch = detect_base_branch(tmp.path());
|
|
|
|
|
assert_eq!(branch, "master");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn configure_sparse_checkout_is_noop() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
assert!(configure_sparse_checkout(tmp.path()).is_ok());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn run_shell_command_succeeds_for_echo() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let result = run_shell_command("echo hello", tmp.path()).await;
|
|
|
|
|
assert!(result.is_ok(), "Expected success: {:?}", result.err());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn run_shell_command_fails_for_nonzero_exit() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let result = run_shell_command("exit 1", tmp.path()).await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
assert!(result.unwrap_err().contains("failed"));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn run_setup_commands_no_components_succeeds() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
2026-02-24 23:29:56 +00:00
|
|
|
// Should complete without panic
|
|
|
|
|
run_setup_commands(tmp.path(), &config).await;
|
2026-02-24 00:26:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn run_setup_commands_runs_each_command_successfully() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![ComponentConfig {
|
|
|
|
|
name: "test".to_string(),
|
|
|
|
|
path: ".".to_string(),
|
|
|
|
|
setup: vec!["echo setup_ok".to_string()],
|
|
|
|
|
teardown: vec![],
|
|
|
|
|
}],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
2026-02-24 23:29:56 +00:00
|
|
|
// Should complete without panic
|
|
|
|
|
run_setup_commands(tmp.path(), &config).await;
|
2026-02-24 00:26:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
2026-02-24 23:29:56 +00:00
|
|
|
async fn run_setup_commands_ignores_failures() {
|
2026-02-24 00:26:49 +00:00
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![ComponentConfig {
|
|
|
|
|
name: "test".to_string(),
|
|
|
|
|
path: ".".to_string(),
|
|
|
|
|
setup: vec!["exit 1".to_string()],
|
|
|
|
|
teardown: vec![],
|
|
|
|
|
}],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
2026-02-24 23:29:56 +00:00
|
|
|
// Setup command failures are non-fatal — should not panic or propagate
|
|
|
|
|
run_setup_commands(tmp.path(), &config).await;
|
2026-02-24 00:26:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn run_teardown_commands_ignores_failures() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![ComponentConfig {
|
|
|
|
|
name: "test".to_string(),
|
|
|
|
|
path: ".".to_string(),
|
|
|
|
|
setup: vec![],
|
|
|
|
|
teardown: vec!["exit 1".to_string()],
|
|
|
|
|
}],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
|
|
|
|
// Teardown failures are best-effort — should not propagate
|
|
|
|
|
assert!(run_teardown_commands(tmp.path(), &config).await.is_ok());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn create_worktree_fresh_creates_dir_and_mcp_json() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
|
|
|
|
let info = create_worktree(&project_root, "42_fresh_test", &config, 3001)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
assert!(info.path.exists());
|
|
|
|
|
assert!(info.path.join(".mcp.json").exists());
|
|
|
|
|
let mcp = fs::read_to_string(info.path.join(".mcp.json")).unwrap();
|
|
|
|
|
assert!(mcp.contains("3001"));
|
|
|
|
|
assert_eq!(info.branch, "feature/story-42_fresh_test");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn create_worktree_reuses_existing_path_and_updates_port() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
|
|
|
|
// First creation
|
|
|
|
|
let _info1 = create_worktree(&project_root, "43_reuse_test", &config, 3001)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
// Second call — worktree already exists, reuse path, update port
|
|
|
|
|
let info2 = create_worktree(&project_root, "43_reuse_test", &config, 3002)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let mcp = fs::read_to_string(info2.path.join(".mcp.json")).unwrap();
|
2026-03-20 12:26:02 +00:00
|
|
|
assert!(
|
|
|
|
|
mcp.contains("3002"),
|
|
|
|
|
"MCP json should be updated to new port"
|
|
|
|
|
);
|
2026-02-24 00:26:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn remove_worktree_sync_cleans_up_directory() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let wt_path = project_root
|
2026-03-20 11:34:53 +00:00
|
|
|
.join(".storkit")
|
2026-02-24 00:26:49 +00:00
|
|
|
.join("worktrees")
|
|
|
|
|
.join("test_rm");
|
|
|
|
|
create_worktree_sync(&project_root, &wt_path, "feature/test-rm").unwrap();
|
|
|
|
|
assert!(wt_path.exists());
|
|
|
|
|
|
|
|
|
|
remove_worktree_sync(&project_root, &wt_path, "feature/test-rm").unwrap();
|
|
|
|
|
assert!(!wt_path.exists());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn remove_worktree_by_story_id_returns_err_when_not_found() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let result = remove_worktree_by_story_id(tmp.path(), "99_nonexistent", &config).await;
|
|
|
|
|
assert!(result.is_err());
|
|
|
|
|
assert!(
|
|
|
|
|
result
|
|
|
|
|
.unwrap_err()
|
|
|
|
|
.contains("Worktree not found for story: 99_nonexistent")
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn remove_worktree_by_story_id_removes_existing_worktree() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
|
|
|
|
create_worktree(&project_root, "88_remove_by_id", &config, 3001)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
2026-03-20 12:26:02 +00:00
|
|
|
let result = remove_worktree_by_story_id(&project_root, "88_remove_by_id", &config).await;
|
|
|
|
|
assert!(
|
|
|
|
|
result.is_ok(),
|
|
|
|
|
"Expected removal to succeed: {:?}",
|
|
|
|
|
result.err()
|
|
|
|
|
);
|
2026-02-24 00:26:49 +00:00
|
|
|
}
|
|
|
|
|
|
2026-02-26 14:58:52 +00:00
|
|
|
// ── prune_worktree_sync ──────────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn prune_worktree_sync_noop_when_no_worktree_dir() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
// No worktree directory exists — must return Ok without touching git.
|
|
|
|
|
let result = prune_worktree_sync(tmp.path(), "42_story_nonexistent");
|
2026-03-20 12:26:02 +00:00
|
|
|
assert!(
|
|
|
|
|
result.is_ok(),
|
|
|
|
|
"Expected Ok when worktree dir absent: {:?}",
|
|
|
|
|
result.err()
|
|
|
|
|
);
|
2026-02-26 14:58:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn prune_worktree_sync_removes_real_worktree() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let story_id = "55_story_prune_test";
|
|
|
|
|
let wt_path = worktree_path(&project_root, story_id);
|
2026-03-20 12:26:02 +00:00
|
|
|
create_worktree_sync(
|
|
|
|
|
&project_root,
|
|
|
|
|
&wt_path,
|
|
|
|
|
&format!("feature/story-{story_id}"),
|
|
|
|
|
)
|
|
|
|
|
.unwrap();
|
2026-02-26 14:58:52 +00:00
|
|
|
assert!(wt_path.exists(), "worktree dir should exist before prune");
|
|
|
|
|
|
|
|
|
|
let result = prune_worktree_sync(&project_root, story_id);
|
2026-03-20 12:26:02 +00:00
|
|
|
assert!(
|
|
|
|
|
result.is_ok(),
|
|
|
|
|
"prune_worktree_sync must return Ok: {:?}",
|
|
|
|
|
result.err()
|
|
|
|
|
);
|
2026-02-26 14:58:52 +00:00
|
|
|
assert!(!wt_path.exists(), "worktree dir should be gone after prune");
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-24 23:29:56 +00:00
|
|
|
#[tokio::test]
|
|
|
|
|
async fn create_worktree_succeeds_despite_setup_failure() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![ComponentConfig {
|
|
|
|
|
name: "broken-build".to_string(),
|
|
|
|
|
path: ".".to_string(),
|
|
|
|
|
setup: vec!["exit 1".to_string()],
|
|
|
|
|
teardown: vec![],
|
|
|
|
|
}],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 23:29:56 +00:00
|
|
|
};
|
|
|
|
|
// Even though setup commands fail, create_worktree must succeed
|
|
|
|
|
// so the agent can start and fix the problem itself.
|
|
|
|
|
let result = create_worktree(&project_root, "172_setup_fail", &config, 3001).await;
|
|
|
|
|
assert!(
|
|
|
|
|
result.is_ok(),
|
|
|
|
|
"create_worktree must succeed even if setup commands fail: {:?}",
|
|
|
|
|
result.err()
|
|
|
|
|
);
|
|
|
|
|
let info = result.unwrap();
|
|
|
|
|
assert!(info.path.exists());
|
|
|
|
|
assert!(info.path.join(".mcp.json").exists());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[tokio::test]
|
|
|
|
|
async fn create_worktree_reuse_succeeds_despite_setup_failure() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let empty_config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 23:29:56 +00:00
|
|
|
};
|
|
|
|
|
// First creation — no setup commands, should succeed
|
|
|
|
|
create_worktree(&project_root, "173_reuse_fail", &empty_config, 3001)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let failing_config = ProjectConfig {
|
|
|
|
|
component: vec![ComponentConfig {
|
|
|
|
|
name: "broken-build".to_string(),
|
|
|
|
|
path: ".".to_string(),
|
|
|
|
|
setup: vec!["exit 1".to_string()],
|
|
|
|
|
teardown: vec![],
|
|
|
|
|
}],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 23:29:56 +00:00
|
|
|
};
|
|
|
|
|
// Second call — worktree exists, setup commands fail, must still succeed
|
2026-03-20 12:26:02 +00:00
|
|
|
let result = create_worktree(&project_root, "173_reuse_fail", &failing_config, 3002).await;
|
2026-02-24 23:29:56 +00:00
|
|
|
assert!(
|
|
|
|
|
result.is_ok(),
|
|
|
|
|
"create_worktree reuse must succeed even if setup commands fail: {:?}",
|
|
|
|
|
result.err()
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-24 00:26:49 +00:00
|
|
|
#[tokio::test]
|
|
|
|
|
async fn remove_worktree_async_removes_directory() {
|
|
|
|
|
let tmp = TempDir::new().unwrap();
|
|
|
|
|
let project_root = tmp.path().join("my-project");
|
|
|
|
|
fs::create_dir_all(&project_root).unwrap();
|
|
|
|
|
init_git_repo(&project_root);
|
|
|
|
|
|
|
|
|
|
let config = ProjectConfig {
|
|
|
|
|
component: vec![],
|
|
|
|
|
agent: vec![],
|
2026-02-26 12:16:07 +00:00
|
|
|
watcher: WatcherConfig::default(),
|
2026-03-19 11:56:39 +00:00
|
|
|
default_qa: "server".to_string(),
|
2026-03-19 15:58:32 +00:00
|
|
|
default_coder_model: None,
|
|
|
|
|
max_coders: None,
|
2026-03-19 16:34:11 +00:00
|
|
|
max_retries: 2,
|
2026-02-24 00:26:49 +00:00
|
|
|
};
|
|
|
|
|
let info = create_worktree(&project_root, "77_remove_async", &config, 3001)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
|
|
let path = info.path.clone();
|
|
|
|
|
assert!(path.exists());
|
2026-03-20 12:26:02 +00:00
|
|
|
remove_worktree(&project_root, &info, &config)
|
|
|
|
|
.await
|
|
|
|
|
.unwrap();
|
2026-02-24 00:26:49 +00:00
|
|
|
assert!(!path.exists());
|
|
|
|
|
}
|
2026-02-20 13:17:20 +00:00
|
|
|
}
|