Story 60: Status-Based Directory Layout with work/ pipeline
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -27,13 +27,13 @@ You have these tools via the story-kit MCP server:
|
||||
- get_agent_output(story_id, agent_name, timeout_ms) - Poll agent output (returns recent events, call repeatedly)
|
||||
- list_agents() - See all running agents and their status
|
||||
- stop_agent(story_id, agent_name) - Stop a running agent
|
||||
- get_story_todos(story_id) - Get unchecked acceptance criteria for a story in current/
|
||||
- get_story_todos(story_id) - Get unchecked acceptance criteria for a story in work/2_current/
|
||||
- ensure_acceptance(story_id) - Check if a story passes acceptance gates
|
||||
|
||||
## Your Workflow
|
||||
1. Read CLAUDE.md and .story_kit/README.md to understand the project and dev process
|
||||
2. Read the story file from .story_kit/stories/ to understand requirements
|
||||
3. Move it to current/ if it is in upcoming/
|
||||
2. Read the story file from .story_kit/work/ to understand requirements
|
||||
3. Move it to work/2_current/ if it is in work/1_upcoming/
|
||||
4. Start coder-1 on the story: call start_agent with story_id="{{story_id}}" and agent_name="coder-1"
|
||||
5. Wait for completion: call wait_for_agent with story_id="{{story_id}}" and agent_name="coder-1". The coder will call report_completion when done, which runs acceptance gates automatically. wait_for_agent returns when the coder reports completion.
|
||||
6. Check the result: inspect the "completion" field in the wait_for_agent response — if gates_passed is true, the work is done; if false, review the gate_output and decide whether to start a fresh coder.
|
||||
@@ -54,7 +54,7 @@ role = "Full-stack engineer. Implements features across all components."
|
||||
model = "sonnet"
|
||||
max_turns = 50
|
||||
max_budget_usd = 5.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Pick up the story from .story_kit/stories/ - move it to current/ if needed. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: When all your work is committed, call report_completion as your FINAL action: report_completion(story_id='{{story_id}}', agent_name='{{agent_name}}', summary='<brief summary of what you implemented>'). The server will run cargo clippy and tests automatically to verify your work."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Pick up the story from .story_kit/work/ - move it to work/2_current/ if needed. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: When all your work is committed, call report_completion as your FINAL action: report_completion(story_id='{{story_id}}', agent_name='{{agent_name}}', summary='<brief summary of what you implemented>'). The server will run cargo clippy and tests automatically to verify your work."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Run cargo clippy and biome checks before considering work complete. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. ALWAYS call report_completion as your absolute final action after committing."
|
||||
|
||||
[[agent]]
|
||||
@@ -63,7 +63,7 @@ role = "Full-stack engineer. Implements features across all components."
|
||||
model = "sonnet"
|
||||
max_turns = 50
|
||||
max_budget_usd = 5.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Pick up the story from .story_kit/stories/ - move it to current/ if needed. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: When all your work is committed, call report_completion as your FINAL action: report_completion(story_id='{{story_id}}', agent_name='{{agent_name}}', summary='<brief summary of what you implemented>'). The server will run cargo clippy and tests automatically to verify your work."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Pick up the story from .story_kit/work/ - move it to work/2_current/ if needed. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: When all your work is committed, call report_completion as your FINAL action: report_completion(story_id='{{story_id}}', agent_name='{{agent_name}}', summary='<brief summary of what you implemented>'). The server will run cargo clippy and tests automatically to verify your work."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Run cargo clippy and biome checks before considering work complete. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. ALWAYS call report_completion as your absolute final action after committing."
|
||||
|
||||
[[agent]]
|
||||
@@ -72,5 +72,5 @@ role = "Full-stack engineer. Implements features across all components."
|
||||
model = "sonnet"
|
||||
max_turns = 50
|
||||
max_budget_usd = 5.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Pick up the story from .story_kit/stories/ - move it to current/ if needed. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: When all your work is committed, call report_completion as your FINAL action: report_completion(story_id='{{story_id}}', agent_name='{{agent_name}}', summary='<brief summary of what you implemented>'). The server will run cargo clippy and tests automatically to verify your work."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. Pick up the story from .story_kit/work/ - move it to work/2_current/ if needed. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: When all your work is committed, call report_completion as your FINAL action: report_completion(story_id='{{story_id}}', agent_name='{{agent_name}}', summary='<brief summary of what you implemented>'). The server will run cargo clippy and tests automatically to verify your work."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Run cargo clippy and biome checks before considering work complete. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. ALWAYS call report_completion as your absolute final action after committing."
|
||||
|
||||
@@ -1,115 +0,0 @@
|
||||
---
|
||||
name: MCP Server for Workflow API
|
||||
---
|
||||
|
||||
# Spike 1: MCP Server for Workflow API
|
||||
|
||||
## Question
|
||||
|
||||
Can we expose the Story Kit workflow API as MCP tools so that agents call enforced endpoints instead of manipulating files directly?
|
||||
|
||||
## Hypothesis
|
||||
|
||||
A thin stdio MCP server that proxies to the existing Rust HTTP API will let Claude Code agents use `create_story`, `validate_stories`, `record_tests`, and `ensure_acceptance` as native tools — with zero changes to the existing server.
|
||||
|
||||
## Timebox
|
||||
|
||||
2 hours
|
||||
|
||||
## Investigation Plan
|
||||
|
||||
1. Understand the MCP stdio protocol (JSON-RPC over stdin/stdout)
|
||||
2. Identify which workflow endpoints should become MCP tools
|
||||
3. Determine the best language/approach for the MCP server (Rust binary vs Node script vs Rust integrated into existing server)
|
||||
4. Prototype a minimal MCP server with one tool (`create_story`) and test it with `claude mcp add`
|
||||
5. Verify spawned agents (via `claude -p`) inherit MCP tools
|
||||
6. Evaluate whether we can restrict agents from writing to `.story_kit/stories/` directly
|
||||
|
||||
## Findings
|
||||
|
||||
### 1. MCP stdio protocol is simple
|
||||
JSON-RPC 2.0 over stdin/stdout. Three-phase: initialize handshake → tools/list → tools/call. A minimal server needs to handle ~3 message types. No HTTP, no sockets.
|
||||
|
||||
### 2. The `rmcp` Rust crate makes this trivial
|
||||
The official Rust SDK (`rmcp` 0.3) provides `#[tool]` and `#[tool_router]` macros that eliminate boilerplate. A tool is just an async function with typed parameters:
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Deserialize, schemars::JsonSchema)]
|
||||
pub struct CreateStoryRequest {
|
||||
#[schemars(description = "Human-readable story name")]
|
||||
pub name: String,
|
||||
#[schemars(description = "User story text")]
|
||||
pub user_story: Option<String>,
|
||||
#[schemars(description = "List of acceptance criteria")]
|
||||
pub acceptance_criteria: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[tool(description = "Create a new story with correct front matter in upcoming/")]
|
||||
async fn create_story(
|
||||
&self,
|
||||
Parameters(req): Parameters<CreateStoryRequest>,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let resp = self.client.post(&format!("{}/workflow/stories/create", self.api_url))
|
||||
.json(&req).send().await...;
|
||||
Ok(CallToolResult::success(vec![Content::text(resp.story_id)]))
|
||||
}
|
||||
```
|
||||
|
||||
Dependencies needed: `rmcp` (server, transport-io), `schemars`, `reqwest`, `tokio`, `serde`. We already use most of these in the existing server.
|
||||
|
||||
### 3. Architecture: separate binary, same workspace
|
||||
Best approach is a new binary crate (`story-kit-mcp`) in the workspace that:
|
||||
- Reads the API URL from env or CLI arg (default `http://localhost:3000/api`)
|
||||
- Proxies each MCP tool call to the corresponding HTTP endpoint
|
||||
- Returns the API response as tool output
|
||||
|
||||
This keeps the MCP layer thin and the enforcement logic in the existing server. No code duplication — the MCP binary is just a translation layer.
|
||||
|
||||
### 4. Which endpoints become tools
|
||||
|
||||
| MCP Tool | HTTP Endpoint | Why |
|
||||
|---|---|---|
|
||||
| `create_story` | POST /workflow/stories/create | Enforce front matter |
|
||||
| `validate_stories` | GET /workflow/stories/validate | Check all stories |
|
||||
| `record_tests` | POST /workflow/tests/record | Record test results |
|
||||
| `ensure_acceptance` | POST /workflow/acceptance/ensure | Gate story acceptance |
|
||||
| `collect_coverage` | POST /workflow/coverage/collect | Run + record coverage |
|
||||
| `get_story_todos` | GET /workflow/todos | See remaining work |
|
||||
| `list_upcoming` | GET /workflow/upcoming | See backlog |
|
||||
|
||||
### 5. Configuration via `.mcp.json` (project-scoped)
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"story-kit": {
|
||||
"type": "stdio",
|
||||
"command": "./target/release/story-kit-mcp",
|
||||
"args": ["--api-url", "http://localhost:${STORYKIT_PORT:-3000}/api"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
This gets checked into the repo. Every Claude Code session and every spawned agent inherits it automatically.
|
||||
|
||||
### 6. Agent restrictions
|
||||
Claude Code's `.claude/settings.local.json` can restrict which tools agents have access to. We could:
|
||||
- Give agents the MCP tools (`story-kit:create_story`, etc.)
|
||||
- Restrict or remove Write access to `.story_kit/stories/` paths
|
||||
- This forces agents through the API for all workflow actions
|
||||
|
||||
Caveat: tool restrictions are advisory in `settings.local.json` — agents with Bash access could still `echo > file`. Full enforcement requires removing Bash or scoping it (which is story 35's problem).
|
||||
|
||||
### 7. Effort estimate
|
||||
The MCP binary itself is ~200-300 lines of Rust. One afternoon of work. Most of the time would be testing the integration with agent spawning and worktrees.
|
||||
|
||||
## Recommendation
|
||||
|
||||
**Proceed with a story.** The spike confirms this is straightforward and high-value. The `rmcp` crate handles the protocol complexity, and our existing HTTP API already does the enforcement. The MCP server is just plumbing.
|
||||
|
||||
Suggested story scope:
|
||||
1. New `story-kit-mcp` binary crate in the workspace
|
||||
2. Expose the 7 tools listed above
|
||||
3. Add `.mcp.json` to the project
|
||||
4. Update agent spawn to ensure MCP tools are available in worktrees
|
||||
5. Test: spawn agent, verify it uses MCP tools instead of file writes
|
||||
@@ -1,129 +0,0 @@
|
||||
# Spike: Claude Code Integration via PTY + CLI
|
||||
|
||||
**Question:** Can we run Claude Code programmatically from our Rust backend while using Max subscription billing instead of per-token API billing?
|
||||
|
||||
**Hypothesis:** Spawning `claude -p` inside a pseudo-terminal (PTY) will make `isatty()` return true, causing Claude Code to use Max subscription billing while giving us structured JSON output.
|
||||
|
||||
**Timebox:** 2 hours
|
||||
|
||||
**Result: HYPOTHESIS CONFIRMED**
|
||||
|
||||
---
|
||||
|
||||
## Proof
|
||||
|
||||
Spawning `claude -p "hi" --output-format stream-json --verbose` inside a PTY from Rust (`portable-pty` crate) produces:
|
||||
|
||||
```json
|
||||
{"type":"system","subtype":"init","apiKeySource":"none","model":"claude-opus-4-6",...}
|
||||
{"type":"rate_limit_event","rate_limit_info":{"status":"allowed","rateLimitType":"five_hour",...}}
|
||||
{"type":"assistant","message":{"model":"claude-opus-4-6","content":[{"type":"text","text":"Hi! How can I help you today?"}],...}}
|
||||
{"type":"result","subtype":"success","total_cost_usd":0.0102,...}
|
||||
```
|
||||
|
||||
Key evidence:
|
||||
- **`apiKeySource: "none"`** — not using an API key
|
||||
- **`rateLimitType: "five_hour"`** — Max subscription rate limiting (not per-token)
|
||||
- **`model: "claude-opus-4-6"`** — Opus on Max plan
|
||||
- Clean NDJSON output, parseable from Rust
|
||||
- Response streamed to browser UI via WebSocket
|
||||
|
||||
## Architecture (Proven)
|
||||
|
||||
```
|
||||
Browser UI → WebSocket → Rust Backend → PTY → claude -p --output-format stream-json
|
||||
↑
|
||||
isatty() = true → Max subscription billing
|
||||
```
|
||||
|
||||
## What Works
|
||||
|
||||
1. `portable-pty` crate spawns Claude Code in a PTY from Rust
|
||||
2. `-p` flag gives single-shot non-interactive mode (no TUI)
|
||||
3. `--output-format stream-json` gives clean NDJSON (no ANSI escapes)
|
||||
4. PTY makes `isatty()` return true → Max billing
|
||||
5. NDJSON events parsed and streamed to frontend via WebSocket
|
||||
6. Session IDs returned for potential multi-turn via `--resume`
|
||||
|
||||
## Event Types from stream-json
|
||||
|
||||
| Type | Purpose | Key Fields |
|
||||
|------|---------|------------|
|
||||
| `system` | Init event | `session_id`, `model`, `apiKeySource`, `tools`, `agents` |
|
||||
| `rate_limit_event` | Billing info | `status`, `rateLimitType` |
|
||||
| `assistant` | Claude's response | `message.content[].text` |
|
||||
| `result` | Final summary | `total_cost_usd`, `usage`, `duration_ms` |
|
||||
| `stream_event` | Token deltas (with `--include-partial-messages`) | `event.delta.text` |
|
||||
|
||||
## Multi-Agent Concurrency (Proven)
|
||||
|
||||
Created an `AgentPool` with REST API (`POST /api/agents`, `POST /api/agents/:name/message`, `GET /api/agents`) and tested 2 concurrent coding agents:
|
||||
|
||||
**Test:** Created `coder-1` (frontend role) and `coder-2` (backend role), sent both messages simultaneously.
|
||||
|
||||
```
|
||||
coder-1: Listed 5 React components in 5s (session: ca3e13fc-...)
|
||||
coder-2: Listed 30 Rust source files in 8s (session: 8a815cf0-...)
|
||||
Both: apiKeySource: "none", rateLimitType: "five_hour" (Max billing)
|
||||
```
|
||||
|
||||
**Session resumption confirmed:** Sent coder-1 a follow-up "How many components did you just list?" — it answered "5" using `--resume <session_id>`.
|
||||
|
||||
**What this proves:**
|
||||
- Multiple PTY sessions run concurrently without conflict
|
||||
- Each gets Max subscription billing independently
|
||||
- `--resume` gives agents multi-turn conversation memory
|
||||
- Supervisor pattern works: coordinator reads agent responses, sends coordinated tasks
|
||||
- Inter-agent communication possible via supervisor relay
|
||||
|
||||
**Architecture for multi-agent orchestration:**
|
||||
- Spawn N PTY sessions, each with `claude -p` pointed at a different worktree
|
||||
- Rust backend coordinates work between agents
|
||||
- Different `--model` per agent (Opus for supervisor, Sonnet/Haiku for workers)
|
||||
- `--allowedTools` to restrict what each agent can do
|
||||
- `--max-turns` and `--max-budget-usd` for safety limits
|
||||
|
||||
## Key Flags for Programmatic Use
|
||||
|
||||
```bash
|
||||
claude -p "prompt" # Single-shot mode
|
||||
--output-format stream-json # NDJSON output
|
||||
--verbose # Include all events
|
||||
--include-partial-messages # Token-by-token streaming
|
||||
--model sonnet # Model selection
|
||||
--allowedTools "Read,Edit,Bash" # Tool permissions
|
||||
--permission-mode bypassPermissions # No approval prompts
|
||||
--resume <session_id> # Continue conversation
|
||||
--max-turns 10 # Safety limit
|
||||
--max-budget-usd 5.00 # Cost cap
|
||||
--append-system-prompt "..." # Custom instructions
|
||||
--cwd /path/to/worktree # Working directory
|
||||
```
|
||||
|
||||
## Agent SDK Comparison
|
||||
|
||||
The Claude Agent SDK (`@anthropic-ai/claude-agent-sdk`) is a richer TypeScript API with hooks, subagents, and MCP integration — but it **requires an API key** (per-token billing). The PTY approach is the only way to get Max subscription billing programmatically.
|
||||
|
||||
| Factor | PTY + CLI | Agent SDK |
|
||||
|--------|-----------|-----------|
|
||||
| Billing | Max subscription | API key (per-token) |
|
||||
| Language | Any (subprocess) | TypeScript/Python |
|
||||
| Streaming | NDJSON parsing | Native async iterators |
|
||||
| Hooks | Not available | Callback functions |
|
||||
| Subagents | Multiple processes | In-process `agents` option |
|
||||
| Sessions | `--resume` flag | In-memory |
|
||||
| Complexity | Low | Medium (needs Node.js) |
|
||||
|
||||
## Caveats
|
||||
|
||||
- Cost reported in `total_cost_usd` is informational, not actual billing
|
||||
- Concurrent PTY sessions may hit Max subscription rate limits
|
||||
- Each `-p` invocation is a fresh process (startup overhead ~2-3s)
|
||||
- PTY dependency (`portable-pty`) adds ~15 crates
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **Story:** Add `--include-partial-messages` for real-time token streaming to browser
|
||||
2. **Story:** Production multi-agent orchestration with worktree isolation per agent
|
||||
3. **Story:** Streaming HTTP responses (SSE) instead of blocking request until agent completes
|
||||
4. **Consider:** Whether Rust backend should become a thin orchestration layer over Claude Code rather than reimplementing agent capabilities
|
||||
@@ -1,26 +0,0 @@
|
||||
---
|
||||
name: Cross-Platform Binary Distribution
|
||||
test_plan: approved
|
||||
---
|
||||
|
||||
# Story 54: Cross-Platform Binary Distribution
|
||||
|
||||
## User Story
|
||||
|
||||
As a developer, I want to build self-contained binaries for macOS and Linux so that I can share Story Kit with others without requiring them to have a Rust toolchain.
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `cargo build --release` produces a binary with no non-system dynamic dependencies on macOS (current state — verify)
|
||||
- [ ] CI or a documented process can produce a fully static Linux x86_64 binary using the `x86_64-unknown-linux-musl` target (via cross-compilation or Docker build)
|
||||
- [ ] The Linux binary has zero dynamic library dependencies (`ldd` reports "not a dynamic executable")
|
||||
- [ ] The frontend is embedded in the binary via `rust-embed` (current state — verify still works in release builds)
|
||||
- [ ] A Linux user can download and run the single binary without installing Rust, Node, glibc, or any extra libraries
|
||||
- [ ] Build instructions are documented in the project (e.g. a `Makefile` or `justfile` with `build-linux` / `build-macos` targets)
|
||||
|
||||
## Out of Scope
|
||||
|
||||
- Homebrew formula or package manager publishing
|
||||
- Windows support
|
||||
- Auto-update mechanism
|
||||
- Code signing or notarization
|
||||
@@ -692,48 +692,41 @@ pub fn git_stage_and_commit(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Determine the work item type from its ID.
|
||||
/// Returns "bug" for `bug-*` IDs, "spike" for `spike-*` IDs, "story" otherwise.
|
||||
/// Determine the work item type from its ID (new naming: `{N}_{type}_{slug}`).
|
||||
/// Returns "bug", "spike", or "story".
|
||||
fn item_type_from_id(item_id: &str) -> &'static str {
|
||||
if item_id.starts_with("bug-") {
|
||||
// New format: {digits}_{type}_{slug}
|
||||
let after_num = item_id.trim_start_matches(|c: char| c.is_ascii_digit());
|
||||
if after_num.starts_with("_bug_") {
|
||||
"bug"
|
||||
} else if item_id.starts_with("spike-") {
|
||||
} else if after_num.starts_with("_spike_") {
|
||||
"spike"
|
||||
} else {
|
||||
"story"
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the source directory path for a work item based on its type.
|
||||
fn item_source_dir(project_root: &Path, item_id: &str) -> PathBuf {
|
||||
let sk = project_root.join(".story_kit");
|
||||
match item_type_from_id(item_id) {
|
||||
"bug" => sk.join("bugs"),
|
||||
"spike" => sk.join("spikes"),
|
||||
_ => sk.join("stories").join("upcoming"),
|
||||
}
|
||||
/// Return the source directory path for a work item (always work/1_upcoming/).
|
||||
fn item_source_dir(project_root: &Path, _item_id: &str) -> PathBuf {
|
||||
project_root.join(".story_kit").join("work").join("1_upcoming")
|
||||
}
|
||||
|
||||
/// Return the archive directory path for a work item based on its type.
|
||||
fn item_archive_dir(project_root: &Path, item_id: &str) -> PathBuf {
|
||||
let sk = project_root.join(".story_kit");
|
||||
match item_type_from_id(item_id) {
|
||||
"bug" => sk.join("bugs").join("archive"),
|
||||
"spike" => sk.join("spikes").join("archive"),
|
||||
_ => sk.join("stories").join("archived"),
|
||||
}
|
||||
/// Return the archive directory path for a work item (always work/5_archived/).
|
||||
fn item_archive_dir(project_root: &Path, _item_id: &str) -> PathBuf {
|
||||
project_root.join(".story_kit").join("work").join("5_archived")
|
||||
}
|
||||
|
||||
/// Move a work item (story, bug, or spike) to the unified `.story_kit/current/` directory.
|
||||
/// Move a work item (story, bug, or spike) from `work/1_upcoming/` to `work/2_current/`.
|
||||
///
|
||||
/// Idempotent: if the item is already in `current/`, returns Ok without committing.
|
||||
/// If the item is not found in its source directory, logs a warning and returns Ok.
|
||||
/// Idempotent: if the item is already in `2_current/`, returns Ok without committing.
|
||||
/// If the item is not found in `1_upcoming/`, logs a warning and returns Ok.
|
||||
pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(), String> {
|
||||
let current_dir = project_root.join(".story_kit").join("current");
|
||||
let sk = project_root.join(".story_kit").join("work");
|
||||
let current_dir = sk.join("2_current");
|
||||
let current_path = current_dir.join(format!("{story_id}.md"));
|
||||
|
||||
if current_path.exists() {
|
||||
// Already in current/ — idempotent, nothing to do.
|
||||
// Already in 2_current/ — idempotent, nothing to do.
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@@ -742,20 +735,20 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
|
||||
|
||||
if !source_path.exists() {
|
||||
eprintln!(
|
||||
"[lifecycle] Work item '{story_id}' not found in {}; skipping move to current/",
|
||||
"[lifecycle] Work item '{story_id}' not found in {}; skipping move to 2_current/",
|
||||
source_dir.display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
std::fs::create_dir_all(¤t_dir)
|
||||
.map_err(|e| format!("Failed to create .story_kit/current/ directory: {e}"))?;
|
||||
.map_err(|e| format!("Failed to create work/2_current/ directory: {e}"))?;
|
||||
|
||||
std::fs::rename(&source_path, ¤t_path)
|
||||
.map_err(|e| format!("Failed to move '{story_id}' to current/: {e}"))?;
|
||||
.map_err(|e| format!("Failed to move '{story_id}' to 2_current/: {e}"))?;
|
||||
|
||||
eprintln!(
|
||||
"[lifecycle] Moved '{story_id}' from {} to current/",
|
||||
"[lifecycle] Moved '{story_id}' from {} to work/2_current/",
|
||||
source_dir.display()
|
||||
);
|
||||
|
||||
@@ -767,20 +760,15 @@ pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Move a story from `.story_kit/current/` to `.story_kit/stories/archived/` and auto-commit.
|
||||
/// Move a story from `work/2_current/` to `work/5_archived/` and auto-commit.
|
||||
///
|
||||
/// * If the story is in `current/`, it is renamed to `stories/archived/` and committed.
|
||||
/// * If the story is already in `stories/archived/`, this is a no-op (idempotent).
|
||||
/// * If the story is not found in `current/` or `stories/archived/`, an error is returned.
|
||||
/// * If the story is in `2_current/`, it is moved to `5_archived/` and committed.
|
||||
/// * If the story is already in `5_archived/`, this is a no-op (idempotent).
|
||||
/// * If the story is not found in `2_current/` or `5_archived/`, an error is returned.
|
||||
pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(), String> {
|
||||
let current_path = project_root
|
||||
.join(".story_kit")
|
||||
.join("current")
|
||||
.join(format!("{story_id}.md"));
|
||||
let archived_dir = project_root
|
||||
.join(".story_kit")
|
||||
.join("stories")
|
||||
.join("archived");
|
||||
let sk = project_root.join(".story_kit").join("work");
|
||||
let current_path = sk.join("2_current").join(format!("{story_id}.md"));
|
||||
let archived_dir = sk.join("5_archived");
|
||||
let archived_path = archived_dir.join(format!("{story_id}.md"));
|
||||
|
||||
if archived_path.exists() {
|
||||
@@ -790,10 +778,10 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
|
||||
|
||||
if current_path.exists() {
|
||||
std::fs::create_dir_all(&archived_dir)
|
||||
.map_err(|e| format!("Failed to create stories/archived/ directory: {e}"))?;
|
||||
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?;
|
||||
std::fs::rename(¤t_path, &archived_path)
|
||||
.map_err(|e| format!("Failed to move story '{story_id}' to archived/: {e}"))?;
|
||||
eprintln!("[lifecycle] Moved story '{story_id}' from current/ to stories/archived/");
|
||||
.map_err(|e| format!("Failed to move story '{story_id}' to 5_archived/: {e}"))?;
|
||||
eprintln!("[lifecycle] Moved story '{story_id}' from work/2_current/ to work/5_archived/");
|
||||
|
||||
let msg = format!("story-kit: accept story {story_id}");
|
||||
git_stage_and_commit(
|
||||
@@ -805,20 +793,20 @@ pub fn move_story_to_archived(project_root: &Path, story_id: &str) -> Result<(),
|
||||
}
|
||||
|
||||
Err(format!(
|
||||
"Story '{story_id}' not found in current/. Cannot accept story."
|
||||
"Story '{story_id}' not found in work/2_current/. Cannot accept story."
|
||||
))
|
||||
}
|
||||
|
||||
/// Move a bug from `.story_kit/current/` to `.story_kit/bugs/archive/` and auto-commit.
|
||||
/// Move a bug from `work/2_current/` or `work/1_upcoming/` to `work/5_archived/` and auto-commit.
|
||||
///
|
||||
/// * If the bug is in `current/`, it is moved to `bugs/archive/` and committed.
|
||||
/// * If the bug is still in `bugs/` (never started), it is moved directly to `bugs/archive/`.
|
||||
/// * If the bug is already in `bugs/archive/`, this is a no-op (idempotent).
|
||||
/// * If the bug is in `2_current/`, it is moved to `5_archived/` and committed.
|
||||
/// * If the bug is still in `1_upcoming/` (never started), it is moved directly to `5_archived/`.
|
||||
/// * If the bug is already in `5_archived/`, this is a no-op (idempotent).
|
||||
/// * If the bug is not found anywhere, an error is returned.
|
||||
pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> {
|
||||
let sk = project_root.join(".story_kit");
|
||||
let current_path = sk.join("current").join(format!("{bug_id}.md"));
|
||||
let bugs_path = sk.join("bugs").join(format!("{bug_id}.md"));
|
||||
let sk = project_root.join(".story_kit").join("work");
|
||||
let current_path = sk.join("2_current").join(format!("{bug_id}.md"));
|
||||
let upcoming_path = sk.join("1_upcoming").join(format!("{bug_id}.md"));
|
||||
let archive_dir = item_archive_dir(project_root, bug_id);
|
||||
let archive_path = archive_dir.join(format!("{bug_id}.md"));
|
||||
|
||||
@@ -828,21 +816,21 @@ pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), Str
|
||||
|
||||
let source_path = if current_path.exists() {
|
||||
current_path.clone()
|
||||
} else if bugs_path.exists() {
|
||||
bugs_path.clone()
|
||||
} else if upcoming_path.exists() {
|
||||
upcoming_path.clone()
|
||||
} else {
|
||||
return Err(format!(
|
||||
"Bug '{bug_id}' not found in current/ or bugs/. Cannot close bug."
|
||||
"Bug '{bug_id}' not found in work/2_current/ or work/1_upcoming/. Cannot close bug."
|
||||
));
|
||||
};
|
||||
|
||||
std::fs::create_dir_all(&archive_dir)
|
||||
.map_err(|e| format!("Failed to create bugs/archive/ directory: {e}"))?;
|
||||
.map_err(|e| format!("Failed to create work/5_archived/ directory: {e}"))?;
|
||||
std::fs::rename(&source_path, &archive_path)
|
||||
.map_err(|e| format!("Failed to move bug '{bug_id}' to archive: {e}"))?;
|
||||
.map_err(|e| format!("Failed to move bug '{bug_id}' to 5_archived/: {e}"))?;
|
||||
|
||||
eprintln!(
|
||||
"[lifecycle] Closed bug '{bug_id}' → bugs/archive/"
|
||||
"[lifecycle] Closed bug '{bug_id}' → work/5_archived/"
|
||||
);
|
||||
|
||||
let msg = format!("story-kit: close bug {bug_id}");
|
||||
|
||||
@@ -843,7 +843,7 @@ fn tool_get_story_todos(args: &Value, ctx: &AppContext) -> Result<String, String
|
||||
.ok_or("Missing required argument: story_id")?;
|
||||
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let current_dir = root.join(".story_kit").join("current");
|
||||
let current_dir = root.join(".story_kit").join("work").join("2_current");
|
||||
let filepath = current_dir.join(format!("{story_id}.md"));
|
||||
|
||||
if !filepath.exists() {
|
||||
|
||||
@@ -142,7 +142,7 @@ struct ValidateStoriesResponse {
|
||||
|
||||
pub fn load_upcoming_stories(ctx: &AppContext) -> Result<Vec<UpcomingStory>, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let upcoming_dir = root.join(".story_kit").join("stories").join("upcoming");
|
||||
let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming");
|
||||
|
||||
if !upcoming_dir.exists() {
|
||||
return Ok(Vec::new());
|
||||
@@ -177,7 +177,7 @@ pub fn load_upcoming_stories(ctx: &AppContext) -> Result<Vec<UpcomingStory>, Str
|
||||
|
||||
fn load_current_story_metadata(ctx: &AppContext) -> Result<Vec<(String, StoryMetadata)>, String> {
|
||||
let root = ctx.state.get_project_root()?;
|
||||
let current_dir = root.join(".story_kit").join("current");
|
||||
let current_dir = root.join(".story_kit").join("work").join("2_current");
|
||||
|
||||
if !current_dir.exists() {
|
||||
return Ok(Vec::new());
|
||||
@@ -513,7 +513,7 @@ impl WorkflowApi {
|
||||
#[oai(path = "/workflow/todos", method = "get")]
|
||||
async fn story_todos(&self) -> OpenApiResult<Json<TodoListResponse>> {
|
||||
let root = self.ctx.state.get_project_root().map_err(bad_request)?;
|
||||
let current_dir = root.join(".story_kit").join("current");
|
||||
let current_dir = root.join(".story_kit").join("work").join("2_current");
|
||||
|
||||
if !current_dir.exists() {
|
||||
return Ok(Json(TodoListResponse {
|
||||
@@ -631,15 +631,15 @@ pub fn create_story_file(
|
||||
acceptance_criteria: Option<&[String]>,
|
||||
commit: bool,
|
||||
) -> Result<String, String> {
|
||||
let story_number = next_story_number(root)?;
|
||||
let story_number = next_item_number(root)?;
|
||||
let slug = slugify_name(name);
|
||||
|
||||
if slug.is_empty() {
|
||||
return Err("Name must contain at least one alphanumeric character.".to_string());
|
||||
}
|
||||
|
||||
let filename = format!("{story_number}_{slug}.md");
|
||||
let upcoming_dir = root.join(".story_kit").join("stories").join("upcoming");
|
||||
let filename = format!("{story_number}_story_{slug}.md");
|
||||
let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming");
|
||||
fs::create_dir_all(&upcoming_dir)
|
||||
.map_err(|e| format!("Failed to create upcoming directory: {e}"))?;
|
||||
|
||||
@@ -701,39 +701,9 @@ fn git_commit_story_file(root: &Path, filepath: &Path, story_id: &str) -> Result
|
||||
|
||||
// ── Bug file helpers ──────────────────────────────────────────────
|
||||
|
||||
/// Determine the next bug number by scanning `.story_kit/bugs/` and `.story_kit/bugs/archive/`.
///
/// Bug filenames look like `bug-N-slug.md`; the largest `N` found across both
/// directories determines the result (`max + 1`, or `1` when neither directory exists).
fn next_bug_number(root: &Path) -> Result<u32, String> {
    let bugs_base = root.join(".story_kit").join("bugs");
    let archive_dir = bugs_base.join("archive");

    let mut highest: u32 = 0;
    for dir in [bugs_base, archive_dir] {
        if !dir.exists() {
            continue;
        }
        let entries =
            fs::read_dir(dir).map_err(|e| format!("Failed to read bugs directory: {e}"))?;
        for entry in entries {
            let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
            let file_name = entry.file_name();
            let file_name = file_name.to_string_lossy();
            // Bug filenames: bug-N-slug.md — take the digit run right after "bug-".
            let parsed = file_name.strip_prefix("bug-").and_then(|rest| {
                let digits: String = rest.chars().take_while(char::is_ascii_digit).collect();
                digits.parse::<u32>().ok()
            });
            if let Some(n) = parsed {
                highest = highest.max(n);
            }
        }
    }

    Ok(highest + 1)
}
|
||||
|
||||
/// Create a bug file in `.story_kit/bugs/` with a deterministic filename and auto-commit.
|
||||
/// Create a bug file in `work/1_upcoming/` with a deterministic filename and auto-commit.
|
||||
///
|
||||
/// Returns the bug_id (e.g. `"bug-3-login_crash"`).
|
||||
/// Returns the bug_id (e.g. `"4_bug_login_crash"`).
|
||||
pub fn create_bug_file(
|
||||
root: &Path,
|
||||
name: &str,
|
||||
@@ -743,17 +713,17 @@ pub fn create_bug_file(
|
||||
expected_result: &str,
|
||||
acceptance_criteria: Option<&[String]>,
|
||||
) -> Result<String, String> {
|
||||
let bug_number = next_bug_number(root)?;
|
||||
let bug_number = next_item_number(root)?;
|
||||
let slug = slugify_name(name);
|
||||
|
||||
if slug.is_empty() {
|
||||
return Err("Name must contain at least one alphanumeric character.".to_string());
|
||||
}
|
||||
|
||||
let filename = format!("bug-{bug_number}-{slug}.md");
|
||||
let bugs_dir = root.join(".story_kit").join("bugs");
|
||||
let filename = format!("{bug_number}_bug_{slug}.md");
|
||||
let bugs_dir = root.join(".story_kit").join("work").join("1_upcoming");
|
||||
fs::create_dir_all(&bugs_dir)
|
||||
.map_err(|e| format!("Failed to create bugs directory: {e}"))?;
|
||||
.map_err(|e| format!("Failed to create upcoming directory: {e}"))?;
|
||||
|
||||
let filepath = bugs_dir.join(&filename);
|
||||
if filepath.exists() {
|
||||
@@ -797,6 +767,14 @@ pub fn create_bug_file(
|
||||
Ok(bug_id)
|
||||
}
|
||||
|
||||
/// Returns true if the item stem (filename without extension) is a bug item.
/// Bug items follow the pattern: {N}_bug_{slug}
fn is_bug_item(stem: &str) -> bool {
    // Format: {digits}_bug_{rest}. Strip the leading run of digits, then require
    // that (a) at least one digit was actually present — otherwise a stem like
    // "_bug_x" (no item number) would wrongly match — and (b) the remainder
    // starts with the "_bug_" type marker.
    let after_num = stem.trim_start_matches(|c: char| c.is_ascii_digit());
    after_num.len() < stem.len() && after_num.starts_with("_bug_")
}
|
||||
|
||||
/// Extract the human-readable name from a bug file's first heading.
|
||||
fn extract_bug_name(path: &Path) -> Option<String> {
|
||||
let contents = fs::read_to_string(path).ok()?;
|
||||
@@ -811,23 +789,22 @@ fn extract_bug_name(path: &Path) -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
/// List all open bugs — files directly in `.story_kit/bugs/` (excluding `archive/` subdir).
|
||||
/// List all open bugs — files in `work/1_upcoming/` matching the `_bug_` naming pattern.
|
||||
///
|
||||
/// Returns a sorted list of `(bug_id, name)` pairs.
|
||||
pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
|
||||
let bugs_dir = root.join(".story_kit").join("bugs");
|
||||
if !bugs_dir.exists() {
|
||||
let upcoming_dir = root.join(".story_kit").join("work").join("1_upcoming");
|
||||
if !upcoming_dir.exists() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let mut bugs = Vec::new();
|
||||
for entry in
|
||||
fs::read_dir(&bugs_dir).map_err(|e| format!("Failed to read bugs directory: {e}"))?
|
||||
fs::read_dir(&upcoming_dir).map_err(|e| format!("Failed to read upcoming directory: {e}"))?
|
||||
{
|
||||
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
|
||||
let path = entry.path();
|
||||
|
||||
// Skip subdirectories (archive/)
|
||||
if path.is_dir() {
|
||||
continue;
|
||||
}
|
||||
@@ -836,12 +813,17 @@ pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
|
||||
continue;
|
||||
}
|
||||
|
||||
let bug_id = path
|
||||
let stem = path
|
||||
.file_stem()
|
||||
.and_then(|stem| stem.to_str())
|
||||
.ok_or_else(|| "Invalid bug file name.".to_string())?
|
||||
.to_string();
|
||||
.and_then(|s| s.to_str())
|
||||
.ok_or_else(|| "Invalid file name.".to_string())?;
|
||||
|
||||
// Only include bug items: {N}_bug_{slug}
|
||||
if !is_bug_item(stem) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let bug_id = stem.to_string();
|
||||
let name = extract_bug_name(&path).unwrap_or_else(|| bug_id.clone());
|
||||
bugs.push((bug_id, name));
|
||||
}
|
||||
@@ -850,21 +832,22 @@ pub fn list_bug_files(root: &Path) -> Result<Vec<(String, String)>, String> {
|
||||
Ok(bugs)
|
||||
}
|
||||
|
||||
/// Locate a work item file by searching work/2_current/ then work/1_upcoming/.
///
/// Returns the full path to `{story_id}.md`, or an error naming both searched
/// directories when the item is not found in either.
fn find_story_file(project_root: &Path, story_id: &str) -> Result<PathBuf, String> {
    let filename = format!("{story_id}.md");
    let sk = project_root.join(".story_kit").join("work");
    // Check 2_current/ first — an in-progress item takes precedence over any queued copy.
    let current_path = sk.join("2_current").join(&filename);
    if current_path.exists() {
        return Ok(current_path);
    }
    // Fall back to 1_upcoming/
    let upcoming_path = sk.join("1_upcoming").join(&filename);
    if upcoming_path.exists() {
        return Ok(upcoming_path);
    }
    Err(format!(
        "Story '{story_id}' not found in work/2_current/ or work/1_upcoming/."
    ))
}
|
||||
|
||||
@@ -1005,13 +988,13 @@ fn slugify_name(name: &str) -> String {
|
||||
result
|
||||
}
|
||||
|
||||
fn next_story_number(root: &std::path::Path) -> Result<u32, String> {
|
||||
let stories_base = root.join(".story_kit").join("stories");
|
||||
/// Scan all `work/` subdirectories for the highest item number across all types (stories, bugs, spikes).
|
||||
fn next_item_number(root: &std::path::Path) -> Result<u32, String> {
|
||||
let work_base = root.join(".story_kit").join("work");
|
||||
let mut max_num: u32 = 0;
|
||||
|
||||
// Scan stories/upcoming/ and stories/archived/ for story numbers
|
||||
for subdir in &["upcoming", "archived"] {
|
||||
let dir = stories_base.join(subdir);
|
||||
for subdir in &["1_upcoming", "2_current", "3_qa", "4_merge", "5_archived"] {
|
||||
let dir = work_base.join(subdir);
|
||||
if !dir.exists() {
|
||||
continue;
|
||||
}
|
||||
@@ -1021,24 +1004,7 @@ fn next_story_number(root: &std::path::Path) -> Result<u32, String> {
|
||||
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
|
||||
let name = entry.file_name();
|
||||
let name_str = name.to_string_lossy();
|
||||
let num_str: String = name_str.chars().take_while(|c| c.is_ascii_digit()).collect();
|
||||
if let Ok(n) = num_str.parse::<u32>()
|
||||
&& n > max_num
|
||||
{
|
||||
max_num = n;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also scan unified .story_kit/current/ for story numbers
|
||||
let current_dir = root.join(".story_kit").join("current");
|
||||
if current_dir.exists() {
|
||||
for entry in
|
||||
fs::read_dir(¤t_dir).map_err(|e| format!("Failed to read current directory: {e}"))?
|
||||
{
|
||||
let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
|
||||
let name = entry.file_name();
|
||||
let name_str = name.to_string_lossy();
|
||||
// Filename format: {N}_{type}_{slug}.md — extract leading N
|
||||
let num_str: String = name_str.chars().take_while(|c| c.is_ascii_digit()).collect();
|
||||
if let Ok(n) = num_str.parse::<u32>()
|
||||
&& n > max_num
|
||||
@@ -1056,10 +1022,10 @@ pub fn validate_story_dirs(
|
||||
) -> Result<Vec<StoryValidationResult>, String> {
|
||||
let mut results = Vec::new();
|
||||
|
||||
// Directories to validate: unified current/ + stories/upcoming/
|
||||
// Directories to validate: work/2_current/ + work/1_upcoming/
|
||||
let dirs_to_validate: Vec<PathBuf> = vec![
|
||||
root.join(".story_kit").join("current"),
|
||||
root.join(".story_kit").join("stories").join("upcoming"),
|
||||
root.join(".story_kit").join("work").join("2_current"),
|
||||
root.join(".story_kit").join("work").join("1_upcoming"),
|
||||
];
|
||||
|
||||
for dir in &dirs_to_validate {
|
||||
|
||||
Reference in New Issue
Block a user