Compare commits
53 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| f4a97c1135 | |||
| 0969fb5d51 | |||
| 744cc9dca4 | |||
| ce37281333 | |||
| 149a383447 | |||
| d68614e26a | |||
| a4480fa067 | |||
| beb84ade9f | |||
| d235fd41ac | |||
| 2246278845 | |||
| d80fc143c2 | |||
| 1fe4ca2b7a | |||
| c28c86dbc6 | |||
| 70fecafd41 | |||
| c34b119526 | |||
| 0bf715d9bb | |||
| 7fa31c03a3 | |||
| 483489cc44 | |||
| ec40b4771b | |||
| 52b21c22b1 | |||
| 8936abd8cd | |||
| 8482df2f4e | |||
| 327163eb60 | |||
| 8f1dd0ad13 | |||
| 28adef9739 | |||
| badfabcf5e | |||
| d0d2b17484 | |||
| efe434ede3 | |||
| df5ba8ebab | |||
| ff1149750b | |||
| d824dc4b73 | |||
| 28777b0c77 | |||
| f412c7dee6 | |||
| 44fe52195e | |||
| 979cf39228 | |||
| 10d3517648 | |||
| 8a62b62819 | |||
| 2e412af4dd | |||
| 39b1964b68 | |||
| bd04c6acd7 | |||
| 7977b7c5f8 | |||
| d618bc3b32 | |||
| 845b85e7a7 | |||
| ed2526ce41 | |||
| 05655847d8 | |||
| 0cb68e1de9 | |||
| cd189cfe60 | |||
| 69dab063a8 | |||
| 5806156af3 | |||
| 12497eb4f1 | |||
| 8b5275a30b | |||
| 5536803ad6 | |||
| c4462e2918 |
@@ -9,6 +9,7 @@
|
||||
store.json
|
||||
.huskies_port
|
||||
.huskies/bot.toml.bak
|
||||
.huskies/build_hash
|
||||
|
||||
# Coverage report (generated by script/test_coverage, not tracked in git)
|
||||
.coverage_report.json
|
||||
|
||||
+83
-2
@@ -72,10 +72,91 @@ Consult `specs/tech/STACK.md` for project-specific quality gates.
|
||||
| `status` | Get story details, ACs, git state |
|
||||
| `get_story_todos` | List unchecked acceptance criteria |
|
||||
| `check_criterion` | Mark an AC as done |
|
||||
| `run_tests` | Start test suite (async, returns immediately) |
|
||||
| `get_test_result` | Poll for test completion |
|
||||
| `run_tests` | Start test suite (blocks until complete) |
|
||||
| `git_status` | Worktree git status |
|
||||
| `git_add` | Stage files |
|
||||
| `git_commit` | Commit staged changes |
|
||||
| `git_diff` | View changes |
|
||||
| `git_log` | View commit history |
|
||||
|
||||
---
|
||||
|
||||
## 7. Project Architecture
|
||||
|
||||
Huskies is a single Rust binary with an embedded React frontend. Key things to know:
|
||||
|
||||
- **Backend:** `server/src/` — Rust, built with Poem (HTTP framework)
|
||||
- **Frontend:** `frontend/src/` — React + TypeScript, built with Vite
|
||||
- **Gateway mode:** `huskies --gateway` is a deployment mode of the same binary, NOT a separate application. The gateway backend code lives in `server/src/gateway.rs`. Gateway frontend components live in `frontend/src/` alongside everything else.
|
||||
- **Stories that say "UI":** These are primarily frontend (TypeScript/React) work. Check what backend endpoints already exist before adding new ones. Keep Rust changes minimal.
|
||||
- **Stories that say "gateway":** The gateway is just a mode. Don't restructure `gateway.rs` unless the story specifically asks for backend changes.
|
||||
|
||||
---
|
||||
|
||||
## 8. Deployment Modes
|
||||
|
||||
Huskies has three modes, all from the same binary:
|
||||
|
||||
### Standard (single project)
|
||||
|
||||
```
|
||||
huskies [--port 3001] /path/to/project
|
||||
```
|
||||
|
||||
Full server: web UI, MCP endpoint, chat bot, agent pool, pipeline. One project per instance.
|
||||
|
||||
### Headless Build Agent
|
||||
|
||||
```
|
||||
huskies --rendezvous ws://host:port/crdt-sync
|
||||
```
|
||||
|
||||
Connects to an existing huskies instance as a worker node. Syncs the CRDT, claims work from the pipeline, runs agents. No web UI, no chat — just a build worker. Use this to add more compute to a project by running extra containers.
|
||||
|
||||
### Gateway (multi-project)
|
||||
|
||||
```
|
||||
huskies --gateway [--port 3000] /path/to/config
|
||||
```
|
||||
|
||||
Lightweight proxy that sits in front of multiple project containers. Reads a `projects.toml` that maps project names to container URLs:
|
||||
|
||||
```toml
|
||||
[projects.huskies]
|
||||
url = "http://huskies:3001"
|
||||
|
||||
[projects.robot-studio]
|
||||
url = "http://robot-studio:3002"
|
||||
```
|
||||
|
||||
The gateway presents a unified MCP surface to the chat agent. All tool calls are proxied to the active project's container. Gateway-specific tools:
|
||||
|
||||
| Tool | Purpose |
|
||||
|------|---------|
|
||||
| `switch_project` | Change the active project |
|
||||
| `gateway_status` | Show active project and list all registered projects |
|
||||
| `gateway_health` | Health check all containers |
|
||||
|
||||
### Example: multi-project Docker Compose
|
||||
|
||||
```yaml
|
||||
services:
|
||||
gateway:
|
||||
image: huskies
|
||||
command: ["huskies", "--gateway", "--port", "3000", "/workspace"]
|
||||
ports:
|
||||
- "127.0.0.1:3000:3000"
|
||||
depends_on: [huskies, robot-studio]
|
||||
|
||||
huskies:
|
||||
image: huskies
|
||||
volumes:
|
||||
- /path/to/huskies:/workspace
|
||||
|
||||
robot-studio:
|
||||
image: huskies
|
||||
environment:
|
||||
- HUSKIES_PORT=3002
|
||||
volumes:
|
||||
- /path/to/robot-studio:/workspace
|
||||
```
|
||||
|
||||
+37
-70
@@ -5,8 +5,8 @@ role = "Full-stack engineer. Implements features across all components."
|
||||
model = "sonnet"
|
||||
max_turns = 50
|
||||
max_budget_usd = 5.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits and advance the pipeline based on the results. To verify before committing, use the run_tests MCP tool (it starts tests in the background — poll get_test_result to check completion) — never run script/test or cargo test directly via Bash.\n\n## Acceptance Criteria Tracking\nAs you complete each acceptance criterion, call the check_criterion MCP tool (story_id, criterion_index) to mark it done. Index 0 is the first unchecked criterion, 1 is the second, etc. Do this as you go — not all at once at the end.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix. Do NOT explore git history, grep the whole codebase, or re-investigate the root cause when the story already tells you what to do.\n2. If the story does NOT specify the exact location, THEN investigate: use targeted grep to find the relevant code.\n3. Fix with a surgical, minimal change. Do NOT add new abstractions or workarounds.\n4. Commit early. If you've made the fix and tests pass, commit and exit. Do not spend turns verifying that master also has the same failures — that wastes budget.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Use the run_tests MCP tool to verify your changes pass — it starts tests in the background, then poll get_test_result to check completion. Never run script/test or cargo test directly via Bash. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. The server automatically runs acceptance gates when your process exits. For bugs, trust the story description — if it specifies exact files and functions, go directly there. Do not explore git history or grep the whole codebase when the story already tells you where to look. Make surgical fixes, commit early."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map. The story details are in your prompt above. The worktree and feature branch already exist - do not create them.\n\n## Your workflow\n1. Read the story and understand the acceptance criteria.\n2. Implement the changes.\n3. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done.\n4. Run the run_tests MCP tool. It blocks until tests complete and returns the results.\n5. If tests fail, fix the failures and run run_tests again. Do not commit until tests pass.\n6. Once tests pass, commit your work with a descriptive message and exit.\n\nDo NOT accept stories, move them between stages, or merge to master. The server handles all of that after you exit.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix.\n2. If the story does NOT specify the exact location, investigate with targeted grep.\n3. Fix with a surgical, minimal change.\n4. Run tests, fix failures, commit and exit.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Always run the run_tests MCP tool before committing — do not commit until tests pass. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Do not accept stories, move them between stages, or merge to master — the server handles that. For bugs, trust the story description and make surgical fixes."
|
||||
|
||||
[[agent]]
|
||||
name = "coder-2"
|
||||
@@ -15,8 +15,8 @@ role = "Full-stack engineer. Implements features across all components."
|
||||
model = "sonnet"
|
||||
max_turns = 50
|
||||
max_budget_usd = 5.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits and advance the pipeline based on the results. To verify before committing, use the run_tests MCP tool (it starts tests in the background — poll get_test_result to check completion) — never run script/test or cargo test directly via Bash.\n\n## Acceptance Criteria Tracking\nAs you complete each acceptance criterion, call the check_criterion MCP tool (story_id, criterion_index) to mark it done. Index 0 is the first unchecked criterion, 1 is the second, etc. Do this as you go — not all at once at the end.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix. Do NOT explore git history, grep the whole codebase, or re-investigate the root cause when the story already tells you what to do.\n2. If the story does NOT specify the exact location, THEN investigate: use targeted grep to find the relevant code.\n3. Fix with a surgical, minimal change. Do NOT add new abstractions or workarounds.\n4. Commit early. If you've made the fix and tests pass, commit and exit. Do not spend turns verifying that master also has the same failures — that wastes budget.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Use the run_tests MCP tool to verify your changes pass — it starts tests in the background, then poll get_test_result to check completion. Never run script/test or cargo test directly via Bash. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. The server automatically runs acceptance gates when your process exits. For bugs, trust the story description — if it specifies exact files and functions, go directly there. Do not explore git history or grep the whole codebase when the story already tells you where to look. Make surgical fixes, commit early."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map. The story details are in your prompt above. The worktree and feature branch already exist - do not create them.\n\n## Your workflow\n1. Read the story and understand the acceptance criteria.\n2. Implement the changes.\n3. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done.\n4. Run the run_tests MCP tool. It blocks until tests complete and returns the results.\n5. If tests fail, fix the failures and run run_tests again. Do not commit until tests pass.\n6. Once tests pass, commit your work with a descriptive message and exit.\n\nDo NOT accept stories, move them between stages, or merge to master. The server handles all of that after you exit.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix.\n2. If the story does NOT specify the exact location, investigate with targeted grep.\n3. Fix with a surgical, minimal change.\n4. Run tests, fix failures, commit and exit.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Always run the run_tests MCP tool before committing — do not commit until tests pass. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Do not accept stories, move them between stages, or merge to master — the server handles that. For bugs, trust the story description and make surgical fixes."
|
||||
|
||||
[[agent]]
|
||||
name = "coder-3"
|
||||
@@ -25,8 +25,8 @@ role = "Full-stack engineer. Implements features across all components."
|
||||
model = "sonnet"
|
||||
max_turns = 50
|
||||
max_budget_usd = 5.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits and advance the pipeline based on the results. To verify before committing, use the run_tests MCP tool (it starts tests in the background — poll get_test_result to check completion) — never run script/test or cargo test directly via Bash.\n\n## Acceptance Criteria Tracking\nAs you complete each acceptance criterion, call the check_criterion MCP tool (story_id, criterion_index) to mark it done. Index 0 is the first unchecked criterion, 1 is the second, etc. Do this as you go — not all at once at the end.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix. Do NOT explore git history, grep the whole codebase, or re-investigate the root cause when the story already tells you what to do.\n2. If the story does NOT specify the exact location, THEN investigate: use targeted grep to find the relevant code.\n3. Fix with a surgical, minimal change. Do NOT add new abstractions or workarounds.\n4. Commit early. If you've made the fix and tests pass, commit and exit. Do not spend turns verifying that master also has the same failures — that wastes budget.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Follow the Story-Driven Test Workflow strictly. Use the run_tests MCP tool to verify your changes pass — it starts tests in the background, then poll get_test_result to check completion. Never run script/test or cargo test directly via Bash. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. The server automatically runs acceptance gates when your process exits. For bugs, trust the story description — if it specifies exact files and functions, go directly there. Do not explore git history or grep the whole codebase when the story already tells you where to look. Make surgical fixes, commit early."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map. The story details are in your prompt above. The worktree and feature branch already exist - do not create them.\n\n## Your workflow\n1. Read the story and understand the acceptance criteria.\n2. Implement the changes.\n3. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done.\n4. Run the run_tests MCP tool. It blocks until tests complete and returns the results.\n5. If tests fail, fix the failures and run run_tests again. Do not commit until tests pass.\n6. Once tests pass, commit your work with a descriptive message and exit.\n\nDo NOT accept stories, move them between stages, or merge to master. The server handles all of that after you exit.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix.\n2. If the story does NOT specify the exact location, investigate with targeted grep.\n3. Fix with a surgical, minimal change.\n4. Run tests, fix failures, commit and exit.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a full-stack engineer working autonomously in a git worktree. Always run the run_tests MCP tool before committing — do not commit until tests pass. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Do not accept stories, move them between stages, or merge to master — the server handles that. For bugs, trust the story description and make surgical fixes."
|
||||
|
||||
[[agent]]
|
||||
name = "qa-2"
|
||||
@@ -37,7 +37,7 @@ max_turns = 40
|
||||
max_budget_usd = 4.00
|
||||
prompt = """You are the QA agent for story {{story_id}}. Your job is to verify the coder's work satisfies the story's acceptance criteria and produce a structured QA report.
|
||||
|
||||
Read CLAUDE.md first, then .story_kit/README.md to understand the dev process.
|
||||
Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map.
|
||||
|
||||
## Your Workflow
|
||||
|
||||
@@ -48,7 +48,7 @@ Read CLAUDE.md first, then .story_kit/README.md to understand the dev process.
|
||||
|
||||
### 1. Deterministic Gates (Prerequisites)
|
||||
Run these first — if any fail, reject immediately without proceeding to AC review:
|
||||
- Call the `run_tests` MCP tool to start tests, then poll `get_test_result` until complete — all gates must pass (0 lint errors/warnings, all tests green, frontend build clean if applicable). Do NOT run script/test via Bash.
|
||||
- Call the `run_tests` MCP tool — it blocks until complete. All gates must pass (0 lint errors/warnings, all tests green, frontend build clean if applicable).
|
||||
|
||||
### 2. Code Change Review
|
||||
- Run `git diff master...HEAD --stat` to see what files changed
|
||||
@@ -72,7 +72,7 @@ An AC fails if:
|
||||
- A test exists but doesn't actually assert the behaviour described
|
||||
|
||||
### 4. Manual Testing Support (only if all gates PASS and all ACs PASS)
|
||||
- Build: run `script/build` and note success/failure
|
||||
- Build: run `run_build` MCP tool and note success/failure
|
||||
- If build succeeds: find a free port (try 3010-3020), set `HUSKIES_PORT=<port>` and start the server with `script/server`
|
||||
- Generate a testing plan including:
|
||||
- URL to visit in the browser
|
||||
@@ -126,8 +126,8 @@ role = "Senior full-stack engineer for complex tasks. Implements features across
|
||||
model = "opus"
|
||||
max_turns = 80
|
||||
max_budget_usd = 20.00
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .story_kit/README.md to understand the dev process. The story details are in your prompt above. Follow the SDTW process through implementation and verification (Steps 1-3). The worktree and feature branch already exist - do not create them. Check .mcp.json for MCP tools. Do NOT accept the story or merge - commit your work and stop. If the user asks to review your changes, tell them to run: cd \"{{worktree_path}}\" && git difftool {{base_branch}}...HEAD\n\nIMPORTANT: Commit all your work before your process exits. The server will automatically run acceptance gates when your process exits and advance the pipeline based on the results. To verify before committing, use the run_tests MCP tool (it starts tests in the background — poll get_test_result to check completion) — never run script/test or cargo test directly via Bash.\n\n## Acceptance Criteria Tracking\nAs you complete each acceptance criterion, call the check_criterion MCP tool (story_id, criterion_index) to mark it done. Index 0 is the first unchecked criterion, 1 is the second, etc. Do this as you go — not all at once at the end.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix. Do NOT explore git history, grep the whole codebase, or re-investigate the root cause when the story already tells you what to do.\n2. If the story does NOT specify the exact location, THEN investigate: use targeted grep to find the relevant code.\n3. Fix with a surgical, minimal change. Do NOT add new abstractions or workarounds.\n4. Commit early. If you've made the fix and tests pass, commit and exit. Do not spend turns verifying that master also has the same failures — that wastes budget.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a senior full-stack engineer working autonomously in a git worktree. You handle complex tasks requiring deep architectural understanding. Follow the Story-Driven Test Workflow strictly. Use the run_tests MCP tool to verify your changes pass — it starts tests in the background, then poll get_test_result to check completion. Never run script/test or cargo test directly via Bash. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Commit all your work before finishing - use a descriptive commit message. Do not accept stories, move them to archived, or merge to master - a human will do that. Do not coordinate with other agents - focus on your assigned story. The server automatically runs acceptance gates when your process exits. For bugs, trust the story description — if it specifies exact files and functions, go directly there. Do not explore git history or grep the whole codebase when the story already tells you where to look. Make surgical fixes, commit early."
|
||||
prompt = "You are working in a git worktree on story {{story_id}}. Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map. The story details are in your prompt above. The worktree and feature branch already exist - do not create them.\n\n## Your workflow\n1. Read the story and understand the acceptance criteria.\n2. Implement the changes.\n3. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done.\n4. Run the run_tests MCP tool. It blocks until tests complete and returns the results.\n5. If tests fail, fix the failures and run run_tests again. Do not commit until tests pass.\n6. Once tests pass, commit your work with a descriptive message and exit.\n\nDo NOT accept stories, move them between stages, or merge to master. The server handles all of that after you exit.\n\n## Bug Workflow: Trust the Story, Act Fast\nWhen working on bugs:\n1. READ THE STORY DESCRIPTION FIRST. If it specifies exact files, functions, and line numbers — go directly there and make the fix.\n2. If the story does NOT specify the exact location, investigate with targeted grep.\n3. Fix with a surgical, minimal change.\n4. Run tests, fix failures, commit and exit.\n5. Write commit messages that explain what broke and why."
|
||||
system_prompt = "You are a senior full-stack engineer working autonomously in a git worktree. You handle complex tasks requiring deep architectural understanding. Always run the run_tests MCP tool before committing — do not commit until tests pass. As you complete each acceptance criterion, call check_criterion MCP tool to mark it done. Add //! module-level doc comments to any new modules and /// doc comments to any new public functions, structs, or enums. Do not accept stories, move them between stages, or merge to master — the server handles that. For bugs, trust the story description and make surgical fixes."
|
||||
|
||||
[[agent]]
|
||||
name = "qa"
|
||||
@@ -138,7 +138,7 @@ max_turns = 40
|
||||
max_budget_usd = 4.00
|
||||
prompt = """You are the QA agent for story {{story_id}}. Your job is to verify the coder's work satisfies the story's acceptance criteria and produce a structured QA report.
|
||||
|
||||
Read CLAUDE.md first, then .story_kit/README.md to understand the dev process.
|
||||
Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map.
|
||||
|
||||
## Your Workflow
|
||||
|
||||
@@ -149,7 +149,7 @@ Read CLAUDE.md first, then .story_kit/README.md to understand the dev process.
|
||||
|
||||
### 1. Deterministic Gates (Prerequisites)
|
||||
Run these first — if any fail, reject immediately without proceeding to AC review:
|
||||
- Call the `run_tests` MCP tool to start tests, then poll `get_test_result` until complete — all gates must pass (0 lint errors/warnings, all tests green, frontend build clean if applicable). Do NOT run script/test via Bash.
|
||||
- Call the `run_tests` MCP tool — it blocks until complete. All gates must pass (0 lint errors/warnings, all tests green, frontend build clean if applicable).
|
||||
|
||||
### 2. Code Change Review
|
||||
- Run `git diff master...HEAD --stat` to see what files changed
|
||||
@@ -173,7 +173,7 @@ An AC fails if:
|
||||
- A test exists but doesn't actually assert the behaviour described
|
||||
|
||||
### 4. Manual Testing Support (only if all gates PASS and all ACs PASS)
|
||||
- Build: run `script/build` and note success/failure
|
||||
- Build: run `run_build` MCP tool and note success/failure
|
||||
- If build succeeds: find a free port (try 3010-3020), set `HUSKIES_PORT=<port>` and start the server with `script/server`
|
||||
- Generate a testing plan including:
|
||||
- URL to visit in the browser
|
||||
@@ -224,70 +224,37 @@ system_prompt = "You are a QA agent. Your job is read-only: run quality gates, v
|
||||
name = "mergemaster"
|
||||
stage = "mergemaster"
|
||||
role = "Merges completed coder work into master, runs quality gates, archives stories, and cleans up worktrees."
|
||||
model = "sonnet"
|
||||
model = "opus"
|
||||
max_turns = 30
|
||||
max_budget_usd = 5.00
|
||||
prompt = """You are the mergemaster agent for story {{story_id}}. Your job is to merge the completed coder work into master.
|
||||
|
||||
Read CLAUDE.md first, then .story_kit/README.md to understand the dev process.
|
||||
Read CLAUDE.md first, then .huskies/README.md for the dev process, .huskies/specs/00_CONTEXT.md for what this project does, and .huskies/specs/tech/STACK.md for the tech stack and source map.
|
||||
|
||||
## Your Workflow
|
||||
1. Call merge_agent_work(story_id='{{story_id}}') — this blocks until the merge completes and returns the result. Do NOT poll get_merge_status.
|
||||
2. Review the result: check success, had_conflicts, conflicts_resolved, gates_passed, and gate_output
|
||||
3. If merge succeeded and gates passed: report success to the human
|
||||
4. If conflicts were auto-resolved (conflicts_resolved=true) and gates passed: report success, noting which conflicts were resolved
|
||||
5. If conflicts could not be auto-resolved: **resolve them yourself** in the merge worktree (see below)
|
||||
6. If merge failed for any other reason: call report_merge_failure(story_id='{{story_id}}', reason='<details>') and report to the human
|
||||
7. If gates failed after merge: attempt to fix the issues yourself in the merge worktree, then re-trigger merge_agent_work. After 3 fix attempts, call report_merge_failure and stop.
|
||||
|
||||
## Resolving Complex Conflicts Yourself
|
||||
|
||||
When the auto-resolver fails, you have access to the merge worktree at `.huskies/merge_workspace/`. Go in there and resolve the conflicts manually:
|
||||
|
||||
1. Run `git diff --name-only --diff-filter=U` in the merge worktree to list conflicted files
|
||||
2. **Build context before touching code.** Run `git log --oneline master...HEAD` on the feature branch to see its commits. Then run `git log --oneline --since="$(git log -1 --format=%ci <feature-branch-base-commit>)" master` to see what landed on master since the branch was created. Read the story files in `.huskies/work/` for any recently merged stories that touch the same files — this tells you WHY master changed and what must be preserved.
|
||||
3. Read each conflicted file and understand both sides of the conflict
|
||||
4. **Understand intent, not just syntax.** The feature branch may be behind master — master's version of shared infrastructure is almost always correct. The feature branch's contribution is the NEW functionality it adds. Your job is to integrate the new into master's structure, not pick one side.
|
||||
5. Resolve by integrating the feature's new functionality into master's code structure
|
||||
6. Stage resolved files with `git add`
|
||||
7. Call the `run_tests` MCP tool — it blocks until complete
|
||||
8. If it compiles, commit and re-trigger merge_agent_work
|
||||
|
||||
### Common conflict patterns in this project:
|
||||
|
||||
**Story file rename/rename conflicts:** Both branches moved the story .md file to different pipeline directories. Resolution: `git rm` both sides — story files in `work/2_current/`, `work/3_qa/`, `work/4_merge/` are gitignored and don't need to be committed.
|
||||
|
||||
**bot.rs tokio::select! conflicts:** Master has a `tokio::select!` loop in `handle_message()` that handles permission forwarding (story 275). Feature branches created before story 275 have a simpler direct `provider.chat_stream().await` call. Resolution: KEEP master's tokio::select! loop. Integrate only the feature's new logic (e.g. typing indicators, new callbacks) into the existing loop structure. Do NOT replace the loop with the old direct call.
|
||||
|
||||
**Duplicate functions/imports:** The auto-resolver keeps both sides, producing duplicates. Resolution: keep one copy (prefer master's version), delete the duplicate.
|
||||
|
||||
**Formatting-only conflicts:** Both sides reformatted the same code differently. Resolution: pick either side (prefer master).
|
||||
1. Call merge_agent_work(story_id='{{story_id}}'). It blocks until the merge completes and returns the full result.
|
||||
2. If success and gates passed: you're done. Exit.
|
||||
3. If gates failed: read the gate_output carefully, fix the issues in the merge workspace at `.huskies/merge_workspace/`, run run_tests MCP tool to verify, recommit, and call merge_agent_work again.
|
||||
4. If merge failed for any other reason: call report_merge_failure(story_id='{{story_id}}', reason='<details>') and exit.
|
||||
5. After 3 failed fix attempts, call report_merge_failure and exit.
|
||||
|
||||
## Fixing Gate Failures
|
||||
|
||||
If quality gates fail, attempt to fix issues yourself in the merge worktree. Use the run_tests MCP tool to verify — it blocks until complete. Do not run script/test via Bash.
|
||||
The auto-resolver often produces broken code. Common problems:
|
||||
- Duplicate imports or definitions (kept both sides)
|
||||
- Formatting issues (import ordering, line breaks)
|
||||
- Unclosed delimiters from bad conflict resolution
|
||||
- Type mismatches from incompatible merge of both sides
|
||||
|
||||
**Fix yourself (up to 3 attempts total):**
|
||||
- Syntax errors (missing semicolons, brackets, commas)
|
||||
- Duplicate definitions from merge artifacts
|
||||
- Simple type annotation errors
|
||||
- Unused import warnings flagged by clippy
|
||||
- Mismatched braces from bad conflict resolution
|
||||
- Trivial formatting issues that block compilation or linting
|
||||
To fix:
|
||||
1. Read the broken files in `.huskies/merge_workspace/`
|
||||
2. Fix the issues — prefer master's structure, integrate only the feature's new code
|
||||
3. Run run_lint MCP tool to check formatting
|
||||
4. Run run_tests MCP tool to verify everything passes
|
||||
5. Commit the fix and call merge_agent_work again
|
||||
|
||||
**Report to human without attempting a fix:**
|
||||
- Logic errors or incorrect business logic
|
||||
- Missing function implementations
|
||||
- Architectural changes required
|
||||
- Non-trivial refactoring needed
|
||||
|
||||
**Max retry limit:** If gates still fail after 3 fix attempts, call report_merge_failure to record the failure, then stop immediately and report the full gate output to the human.
|
||||
|
||||
## CRITICAL Rules
|
||||
- NEVER manually move story files between pipeline stages (e.g. from 4_merge/ to 5_done/)
|
||||
- NEVER call accept_story — only merge_agent_work can move stories to done after a successful merge
|
||||
- When merge fails after exhausting your fix attempts, ALWAYS call report_merge_failure
|
||||
- Report conflict resolution outcomes clearly
|
||||
- Report gate failures with full output so the human can act if needed
|
||||
- The server automatically runs acceptance gates when your process exits"""
|
||||
system_prompt = "You are the mergemaster agent. Your primary job is to merge feature branches to master. First try the merge_agent_work MCP tool. If the auto-resolver fails on complex conflicts, resolve them yourself in the merge worktree — you are an opus-class agent capable of understanding both sides of a conflict and producing correct merged code. Common patterns: keep master's tokio::select! permission loop in bot.rs, discard story file rename conflicts (gitignored), remove duplicate definitions. After resolving, verify compilation before re-triggering merge. CRITICAL: Never manually move story files or call accept_story. After 3 failed fix attempts, call report_merge_failure and stop."
|
||||
## Rules
|
||||
- NEVER manually move story files between pipeline stages
|
||||
- NEVER call accept_story — merge_agent_work handles that
|
||||
- ALWAYS call report_merge_failure if you can't fix the merge"""
|
||||
system_prompt = "You are the mergemaster agent. Call merge_agent_work to merge. If gates fail, fix the issues in the merge workspace, verify with run_lint and run_tests MCP tools, recommit, and retrigger. After 3 failed attempts, call report_merge_failure and exit. Never move story files or call accept_story."
|
||||
|
||||
+112
-112
@@ -1,130 +1,130 @@
|
||||
# Tech Stack & Constraints
|
||||
# Tech Stack
|
||||
|
||||
## Overview
|
||||
This project is a standalone Rust **web server binary** that serves a Vite/React frontend and exposes a **WebSocket API**. The built frontend assets are packaged with the binary (in a `frontend` directory) and served as static files. It functions as an **Agentic Code Assistant** capable of safely executing tools on the host system.
|
||||
## Backend
|
||||
- **Language:** Rust
|
||||
- **Framework:** Poem (HTTP + WebSocket + OpenAPI)
|
||||
- **Database:** SQLite via sqlx + rusqlite
|
||||
- **State:** BFT CRDT replicated document backed by SQLite
|
||||
- **Agents:** Claude Code CLI spawned in PTY pseudo-terminals
|
||||
- **Package manager:** cargo
|
||||
|
||||
## Core Stack
|
||||
* **Backend:** Rust (Web Server)
|
||||
* **MSRV:** Stable (latest)
|
||||
* **Framework:** Poem HTTP server with WebSocket support for streaming; HTTP APIs should use Poem OpenAPI (Swagger) for non-streaming endpoints.
|
||||
* **Frontend:** TypeScript + React
|
||||
* **Build Tool:** Vite
|
||||
* **Package Manager:** npm
|
||||
* **Styling:** CSS Modules or Tailwind (TBD - Defaulting to CSS Modules)
|
||||
* **State Management:** React Context / Hooks
|
||||
* **Chat UI:** Rendered Markdown with syntax highlighting.
|
||||
## Frontend
|
||||
- **Language:** TypeScript + React
|
||||
- **Build:** Vite
|
||||
- **Package manager:** npm
|
||||
- **Testing:** Vitest (unit), Playwright (e2e)
|
||||
|
||||
## Agent Architecture
|
||||
The application follows a **Tool-Use (Function Calling)** architecture:
|
||||
1. **Frontend:** Collects user input and sends it to the LLM.
|
||||
2. **LLM:** Decides to generate text OR request a **Tool Call** (e.g., `execute_shell`, `read_file`).
|
||||
3. **Web Server Backend (The "Hand"):**
|
||||
* Intercepts Tool Calls.
|
||||
* Validates the request against the **Safety Policy**.
|
||||
* Executes the native code (File I/O, Shell Process, Search).
|
||||
* Returns the output (stdout/stderr/file content) to the LLM.
|
||||
* **Streaming:** The backend sends real-time updates over WebSocket to keep the UI responsive during long-running Agent tasks.
|
||||
## Deployment
|
||||
- Single Rust binary with embedded React frontend (rust-embed)
|
||||
- Three modes: standard server, headless build agent (`--rendezvous`), multi-project gateway (`--gateway`)
|
||||
- Docker container with OrbStack recommended on macOS
|
||||
|
||||
## LLM Provider Abstraction
|
||||
To support both Remote and Local models, the system implements a `ModelProvider` abstraction layer.
|
||||
## Project Layout
|
||||
```
|
||||
server/src/ — Rust backend
|
||||
frontend/src/ — React frontend
|
||||
crates/bft-json-crdt/ — CRDT library
|
||||
.huskies/ — Pipeline config, agent config, specs
|
||||
script/ — test, build, lint scripts
|
||||
docker/ — Dockerfile and docker-compose
|
||||
website/ — Static marketing/docs site
|
||||
```
|
||||
|
||||
* **Strategy:**
|
||||
* Abstract the differences between API formats (OpenAI-compatible vs Anthropic vs Gemini).
|
||||
* Normalize "Tool Use" definitions, as each provider handles function calling schemas differently.
|
||||
* **Supported Providers:**
|
||||
* **Ollama:** Local inference (e.g., Llama 3, DeepSeek Coder) for privacy and offline usage.
|
||||
* **Anthropic:** Claude 3.5 models (Sonnet, Haiku) via API for coding tasks (Story 12).
|
||||
* **Provider Selection:**
|
||||
* Automatic detection based on model name prefix:
|
||||
* `claude-` → Anthropic API
|
||||
* Otherwise → Ollama
|
||||
* Single unified model dropdown with section headers ("Anthropic", "Ollama")
|
||||
* **API Key Management:**
|
||||
* Anthropic API key stored server-side and persisted securely
|
||||
* On first use of Claude model, user prompted to enter API key
|
||||
* Key persists across sessions (no re-entry needed)
|
||||
## Source Map
|
||||
|
||||
## Tooling Capabilities
|
||||
### Core
|
||||
|
||||
### 1. Filesystem (Native)
|
||||
* **Scope:** Strictly limited to the user-selected `project_root`.
|
||||
* **Operations:** Read, Write, List, Delete.
|
||||
* **Constraint:** Modifications to `.git/` are strictly forbidden via file APIs (use Git tools instead).
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/main.rs` | Entry point, CLI argument parsing, and server startup |
|
||||
| `server/src/config.rs` | Parses `project.toml` for agents, components, and server settings |
|
||||
| `server/src/state.rs` | Global mutable session state (project root, cancellation) |
|
||||
| `server/src/store.rs` | JSON-backed persistent key-value store for settings |
|
||||
| `server/src/gateway.rs` | Multi-project gateway mode (MCP proxy, project switching, agent registration) |
|
||||
|
||||
### 2. Shell Execution
|
||||
* **Library:** `tokio::process` for async execution.
|
||||
* **Constraint:** We do **not** run an interactive shell (repl). We run discrete, stateless commands.
|
||||
* **Allowlist:** The agent may only execute specific binaries:
|
||||
* `git`
|
||||
* `cargo`, `rustc`, `rustfmt`, `clippy`
|
||||
* `npm`, `node`, `yarn`, `pnpm`, `bun`
|
||||
* `ls`, `find`, `grep` (if not using internal search)
|
||||
* `mkdir`, `rm`, `touch`, `mv`, `cp`
|
||||
### Agents
|
||||
|
||||
### 3. Search & Navigation
|
||||
* **Library:** `ignore` (by BurntSushi) + `grep` logic.
|
||||
* **Behavior:**
|
||||
* Must respect `.gitignore` files automatically.
|
||||
* Must be performant (parallel traversal).
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/mod.rs` | Types, configuration, and orchestration for coding agents |
|
||||
| `server/src/agents/gates.rs` | Runs test suites and validation scripts in agent worktrees |
|
||||
| `server/src/agents/lifecycle.rs` | File creation, archival, and stage transitions for pipeline items |
|
||||
| `server/src/agents/merge.rs` | Rebases agent work onto master and runs post-merge validation |
|
||||
| `server/src/agents/pty.rs` | Spawns agent processes in pseudo-terminals and streams output |
|
||||
| `server/src/agents/token_usage.rs` | Persists per-agent token consumption records to disk |
|
||||
| `server/src/agent_log.rs` | Reads and writes JSONL agent event logs to disk |
|
||||
| `server/src/agent_mode.rs` | Headless build-agent mode for distributed story processing |
|
||||
|
||||
## Coding Standards
|
||||
### Agent Pool
|
||||
|
||||
### Rust
|
||||
* **Style:** `rustfmt` standard.
|
||||
* **Linter:** `clippy` - Must pass with 0 warnings before merging.
|
||||
* **Error Handling:** Custom `AppError` type deriving `thiserror`. All Commands return `Result<T, AppError>`.
|
||||
* **Concurrency:** Heavy tools (Search, Shell) must run on `tokio` threads to avoid blocking the UI.
|
||||
* **Quality Gates:**
|
||||
* `cargo clippy --all-targets --all-features` must show 0 errors, 0 warnings
|
||||
* `cargo check` must succeed
|
||||
* `cargo nextest run` must pass all tests
|
||||
* **Test Coverage:**
|
||||
* Generate JSON report: `cargo llvm-cov nextest --no-clean --json --output-path .huskies/coverage/server.json`
|
||||
* Generate lcov report: `cargo llvm-cov report --lcov --output-path .huskies/coverage/server.lcov`
|
||||
* Reports are written to `.huskies/coverage/` (excluded from git)
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/pool/mod.rs` | Manages the set of active agents across all pipeline stages |
|
||||
| `server/src/agents/pool/start.rs` | Spawns a new agent process in a worktree for a story |
|
||||
| `server/src/agents/pool/stop.rs` | Terminates a running agent while preserving its worktree |
|
||||
| `server/src/agents/pool/pipeline/advance.rs` | Moves stories forward through pipeline stages |
|
||||
| `server/src/agents/pool/pipeline/completion.rs` | Processes exit results and triggers pipeline advancement |
|
||||
| `server/src/agents/pool/pipeline/merge.rs` | Orchestrates the merge-to-master flow for completed stories |
|
||||
| `server/src/agents/pool/auto_assign/auto_assign.rs` | Scans pipeline stages and dispatches agents to unassigned stories |
|
||||
|
||||
### TypeScript / React
|
||||
* **Style:** Biome formatter (replaces Prettier/ESLint).
|
||||
* **Linter:** Biome - Must pass with 0 errors, 0 warnings before merging.
|
||||
* **Types:** Shared types with Rust (via `tauri-specta` or manual interface matching) are preferred to ensure type safety across the bridge.
|
||||
* **Testing:** Vitest for unit/component tests; Playwright for end-to-end tests.
|
||||
* **Quality Gates:**
|
||||
* `npx @biomejs/biome check src/` must show 0 errors, 0 warnings
|
||||
* `npm run build` must succeed
|
||||
* `npm test` must pass
|
||||
* `npm run test:e2e` must pass
|
||||
* No `any` types allowed (use proper types or `unknown`)
|
||||
* React keys must use stable IDs, not array indices
|
||||
* All buttons must have explicit `type` attribute
|
||||
### CRDT & Database
|
||||
|
||||
## Libraries (Approved)
|
||||
* **Rust:**
|
||||
* `serde`, `serde_json`: Serialization.
|
||||
* `ignore`: Fast recursive directory iteration respecting gitignore.
|
||||
* `walkdir`: Simple directory traversal.
|
||||
* `tokio`: Async runtime.
|
||||
* `reqwest`: For LLM API calls (Anthropic, Ollama).
|
||||
* `eventsource-stream`: For Server-Sent Events (Anthropic streaming).
|
||||
* `uuid`: For unique message IDs.
|
||||
* `chrono`: For timestamps.
|
||||
* `poem`: HTTP server framework.
|
||||
* `poem-openapi`: OpenAPI (Swagger) for non-streaming HTTP APIs.
|
||||
* **JavaScript:**
|
||||
* `react-markdown`: For rendering chat responses.
|
||||
* `vitest`: Unit/component testing.
|
||||
* `playwright`: End-to-end testing.
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/crdt_state.rs` | Pipeline state as a conflict-free replicated document backed by SQLite |
|
||||
| `server/src/crdt_sync.rs` | WebSocket-based replication of pipeline state between nodes |
|
||||
| `server/src/pipeline_state.rs` | Typed pipeline state machine |
|
||||
| `server/src/db/mod.rs` | Content store, shadow writes, and CRDT op persistence |
|
||||
|
||||
## Running the App (Worktrees & Ports)
|
||||
### HTTP — MCP Tools (the tools agents call)
|
||||
|
||||
Multiple instances can run simultaneously in different worktrees. To avoid port conflicts:
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/http/mcp/mod.rs` | MCP endpoint dispatching tool calls |
|
||||
| `server/src/http/mcp/agent_tools.rs` | Start, stop, wait, list, and inspect agents |
|
||||
| `server/src/http/mcp/git_tools.rs` | Status, diff, add, commit, and log on agent worktrees |
|
||||
| `server/src/http/mcp/merge_tools.rs` | Merge agent work to master and report failures |
|
||||
| `server/src/http/mcp/shell_tools.rs` | Run commands, execute tests, and stream output |
|
||||
| `server/src/http/mcp/story_tools.rs` | Create, update, move, and manage stories/bugs/refactors |
|
||||
| `server/src/http/mcp/diagnostics.rs` | Server logs, CRDT dump, version, and story movement helpers |
|
||||
|
||||
- **Backend:** Set `HUSKIES_PORT` to a unique port (default is 3001). Example: `HUSKIES_PORT=3002 cargo run`
|
||||
- **Frontend:** Run `npm run dev` from `frontend/`. It auto-selects the next unused port. It reads `HUSKIES_PORT` to know which backend to talk to, so export it before running: `export HUSKIES_PORT=3002 && cd frontend && npm run dev`
|
||||
### Chat — Bot Commands
|
||||
|
||||
When running in a worktree, use a port that won't conflict with the main instance (3001). Ports 3002+ are good choices.
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/commands/mod.rs` | Bot-level command registry shared by all transports |
|
||||
| `server/src/chat/commands/status.rs` | `status` command and pipeline status helpers |
|
||||
| `server/src/chat/commands/backlog.rs` | `backlog` command — shows only backlog-stage items |
|
||||
| `server/src/chat/commands/run_tests.rs` | `run_tests` command — run the project's test suite |
|
||||
|
||||
## Safety & Sandbox
|
||||
1. **Project Scope:** The application must strictly enforce that it does not read/write outside the `project_root` selected by the user.
|
||||
2. **Human in the Loop:**
|
||||
* Shell commands that modify state (non-readonly) should ideally require a UI confirmation (configurable).
|
||||
* File writes must be confirmed or revertible.
|
||||
### Chat — Transports
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/matrix/` | Matrix bot integration |
|
||||
| `server/src/chat/transport/slack/` | Slack bot integration |
|
||||
| `server/src/chat/transport/whatsapp/` | WhatsApp Business API integration |
|
||||
| `server/src/chat/transport/discord/` | Discord bot integration |
|
||||
|
||||
### Frontend
|
||||
|
||||
| Directory | Description |
|
||||
|-----------|-------------|
|
||||
| `frontend/src/components/` | React UI components |
|
||||
| `frontend/src/api/` | API client code (gateway, agents, etc.) |
|
||||
|
||||
### Utilities
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/rebuild.rs` | Server rebuild and restart logic |
|
||||
| `server/src/worktree.rs` | Creates, lists, and removes git worktrees for agent isolation |
|
||||
| `server/src/io/watcher.rs` | Filesystem watcher for `.huskies/work/` and `project.toml` |
|
||||
|
||||
## Quality Gates
|
||||
All enforced by `script/test`:
|
||||
1. Frontend build (`npm run build`)
|
||||
2. Rust formatting (`cargo fmt --all --check`)
|
||||
3. Rust linting (`cargo clippy -- -D warnings`)
|
||||
4. Rust tests (`cargo test`)
|
||||
5. Frontend tests (`npm test`)
|
||||
|
||||
-70
@@ -1,70 +0,0 @@
|
||||
---
|
||||
name: "Stale 1_backlog filesystem shadows get re-promoted by rate-limit retry timers, yanking successfully-merged stories back into current"
|
||||
---
|
||||
|
||||
# Bug 510: Stale 1_backlog filesystem shadows get re-promoted by rate-limit retry timers, yanking successfully-merged stories back into current
|
||||
|
||||
## Description
|
||||
|
||||
After a story successfully completes the entire pipeline — coder runs, gates pass, mergemaster squashes the feature branch to master, lifecycle moves the story from `4_merge/` to `5_done/` — a stale filesystem shadow of the story's markdown file remains in `.huskies/work/1_backlog/`. This shadow is a leftover from the 491/492 migration: story state moved to the database as the source of truth, but the lifecycle move logic in `lifecycle.rs` is still operating on the filesystem and doesn't fully clean up after successful pipeline completions.
|
||||
|
||||
When a rate-limit retry timer subsequently fires for that story (rate limits get scheduled by story 496's auto-retry whenever an agent is hard-blocked, and bug 501 means those timers aren't cancelled on successful completion either), the timer fire path calls `move_story_to_current()`, which uses the **filesystem-only** `move_item` helper. That helper finds the stale `1_backlog/` shadow and "moves" it to `2_current/` — even though the story is correctly in `5_done` in the database.
|
||||
|
||||
Net effect: a fully-merged, archived-to-done story suddenly reappears in `current` with a fresh coder spawned on it. The matrix bot sends `Done → Current` notifications. The agent burns tokens working on a story whose work has already shipped to master. The user sees the story flapping and assumes the merge didn't actually happen.
|
||||
|
||||
**Observed live on 2026-04-09 against story 503:**
|
||||
|
||||
```
|
||||
18:31:32 [lifecycle] Moved '503_…' from work/4_merge/ to work/5_done/
|
||||
18:31:32 [bot] Sending stage notification: 🎉 #503 … — Merge → Done
|
||||
18:32:21 [timer] Timer fired for story 503_…
|
||||
18:32:21 [lifecycle] Moved '503_…' from work/1_backlog/ to work/2_current/ ← stale shadow!
|
||||
18:32:21 [auto-assign] Assigning 'coder-1' to '503_…' in 2_current/
|
||||
```
|
||||
|
||||
The merge to master persisted (commit `41515e3b` is on master). Only the *pipeline state* got corrupted by the stale shadow being re-promoted.
|
||||
|
||||
This is **distinct from bug 501** (which is about manual `stop_agent` not cancelling timers) but compounds it: 501 is about user-initiated stops, this is about successful pipeline completions. Both share a root cause — the rate-limit retry timer system has no notion of "this story has moved on, cancel any pending retries" — but the *consequences* of this bug are worse because the timer fires successfully and re-creates work that shouldn't exist.
|
||||
|
||||
Also distinct from bug 502 (mergemaster stage-mismatch) which has been fixed.
|
||||
|
||||
The deeper architectural problem this exposes: **`lifecycle.rs::move_item` and `move_story_to_current` are still on the legacy filesystem path** while the rest of the pipeline (491/492) has moved to DB-as-source-of-truth. The filesystem shadows in `.huskies/work/N_stage/` are supposed to be a *materialized rendering* of the DB state, not a parallel source of truth — but `move_item` treats them as authoritative.
|
||||
|
||||
## How to Reproduce
|
||||
|
||||
1. Take any story through the full pipeline successfully — coder runs, gates pass, mergemaster squashes to master, story moves to `5_done`.
|
||||
2. While the story was in flight, ensure at least one coder run hit a hard rate limit (so a retry timer was scheduled). Bug 501 means that timer survives the successful completion.
|
||||
3. Verify post-completion state:
|
||||
- `SELECT stage FROM pipeline_items WHERE id = 'N_story_X';` returns `5_done` ✓
|
||||
- `ls .huskies/work/1_backlog/N_story_X.md` shows the file STILL EXISTS (the stale shadow)
|
||||
- `cat .huskies/timers.json` shows a pending entry for `N_story_X` with a future `scheduled_at`
|
||||
4. Wait for the timer to fire (default ~5 minutes after the last rate-limit hit).
|
||||
|
||||
## Actual Result
|
||||
|
||||
When the timer fires:
|
||||
- The `[timer] Timer fired` log line appears for the already-done story
|
||||
- `move_story_to_current` is called and finds the stale `1_backlog/N_story_X.md` shadow
|
||||
- Lifecycle log: `[lifecycle] Moved 'N_…' from work/1_backlog/ to work/2_current/`
|
||||
- Auto-assign sees the story in `2_current/` and spawns a coder
|
||||
- Matrix bot sends `Done → Current` (and then later `Current → Current` etc.) stage notifications, spamming the room
|
||||
- The new coder works on a story whose work is already shipped on master, burning tokens
|
||||
- The story is now visible in BOTH `5_done` (via DB) AND `2_current` (via filesystem shadow), depending on which view the consumer reads
|
||||
- The actual master commit is unaffected — the merge that already landed is still there. Only the *pipeline state* is corrupted.
|
||||
|
||||
## Expected Result
|
||||
|
||||
Successful pipeline completions must fully clean up the story's filesystem shadows. After `move_story_to_done` runs, `.huskies/work/1_backlog/N_story_X.md` (and any other stage shadow) for that story must not exist.
|
||||
|
||||
Additionally — and this is the more general fix — the rate-limit retry timer system must cancel any pending timers for a story when that story successfully completes the pipeline. This is a sibling fix to bug 501 (which is about cancelling on manual stop): both manual stop and successful completion should mean "no more retries".
|
||||
|
||||
The deepest fix is to migrate `lifecycle.rs::move_item` off the filesystem path and onto the DB path so the shadow files can be torn down entirely (or made strictly read-only renderings). That's a larger change that probably wants its own story, not a bug fix.
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] After a story moves to 5_done via the normal pipeline path (mergemaster success), the filesystem shadow at .huskies/work/1_backlog/N_story_X.md is removed (and any other stage shadows are also removed)
|
||||
- [ ] When a story moves to 5_done, any pending rate-limit retry timer for that story is cancelled (the entry is removed from timers.json before the file is persisted)
|
||||
- [ ] Regression test: simulate the full repro sequence — run a story through the pipeline with a mid-flight rate limit, complete the merge, fast-forward to the timer fire, assert (a) the story stays in 5_done, (b) no agent is spawned, (c) no Done→Current notification fires
|
||||
- [ ] No regression in bug 501's fix for manual-stop timer cancellation
|
||||
- [ ] Filesystem shadow cleanup is symmetric — also runs on delete_story, move_story to backlog, etc., not just the done path
|
||||
- [ ] The matrix bot does not spam Done→Current notifications for stories whose work has actually completed
|
||||
Generated
+49
-126
@@ -482,9 +482,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.11.0"
|
||||
version = "2.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
|
||||
checksum = "c4512299f36f043ab09a583e57bceb5a5aab7a73db1805848e8fef3c9e8c78b3"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
@@ -1769,21 +1769,6 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb"
|
||||
|
||||
[[package]]
|
||||
name = "foreign-types"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
|
||||
dependencies = [
|
||||
"foreign-types-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "foreign-types-shared"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
|
||||
|
||||
[[package]]
|
||||
name = "form_urlencoded"
|
||||
version = "1.2.2"
|
||||
@@ -2303,7 +2288,7 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
|
||||
|
||||
[[package]]
|
||||
name = "huskies"
|
||||
version = "0.10.0"
|
||||
version = "0.10.2"
|
||||
dependencies = [
|
||||
"async-stream",
|
||||
"async-trait",
|
||||
@@ -2381,9 +2366,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hyper-rustls"
|
||||
version = "0.27.8"
|
||||
version = "0.27.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2b52f86d1d4bc0d6b4e6826d960b1b333217e07d36b882dca570a5e1c48895b"
|
||||
checksum = "33ca68d021ef39cf6463ab54c1d0f5daf03377b70561305bb89a8f83aab66e0f"
|
||||
dependencies = [
|
||||
"http",
|
||||
"hyper",
|
||||
@@ -2658,7 +2643,7 @@ version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bd5b3eaf1a28b758ac0faa5a4254e8ab2705605496f1b1f3fbbc3988ad73d199"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"inotify-sys",
|
||||
"libc",
|
||||
]
|
||||
@@ -2878,9 +2863,9 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.184"
|
||||
version = "0.2.185"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af"
|
||||
checksum = "52ff2c0fe9bc6cb6b14a0592c2ff4fa9ceb83eea9db979b0487cd054946a2b8f"
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
@@ -2894,7 +2879,7 @@ version = "0.1.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e02f3bb43d335493c96bf3fd3a321600bf6bd07ed34bc64118e9293bdffea46c"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"libc",
|
||||
"plain",
|
||||
"redox_syscall 0.7.4",
|
||||
@@ -3113,7 +3098,7 @@ checksum = "70f404a390ff98a73c426b1496b169be60ce6a93723a9a664e579d978a84c5e4"
|
||||
dependencies = [
|
||||
"as_variant",
|
||||
"async-trait",
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"decancer",
|
||||
"eyeball",
|
||||
"eyeball-im",
|
||||
@@ -3401,7 +3386,7 @@ dependencies = [
|
||||
"hyper-util",
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
"rand 0.9.3",
|
||||
"rand 0.9.4",
|
||||
"regex",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
@@ -3427,23 +3412,6 @@ dependencies = [
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "native-tls"
|
||||
version = "0.2.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"openssl",
|
||||
"openssl-probe",
|
||||
"openssl-sys",
|
||||
"schannel",
|
||||
"security-framework",
|
||||
"security-framework-sys",
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "new_debug_unreachable"
|
||||
version = "1.0.6"
|
||||
@@ -3456,7 +3424,7 @@ version = "0.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"cfg-if",
|
||||
"cfg_aliases 0.1.1",
|
||||
"libc",
|
||||
@@ -3468,7 +3436,7 @@ version = "0.30.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"cfg-if",
|
||||
"cfg_aliases 0.2.1",
|
||||
"libc",
|
||||
@@ -3490,7 +3458,7 @@ version = "8.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"fsevent-sys",
|
||||
"inotify",
|
||||
"kqueue",
|
||||
@@ -3508,7 +3476,7 @@ version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3630,50 +3598,12 @@ version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
|
||||
|
||||
[[package]]
|
||||
name = "openssl"
|
||||
version = "0.10.77"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bfe4646e360ec77dff7dde40ed3d6c5fee52d156ef4a62f53973d38294dad87f"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"cfg-if",
|
||||
"foreign-types",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"openssl-macros",
|
||||
"openssl-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "openssl-macros"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "openssl-probe"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe"
|
||||
|
||||
[[package]]
|
||||
name = "openssl-sys"
|
||||
version = "0.9.113"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad2f2c0eba47118757e4c6d2bff2838f3e0523380021356e7875e858372ce644"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "os_str_bytes"
|
||||
version = "6.6.1"
|
||||
@@ -4186,7 +4116,7 @@ version = "0.13.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c3a14896dfa883796f1cb410461aef38810ea05f2b2c33c5aded3649095fdad"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"memchr",
|
||||
"pulldown-cmark-escape",
|
||||
"unicase",
|
||||
@@ -4238,7 +4168,7 @@ dependencies = [
|
||||
"bytes",
|
||||
"getrandom 0.3.4",
|
||||
"lru-slab",
|
||||
"rand 0.9.3",
|
||||
"rand 0.9.4",
|
||||
"ring",
|
||||
"rustc-hash",
|
||||
"rustls",
|
||||
@@ -4312,9 +4242,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.9.3"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ec095654a25171c2124e9e3393a930bddbffdc939556c914957a4c3e0a87166"
|
||||
checksum = "44c5af06bb1b7d3216d91932aed5265164bf384dc89cd6ba05cf59a35f5f76ea"
|
||||
dependencies = [
|
||||
"rand_chacha 0.9.0",
|
||||
"rand_core 0.9.5",
|
||||
@@ -4415,9 +4345,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.11.0"
|
||||
version = "1.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
|
||||
checksum = "fb39b166781f92d482534ef4b4b1b2568f42613b53e5b6c160e24cfbfa30926d"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rayon-core",
|
||||
@@ -4465,7 +4395,7 @@ version = "0.5.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4474,7 +4404,7 @@ version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f450ad9c3b1da563fb6948a8e0fb0fb9269711c9c73d9ea1de5058c79c8d643a"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4886,7 +4816,7 @@ version = "0.37.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "165ca6e57b20e1351573e3729b958bc62f0e48025386970b6e4d29e7a7e71f3f"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"fallible-iterator",
|
||||
"fallible-streaming-iterator",
|
||||
"hashlink",
|
||||
@@ -4949,7 +4879,7 @@ version = "1.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
@@ -5022,9 +4952,9 @@ checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f"
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.103.11"
|
||||
version = "0.103.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4"
|
||||
checksum = "8279bb85272c9f10811ae6a6c547ff594d6a7f3c6c6b02ee9726d1d0dcfcdd06"
|
||||
dependencies = [
|
||||
"aws-lc-rs",
|
||||
"ring",
|
||||
@@ -5167,7 +5097,7 @@ version = "3.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"core-foundation 0.10.1",
|
||||
"core-foundation-sys",
|
||||
"libc",
|
||||
@@ -5357,9 +5287,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serial2"
|
||||
version = "0.2.35"
|
||||
version = "0.2.36"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e66ab7ee258c6456796c6098e1b53a5baa1a5e0637347de59ddb44ee8e20be6e"
|
||||
checksum = "fcdbc46aa3882ec3d48ec2b5abcb4f0d863a13d7599265f3faa6d851f23c12f3"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
@@ -5637,7 +5567,7 @@ checksum = "6fef16f3d52a3710a672b48175b713e86476e2df85576a753c8b37ad11a483c0"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"base64",
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"byteorder",
|
||||
"bytes",
|
||||
"crc",
|
||||
@@ -5677,7 +5607,7 @@ checksum = "f053cf36ecb2793a9d9bb02d01bbad1ef66481d5db6ff5ab2dfb7b070cc0d13c"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"base64",
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"byteorder",
|
||||
"crc",
|
||||
"dotenvy",
|
||||
@@ -5884,7 +5814,7 @@ version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"core-foundation 0.9.4",
|
||||
"system-configuration-sys",
|
||||
]
|
||||
@@ -6066,9 +5996,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
||||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.51.1"
|
||||
version = "1.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f66bf9585cda4b724d3e78ab34b73fb2bbaba9011b9bfdf69dc836382ea13b8c"
|
||||
checksum = "a91135f59b1cbf38c91e73cf3386fca9bb77915c45ce2771460c9d92f0f3d776"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"libc",
|
||||
@@ -6092,16 +6022,6 @@ dependencies = [
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-native-tls"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
|
||||
dependencies = [
|
||||
"native-tls",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-rustls"
|
||||
version = "0.26.4"
|
||||
@@ -6144,9 +6064,11 @@ checksum = "8f72a05e828585856dacd553fba484c242c46e391fb0e58917c942ee9202915c"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"log",
|
||||
"native-tls",
|
||||
"rustls",
|
||||
"rustls-native-certs",
|
||||
"rustls-pki-types",
|
||||
"tokio",
|
||||
"tokio-native-tls",
|
||||
"tokio-rustls",
|
||||
"tungstenite 0.29.0",
|
||||
]
|
||||
|
||||
@@ -6276,7 +6198,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
@@ -6379,7 +6301,7 @@ dependencies = [
|
||||
"http",
|
||||
"httparse",
|
||||
"log",
|
||||
"rand 0.9.3",
|
||||
"rand 0.9.4",
|
||||
"sha1",
|
||||
"thiserror 2.0.18",
|
||||
"utf-8",
|
||||
@@ -6396,8 +6318,9 @@ dependencies = [
|
||||
"http",
|
||||
"httparse",
|
||||
"log",
|
||||
"native-tls",
|
||||
"rand 0.9.3",
|
||||
"rand 0.9.4",
|
||||
"rustls",
|
||||
"rustls-pki-types",
|
||||
"sha1",
|
||||
"thiserror 2.0.18",
|
||||
]
|
||||
@@ -6410,9 +6333,9 @@ checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
|
||||
|
||||
[[package]]
|
||||
name = "typewit"
|
||||
version = "1.15.1"
|
||||
version = "1.15.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc19094686c694eb41b3b99dcc2f2975d4b078512fa22ae6c63f7ca318bdcff7"
|
||||
checksum = "214ca0b2191785cbc06209b9ca1861e048e39b5ba33574b3cedd58363d5bb5f6"
|
||||
dependencies = [
|
||||
"typewit_proc_macros",
|
||||
]
|
||||
@@ -6429,7 +6352,7 @@ version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
|
||||
dependencies = [
|
||||
"rand 0.9.3",
|
||||
"rand 0.9.4",
|
||||
"web-time",
|
||||
]
|
||||
|
||||
@@ -6806,7 +6729,7 @@ version = "0.244.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"hashbrown 0.15.5",
|
||||
"indexmap 2.14.0",
|
||||
"semver",
|
||||
@@ -7397,7 +7320,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.11.0",
|
||||
"bitflags 2.11.1",
|
||||
"indexmap 2.14.0",
|
||||
"log",
|
||||
"serde",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Huskies
|
||||
|
||||
A story-driven development server that manages work items, spawns coding agents, and runs them through a pipeline from backlog to done. Ships as a single Rust binary with an embedded React frontend.
|
||||
A story-driven development server that manages work items, spawns coding agents, and runs them through a pipeline from backlog to done. Ships as a single Rust binary with an embedded React frontend. Can also be run in WhatsApp, Matrix, and Slack chats.
|
||||
|
||||
## Getting started with Claude Code
|
||||
|
||||
@@ -220,283 +220,6 @@ Both return a JSON document with:
|
||||
|
||||
## Source Map
|
||||
|
||||
### Core
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/main.rs` | Entry point, CLI argument parsing, and server startup |
|
||||
| `server/src/config.rs` | Parses `project.toml` for agents, components, and server settings |
|
||||
| `server/src/state.rs` | Global mutable session state (project root, cancellation) |
|
||||
| `server/src/store.rs` | JSON-backed persistent key-value store for settings |
|
||||
|
||||
### Agents
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/mod.rs` | Types, configuration, and orchestration for coding agents |
|
||||
| `server/src/agents/gates.rs` | Runs test suites and validation scripts in agent worktrees |
|
||||
| `server/src/agents/lifecycle.rs` | File creation, archival, and stage transitions for pipeline items |
|
||||
| `server/src/agents/merge.rs` | Rebases agent work onto master and runs post-merge validation |
|
||||
| `server/src/agents/pty.rs` | Spawns agent processes in pseudo-terminals and streams output |
|
||||
| `server/src/agents/token_usage.rs` | Persists per-agent token consumption records to disk |
|
||||
| `server/src/agent_log.rs` | Reads and writes JSONL agent event logs to disk |
|
||||
| `server/src/agent_mode.rs` | Headless build-agent mode for distributed story processing |
|
||||
|
||||
### Agent Pool
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/pool/mod.rs` | Manages the set of active agents across all pipeline stages |
|
||||
| `server/src/agents/pool/types.rs` | `AgentPool`, `StoryAgent`, and related data structures |
|
||||
| `server/src/agents/pool/start.rs` | Spawns a new agent process in a worktree for a story |
|
||||
| `server/src/agents/pool/stop.rs` | Terminates a running agent while preserving its worktree |
|
||||
| `server/src/agents/pool/wait.rs` | Blocks until an agent reaches a terminal state |
|
||||
| `server/src/agents/pool/query.rs` | Lists available/active agents and info lookups |
|
||||
| `server/src/agents/pool/process.rs` | Kills orphaned PTY child processes on shutdown |
|
||||
| `server/src/agents/pool/worktree.rs` | Creates and configures git worktrees for agents |
|
||||
| `server/src/agents/pool/test_helpers.rs` | In-memory pool construction and test assertions |
|
||||
|
||||
### Agent Pool — Auto-assign
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/pool/auto_assign/mod.rs` | Wires sub-files and re-exports public items |
|
||||
| `server/src/agents/pool/auto_assign/auto_assign.rs` | Scans pipeline stages and dispatches agents to unassigned stories |
|
||||
| `server/src/agents/pool/auto_assign/reconcile.rs` | Startup reconciliation: detects committed work and advances pipeline |
|
||||
| `server/src/agents/pool/auto_assign/scan.rs` | Scans pipeline stages for work items and queries pool state |
|
||||
| `server/src/agents/pool/auto_assign/story_checks.rs` | Front-matter checks: review holds, blocked state, merge failures |
|
||||
| `server/src/agents/pool/auto_assign/watchdog.rs` | Detects orphaned agents and triggers auto-assign |
|
||||
|
||||
### Agent Pool — Pipeline
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/pool/pipeline/mod.rs` | Stage advancement, completion handling, and merge orchestration |
|
||||
| `server/src/agents/pool/pipeline/advance.rs` | Moves stories forward through pipeline stages |
|
||||
| `server/src/agents/pool/pipeline/completion.rs` | Processes exit results and triggers pipeline advancement |
|
||||
| `server/src/agents/pool/pipeline/merge.rs` | Orchestrates the merge-to-master flow for completed stories |
|
||||
|
||||
### Agent Runtimes
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/agents/runtime/mod.rs` | Pluggable backends (Claude Code, Gemini, OpenAI) for running agents |
|
||||
| `server/src/agents/runtime/claude_code.rs` | Launches Claude Code CLI sessions as agent backends |
|
||||
| `server/src/agents/runtime/gemini.rs` | Drives Google Gemini API sessions as agent backends |
|
||||
| `server/src/agents/runtime/openai.rs` | Drives OpenAI API sessions as agent backends |
|
||||
|
||||
### CRDT
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/crdt_state.rs` | Pipeline state as a conflict-free replicated document backed by SQLite |
|
||||
| `server/src/crdt_sync.rs` | WebSocket-based replication of pipeline state between nodes |
|
||||
| `server/src/crdt_wire.rs` | Serialization format for `SignedOp` sync messages |
|
||||
| `server/src/pipeline_state.rs` | Typed pipeline state machine |
|
||||
|
||||
### Database
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/db/mod.rs` | Content store, shadow writes, and CRDT op persistence |
|
||||
|
||||
### HTTP Server
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/http/mod.rs` | Module declarations for all REST, MCP, WebSocket, and SSE endpoints |
|
||||
| `server/src/http/context.rs` | Shared `AppContext` threaded through all HTTP handlers |
|
||||
| `server/src/http/agents.rs` | REST API for listing, starting, stopping, and inspecting agents |
|
||||
| `server/src/http/agents_sse.rs` | Server-Sent Events endpoint for real-time agent output |
|
||||
| `server/src/http/anthropic.rs` | Proxy for model listing and key-validation to Anthropic |
|
||||
| `server/src/http/assets.rs` | Serves the embedded React frontend via `rust-embed` |
|
||||
| `server/src/http/bot_command.rs` | Bot command HTTP endpoint |
|
||||
| `server/src/http/chat.rs` | REST API for the LLM-powered chat interface |
|
||||
| `server/src/http/health.rs` | Returns a static "ok" response |
|
||||
| `server/src/http/io.rs` | REST API for file and directory operations |
|
||||
| `server/src/http/model.rs` | REST API for model selection and LLM provider management |
|
||||
| `server/src/http/oauth.rs` | Anthropic OAuth callback and token exchange flow |
|
||||
| `server/src/http/project.rs` | REST API for project initialization and context management |
|
||||
| `server/src/http/settings.rs` | REST API for user preferences and editor configuration |
|
||||
| `server/src/http/wizard.rs` | REST API for the project setup wizard |
|
||||
| `server/src/http/ws.rs` | Real-time pipeline updates, chat, and permission prompts |
|
||||
| `server/src/http/test_helpers.rs` | Shared test utilities for HTTP handler tests |
|
||||
|
||||
### HTTP — MCP Tools
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/http/mcp/mod.rs` | Model Context Protocol endpoint dispatching tool calls |
|
||||
| `server/src/http/mcp/agent_tools.rs` | Start, stop, wait, list, and inspect agents via MCP |
|
||||
| `server/src/http/mcp/diagnostics.rs` | Server logs, CRDT dump, and story movement helpers |
|
||||
| `server/src/http/mcp/git_tools.rs` | Status, diff, add, commit, and log on agent worktrees |
|
||||
| `server/src/http/mcp/merge_tools.rs` | Merge agent work to master and report failures |
|
||||
| `server/src/http/mcp/qa_tools.rs` | Request, approve, and reject QA reviews |
|
||||
| `server/src/http/mcp/shell_tools.rs` | Run commands, execute tests, and stream output |
|
||||
| `server/src/http/mcp/status_tools.rs` | Pipeline status, story triage, and AC inspection |
|
||||
| `server/src/http/mcp/story_tools.rs` | Create, update, move, and manage stories/bugs/refactors |
|
||||
| `server/src/http/mcp/wizard_tools.rs` | Interactive setup wizard tool implementations |
|
||||
|
||||
### HTTP — Workflow
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/http/workflow/mod.rs` | Shared story/bug file operations for HTTP and MCP handlers |
|
||||
| `server/src/http/workflow/bug_ops.rs` | Creates bug, refactor, and spike files in the pipeline |
|
||||
| `server/src/http/workflow/story_ops.rs` | Creates, updates, and manages acceptance criteria in stories |
|
||||
| `server/src/http/workflow/test_results.rs` | Writes structured test results into story markdown |
|
||||
|
||||
### I/O
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/io/mod.rs` | Filesystem, shell, search, onboarding, and story metadata operations |
|
||||
| `server/src/io/fs/mod.rs` | Module declarations and re-exports for file operations |
|
||||
| `server/src/io/fs/files.rs` | Read, write, list, and create files and directories |
|
||||
| `server/src/io/fs/paths.rs` | Resolves CLI and session-relative paths to absolute paths |
|
||||
| `server/src/io/fs/preferences.rs` | Reads and writes model selection and user settings |
|
||||
| `server/src/io/fs/project.rs` | Tracks known projects and resolves the active project root |
|
||||
| `server/src/io/fs/scaffold.rs` | Creates the `.huskies/` directory structure and default files |
|
||||
| `server/src/io/onboarding.rs` | Checks whether scaffold templates have been customized |
|
||||
| `server/src/io/search.rs` | Full-text search across project files |
|
||||
| `server/src/io/shell.rs` | Runs commands in the project directory and captures output |
|
||||
| `server/src/io/story_metadata.rs` | Parses and modifies YAML front matter in story markdown |
|
||||
| `server/src/io/watcher.rs` | Filesystem watcher for `.huskies/work/` and `project.toml` |
|
||||
| `server/src/io/wizard.rs` | Multi-step project onboarding flow with per-step status |
|
||||
| `server/src/io/test_helpers.rs` | Shared test utilities for I/O module tests |
|
||||
|
||||
### Chat
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/mod.rs` | Transport abstraction for chat platforms |
|
||||
| `server/src/chat/lookup.rs` | Shared story-lookup helper for chat commands |
|
||||
| `server/src/chat/timer.rs` | Deferred agent start via one-shot timers |
|
||||
| `server/src/chat/util.rs` | Shared text utilities used by all transports |
|
||||
| `server/src/chat/test_helpers.rs` | Shared test utilities for chat handler tests |
|
||||
|
||||
### Chat — Commands
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/commands/mod.rs` | Bot-level command registry shared by all transports |
|
||||
| `server/src/chat/commands/ambient.rs` | `ambient` command handler |
|
||||
| `server/src/chat/commands/assign.rs` | `assign` command handler |
|
||||
| `server/src/chat/commands/backlog.rs` | `backlog` command — shows only backlog-stage items |
|
||||
| `server/src/chat/commands/cost.rs` | `cost` command handler |
|
||||
| `server/src/chat/commands/coverage.rs` | `coverage` command — show or refresh test coverage |
|
||||
| `server/src/chat/commands/depends.rs` | `depends` command handler |
|
||||
| `server/src/chat/commands/git.rs` | `git` command handler |
|
||||
| `server/src/chat/commands/help.rs` | `help` command handler |
|
||||
| `server/src/chat/commands/loc.rs` | `loc` command — top source files by line count |
|
||||
| `server/src/chat/commands/move_story.rs` | `move` command handler |
|
||||
| `server/src/chat/commands/overview.rs` | `overview` command handler |
|
||||
| `server/src/chat/commands/run_tests.rs` | `test` command — run the project's test suite |
|
||||
| `server/src/chat/commands/setup.rs` | `setup` command handler |
|
||||
| `server/src/chat/commands/show.rs` | `show` command handler |
|
||||
| `server/src/chat/commands/status.rs` | `status` command and pipeline status helpers |
|
||||
| `server/src/chat/commands/timer.rs` | `timer` command handler |
|
||||
| `server/src/chat/commands/triage.rs` | Story triage dump subcommand of `status` |
|
||||
| `server/src/chat/commands/unblock.rs` | `unblock` command handler |
|
||||
| `server/src/chat/commands/unreleased.rs` | `unreleased` command handler |
|
||||
|
||||
### Chat — Matrix Transport
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/matrix/mod.rs` | Matrix bot integration |
|
||||
| `server/src/chat/transport/matrix/config.rs` | Deserialization of `bot.toml` Matrix settings |
|
||||
| `server/src/chat/transport/matrix/commands.rs` | Re-exports from `crate::chat::commands` |
|
||||
| `server/src/chat/transport/matrix/transport_impl.rs` | Matrix `ChatTransport` implementation |
|
||||
| `server/src/chat/transport/matrix/assign.rs` | Assign/re-assign a coder model to a story |
|
||||
| `server/src/chat/transport/matrix/delete.rs` | Delete a story/bug/spike from the pipeline |
|
||||
| `server/src/chat/transport/matrix/htop.rs` | Live-updating system and agent process dashboard |
|
||||
| `server/src/chat/transport/matrix/notifications.rs` | Stage transition notifications for Matrix rooms |
|
||||
| `server/src/chat/transport/matrix/rebuild.rs` | Trigger a server rebuild and restart |
|
||||
| `server/src/chat/transport/matrix/reset.rs` | Clear the current Claude Code session for a room |
|
||||
| `server/src/chat/transport/matrix/rmtree.rs` | Delete the worktree for a story |
|
||||
| `server/src/chat/transport/matrix/start.rs` | Start a coder agent on a story |
|
||||
|
||||
### Chat — Matrix Bot
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/matrix/bot/mod.rs` | Sub-modules for the Matrix chat bot |
|
||||
| `server/src/chat/transport/matrix/bot/context.rs` | Shared state (rooms, history, permissions) |
|
||||
| `server/src/chat/transport/matrix/bot/format.rs` | Markdown-to-HTML conversion and startup announcements |
|
||||
| `server/src/chat/transport/matrix/bot/history.rs` | Per-room message history for LLM context |
|
||||
| `server/src/chat/transport/matrix/bot/mentions.rs` | Checks whether a message mentions the bot |
|
||||
| `server/src/chat/transport/matrix/bot/messages.rs` | Processes incoming messages and dispatches commands |
|
||||
| `server/src/chat/transport/matrix/bot/run.rs` | Connects to homeserver and processes sync events |
|
||||
| `server/src/chat/transport/matrix/bot/verification.rs` | Interactive emoji verification flow for E2EE |
|
||||
|
||||
### Chat — Slack Transport
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/slack/mod.rs` | Slack Bot API integration |
|
||||
| `server/src/chat/transport/slack/commands.rs` | Incoming message dispatch and slash command handling |
|
||||
| `server/src/chat/transport/slack/format.rs` | Markdown to Slack mrkdwn conversion |
|
||||
| `server/src/chat/transport/slack/history.rs` | Conversation history persistence |
|
||||
| `server/src/chat/transport/slack/meta.rs` | `ChatTransport` implementation for Slack |
|
||||
| `server/src/chat/transport/slack/verify.rs` | Request signature verification |
|
||||
|
||||
### Chat — Discord Transport
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/discord/mod.rs` | Discord Bot integration |
|
||||
| `server/src/chat/transport/discord/commands.rs` | Incoming message dispatch and command handling |
|
||||
| `server/src/chat/transport/discord/format.rs` | Markdown to Discord format conversion |
|
||||
| `server/src/chat/transport/discord/gateway.rs` | Minimal Discord Gateway WebSocket client |
|
||||
| `server/src/chat/transport/discord/history.rs` | Conversation history persistence |
|
||||
| `server/src/chat/transport/discord/meta.rs` | `ChatTransport` implementation for Discord |
|
||||
|
||||
### Chat — WhatsApp Transport
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/whatsapp/mod.rs` | WhatsApp Business API integration |
|
||||
| `server/src/chat/transport/whatsapp/commands.rs` | Processes incoming messages as bot commands |
|
||||
| `server/src/chat/transport/whatsapp/format.rs` | Markdown-to-WhatsApp conversion and message chunking |
|
||||
| `server/src/chat/transport/whatsapp/history.rs` | Per-number history and messaging window tracking |
|
||||
| `server/src/chat/transport/whatsapp/meta.rs` | Meta Cloud API transport via Graph API |
|
||||
| `server/src/chat/transport/whatsapp/twilio.rs` | Twilio transport for sending/receiving messages |
|
||||
|
||||
### Chat — Transport Abstraction
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/chat/transport/mod.rs` | Pluggable backends (Matrix, Slack, WhatsApp, Discord) |
|
||||
|
||||
### LLM
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/llm/mod.rs` | Chat orchestration, prompts, OAuth, and provider integrations |
|
||||
| `server/src/llm/chat.rs` | Multi-turn conversations with tool-calling LLM providers |
|
||||
| `server/src/llm/oauth.rs` | Token refresh and credential management for Claude API |
|
||||
| `server/src/llm/prompts.rs` | Static prompt templates for chat and onboarding |
|
||||
| `server/src/llm/types.rs` | `Message`, `Role`, `ToolCall`, `ModelProvider` types |
|
||||
|
||||
### LLM — Providers
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/llm/providers/mod.rs` | Module declarations for Anthropic, Claude Code, and Ollama |
|
||||
| `server/src/llm/providers/anthropic.rs` | Streaming completion client for Claude Messages API |
|
||||
| `server/src/llm/providers/claude_code.rs` | Runs Claude Code CLI in a PTY and parses output |
|
||||
| `server/src/llm/providers/ollama.rs` | Streaming completion client for Ollama models |
|
||||
|
||||
### Utilities
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `server/src/log_buffer.rs` | Bounded in-memory ring buffer for server log output |
|
||||
| `server/src/rebuild.rs` | Server rebuild and restart logic |
|
||||
| `server/src/workflow.rs` | Test result tracking and acceptance evaluation |
|
||||
| `server/src/worktree.rs` | Creates, lists, and removes git worktrees for agent isolation |
|
||||
|
||||
## License
|
||||
See `.huskies/specs/tech/STACK.md` for the full source map.
|
||||
|
||||
GPL-3.0. See [LICENSE](LICENSE).
|
||||
|
||||
@@ -264,11 +264,7 @@ impl<T: CrdtNode + DebugView> BaseCrdt<T> {
|
||||
// Bounded queue overflow: evict the oldest op from the largest
|
||||
// pending bucket before adding the new one. See CAUSAL_QUEUE_MAX.
|
||||
if self.queue_len >= CAUSAL_QUEUE_MAX {
|
||||
if let Some(bucket) = self
|
||||
.message_q
|
||||
.values_mut()
|
||||
.max_by_key(|v| v.len())
|
||||
{
|
||||
if let Some(bucket) = self.message_q.values_mut().max_by_key(|v| v.len()) {
|
||||
if !bucket.is_empty() {
|
||||
bucket.remove(0);
|
||||
self.queue_len = self.queue_len.saturating_sub(1);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::debug::DebugView;
|
||||
use crate::json_crdt::{CrdtNode, OpState, JsonValue};
|
||||
use crate::json_crdt::{CrdtNode, JsonValue, OpState};
|
||||
use crate::op::{join_path, print_path, Op, PathSegment, SequenceNumber};
|
||||
use std::cmp::{max, Ordering};
|
||||
use std::fmt::Debug;
|
||||
|
||||
+2
-1
@@ -109,7 +109,8 @@ RUN groupadd -r huskies \
|
||||
&& chown -R huskies:huskies /usr/local/cargo /usr/local/rustup \
|
||||
&& chown -R huskies:huskies /app \
|
||||
&& mkdir -p /workspace/target /app/target \
|
||||
&& chown huskies:huskies /workspace/target /app/target
|
||||
&& chown huskies:huskies /workspace/target /app/target \
|
||||
&& git config --global init.defaultBranch master
|
||||
|
||||
# ── Entrypoint ───────────────────────────────────────────────────────
|
||||
# Validates required env vars (GIT_USER_NAME, GIT_USER_EMAIL) and
|
||||
|
||||
@@ -69,6 +69,16 @@ services:
|
||||
- workspace-target:/workspace/target
|
||||
- huskies-target:/app/target
|
||||
|
||||
# Isolate frontend node_modules from the host.
|
||||
# npm install pulls platform-specific native binaries (esbuild,
|
||||
# rollup, etc.) — macOS binaries won't run on Linux and vice versa.
|
||||
# Without this volume, building on the Mac host writes macOS
|
||||
# node_modules into the bind mount, then the Linux container tries
|
||||
# to execute them and fails. The Docker volume gives the container
|
||||
# its own Linux-native node_modules that doesn't collide with the
|
||||
# host's.
|
||||
- frontend-modules:/workspace/frontend/node_modules
|
||||
|
||||
# ── Security hardening ──────────────────────────────────────────
|
||||
# Read-only root filesystem. Only explicitly mounted volumes and
|
||||
# tmpfs paths are writable.
|
||||
@@ -130,3 +140,4 @@ volumes:
|
||||
claude-state:
|
||||
workspace-target:
|
||||
huskies-target:
|
||||
frontend-modules:
|
||||
|
||||
Generated
+2
-2
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "huskies",
|
||||
"version": "0.10.0",
|
||||
"version": "0.10.2",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "huskies",
|
||||
"version": "0.10.0",
|
||||
"version": "0.10.2",
|
||||
"dependencies": {
|
||||
"@types/react-syntax-highlighter": "^15.5.13",
|
||||
"react": "^19.1.0",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "huskies",
|
||||
"private": true,
|
||||
"version": "0.10.0",
|
||||
"version": "0.10.2",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
+18
-1
@@ -2,11 +2,14 @@ import * as React from "react";
|
||||
import type { OAuthStatus } from "./api/client";
|
||||
import { api } from "./api/client";
|
||||
import { Chat } from "./components/Chat";
|
||||
import { GatewayPanel } from "./components/GatewayPanel";
|
||||
import { SelectionScreen } from "./components/selection/SelectionScreen";
|
||||
import { usePathCompletion } from "./components/selection/usePathCompletion";
|
||||
import { gatewayApi } from "./api/gateway";
|
||||
import "./App.css";
|
||||
|
||||
function App() {
|
||||
const [isGateway, setIsGateway] = React.useState<boolean | null>(null);
|
||||
const [projectPath, setProjectPath] = React.useState<string | null>(null);
|
||||
const [_view, setView] = React.useState<"chat" | "token-usage">("chat");
|
||||
const [isCheckingProject, setIsCheckingProject] = React.useState(true);
|
||||
@@ -19,6 +22,14 @@ function App() {
|
||||
null,
|
||||
);
|
||||
|
||||
// Detect gateway mode on startup — if /gateway/mode returns 200, we're a gateway.
|
||||
React.useEffect(() => {
|
||||
gatewayApi
|
||||
.getServerMode()
|
||||
.then((result) => setIsGateway(result.mode === "gateway"))
|
||||
.catch(() => setIsGateway(false));
|
||||
}, []);
|
||||
|
||||
React.useEffect(() => {
|
||||
let active = true;
|
||||
function fetchOAuthStatus() {
|
||||
@@ -188,10 +199,16 @@ function App() {
|
||||
}
|
||||
}
|
||||
|
||||
if (isCheckingProject) {
|
||||
// Still probing server mode — wait before rendering.
|
||||
if (isGateway === null || isCheckingProject) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Gateway mode: render the agent management UI instead of the normal chat.
|
||||
if (isGateway) {
|
||||
return <GatewayPanel />;
|
||||
}
|
||||
|
||||
return (
|
||||
<main
|
||||
className="container"
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
export interface BotConfig {
|
||||
transport: string | null;
|
||||
enabled: boolean | null;
|
||||
homeserver: string | null;
|
||||
username: string | null;
|
||||
password: string | null;
|
||||
room_ids: string[] | null;
|
||||
slack_bot_token: string | null;
|
||||
slack_signing_secret: string | null;
|
||||
slack_channel_ids: string[] | null;
|
||||
}
|
||||
|
||||
const DEFAULT_API_BASE = "/api";
|
||||
|
||||
async function requestJson<T>(
|
||||
path: string,
|
||||
options: RequestInit = {},
|
||||
baseUrl = DEFAULT_API_BASE,
|
||||
): Promise<T> {
|
||||
const res = await fetch(`${baseUrl}${path}`, {
|
||||
headers: { "Content-Type": "application/json", ...(options.headers ?? {}) },
|
||||
...options,
|
||||
});
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(text || `Request failed (${res.status})`);
|
||||
}
|
||||
return res.json() as Promise<T>;
|
||||
}
|
||||
|
||||
export const botConfigApi = {
|
||||
getConfig(baseUrl?: string): Promise<BotConfig> {
|
||||
return requestJson<BotConfig>("/bot/config", {}, baseUrl);
|
||||
},
|
||||
|
||||
saveConfig(config: BotConfig, baseUrl?: string): Promise<BotConfig> {
|
||||
return requestJson<BotConfig>(
|
||||
"/bot/config",
|
||||
{ method: "PUT", body: JSON.stringify(config) },
|
||||
baseUrl,
|
||||
);
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,149 @@
|
||||
/// Gateway API client — used when running in gateway mode.
|
||||
///
|
||||
/// The gateway mode is detected by checking `GET /gateway/mode`. If it returns
|
||||
/// `{ "mode": "gateway" }` the frontend switches to the gateway UI.
|
||||
|
||||
export interface JoinedAgent {
|
||||
id: string;
|
||||
label: string;
|
||||
address: string;
|
||||
registered_at: number;
|
||||
/// Unix timestamp of the last heartbeat from this agent.
|
||||
last_seen: number;
|
||||
/// Project this agent is assigned to, if any.
|
||||
assigned_project?: string;
|
||||
}
|
||||
|
||||
export interface GatewayProject {
|
||||
name: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface GatewayInfo {
|
||||
active: string;
|
||||
projects: GatewayProject[];
|
||||
}
|
||||
|
||||
export interface PipelineItem {
|
||||
story_id: string;
|
||||
name: string;
|
||||
stage: string;
|
||||
agent?: { agent_name: string; model: string; status: string } | null;
|
||||
blocked?: boolean;
|
||||
retry_count?: number;
|
||||
merge_failure?: string;
|
||||
}
|
||||
|
||||
export interface ProjectPipelineStatus {
|
||||
active: PipelineItem[];
|
||||
backlog: { story_id: string; name: string }[];
|
||||
backlog_count: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface AllProjectsPipeline {
|
||||
active: string;
|
||||
projects: Record<string, ProjectPipelineStatus>;
|
||||
}
|
||||
|
||||
export interface GenerateTokenResponse {
|
||||
token: string;
|
||||
}
|
||||
|
||||
export interface ServerMode {
|
||||
mode: "gateway" | "standard";
|
||||
}
|
||||
|
||||
async function gatewayRequest<T>(
|
||||
path: string,
|
||||
options: RequestInit = {},
|
||||
): Promise<T> {
|
||||
const res = await fetch(path, {
|
||||
headers: { "Content-Type": "application/json", ...(options.headers ?? {}) },
|
||||
...options,
|
||||
});
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(text || `Request failed (${res.status})`);
|
||||
}
|
||||
// DELETE /gateway/agents/:id returns 204 No Content.
|
||||
if (res.status === 204) {
|
||||
return undefined as unknown as T;
|
||||
}
|
||||
return res.json() as Promise<T>;
|
||||
}
|
||||
|
||||
export const gatewayApi = {
|
||||
/// Returns `{ mode: "gateway" }` if this server is a gateway, otherwise rejects.
|
||||
getServerMode(): Promise<ServerMode> {
|
||||
return gatewayRequest<ServerMode>("/gateway/mode");
|
||||
},
|
||||
|
||||
/// Generate a one-time join token for a new build agent.
|
||||
generateToken(): Promise<GenerateTokenResponse> {
|
||||
return gatewayRequest<GenerateTokenResponse>("/gateway/tokens", {
|
||||
method: "POST",
|
||||
});
|
||||
},
|
||||
|
||||
/// List all build agents that have registered with this gateway.
|
||||
listAgents(): Promise<JoinedAgent[]> {
|
||||
return gatewayRequest<JoinedAgent[]>("/gateway/agents");
|
||||
},
|
||||
|
||||
/// Remove a registered build agent by its ID.
|
||||
removeAgent(id: string): Promise<void> {
|
||||
return gatewayRequest<void>(`/gateway/agents/${id}`, {
|
||||
method: "DELETE",
|
||||
});
|
||||
},
|
||||
|
||||
/// Assign an agent to a project, or unassign it by passing null.
|
||||
assignAgent(id: string, project: string | null): Promise<JoinedAgent> {
|
||||
return gatewayRequest<JoinedAgent>(`/gateway/agents/${id}/assign`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ project }),
|
||||
});
|
||||
},
|
||||
|
||||
/// Get the list of registered projects from the gateway.
|
||||
getGatewayInfo(): Promise<GatewayInfo> {
|
||||
return gatewayRequest<GatewayInfo>("/api/gateway");
|
||||
},
|
||||
|
||||
/// Add a new project to the gateway config.
|
||||
addProject(name: string, url: string): Promise<GatewayProject> {
|
||||
return gatewayRequest<GatewayProject>("/api/gateway/projects", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ name, url }),
|
||||
});
|
||||
},
|
||||
|
||||
/// Remove a project from the gateway config.
|
||||
removeProject(name: string): Promise<void> {
|
||||
return gatewayRequest<void>(
|
||||
`/api/gateway/projects/${encodeURIComponent(name)}`,
|
||||
{ method: "DELETE" },
|
||||
);
|
||||
},
|
||||
|
||||
/// Send a heartbeat for an agent to update its last-seen timestamp.
|
||||
heartbeat(id: string): Promise<void> {
|
||||
return gatewayRequest<void>(`/gateway/agents/${id}/heartbeat`, {
|
||||
method: "POST",
|
||||
});
|
||||
},
|
||||
|
||||
/// Fetch pipeline status from all registered projects.
|
||||
getAllProjectsPipeline(): Promise<AllProjectsPipeline> {
|
||||
return gatewayRequest<AllProjectsPipeline>("/api/gateway/pipeline");
|
||||
},
|
||||
|
||||
/// Switch the active project.
|
||||
switchProject(project: string): Promise<{ ok: boolean; error?: string }> {
|
||||
return gatewayRequest<{ ok: boolean; error?: string }>(
|
||||
"/api/gateway/switch",
|
||||
{ method: "POST", body: JSON.stringify({ project }) },
|
||||
);
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,344 @@
|
||||
import * as React from "react";
|
||||
import type { BotConfig } from "../api/bot_config";
|
||||
import { botConfigApi } from "../api/bot_config";
|
||||
|
||||
const { useState, useEffect } = React;
|
||||
|
||||
interface BotConfigPageProps {
|
||||
onBack: () => void;
|
||||
}
|
||||
|
||||
const fieldStyle: React.CSSProperties = {
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
gap: "4px",
|
||||
};
|
||||
|
||||
const labelStyle: React.CSSProperties = {
|
||||
fontSize: "0.8em",
|
||||
color: "#aaa",
|
||||
fontWeight: 500,
|
||||
};
|
||||
|
||||
const inputStyle: React.CSSProperties = {
|
||||
padding: "8px 10px",
|
||||
borderRadius: "6px",
|
||||
border: "1px solid #333",
|
||||
background: "#1e1e1e",
|
||||
color: "#ececec",
|
||||
fontSize: "0.9em",
|
||||
fontFamily: "monospace",
|
||||
outline: "none",
|
||||
};
|
||||
|
||||
const sectionStyle: React.CSSProperties = {
|
||||
background: "#1e1e1e",
|
||||
border: "1px solid #333",
|
||||
borderRadius: "8px",
|
||||
padding: "20px",
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
gap: "14px",
|
||||
};
|
||||
|
||||
const sectionTitleStyle: React.CSSProperties = {
|
||||
fontSize: "0.85em",
|
||||
fontWeight: 600,
|
||||
color: "#aaa",
|
||||
textTransform: "uppercase",
|
||||
letterSpacing: "0.06em",
|
||||
marginBottom: "2px",
|
||||
};
|
||||
|
||||
function Field({
|
||||
label,
|
||||
value,
|
||||
onChange,
|
||||
placeholder,
|
||||
type = "text",
|
||||
}: {
|
||||
label: string;
|
||||
value: string;
|
||||
onChange: (v: string) => void;
|
||||
placeholder?: string;
|
||||
type?: string;
|
||||
}) {
|
||||
return (
|
||||
<div style={fieldStyle}>
|
||||
<label style={labelStyle}>{label}</label>
|
||||
<input
|
||||
type={type}
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder={placeholder}
|
||||
style={inputStyle}
|
||||
autoComplete="off"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function ListField({
|
||||
label,
|
||||
value,
|
||||
onChange,
|
||||
placeholder,
|
||||
}: {
|
||||
label: string;
|
||||
value: string[];
|
||||
onChange: (v: string[]) => void;
|
||||
placeholder?: string;
|
||||
}) {
|
||||
return (
|
||||
<div style={fieldStyle}>
|
||||
<label style={labelStyle}>{label} (one per line)</label>
|
||||
<textarea
|
||||
value={value.join("\n")}
|
||||
onChange={(e) =>
|
||||
onChange(e.target.value.split("\n").filter((s) => s.trim()))
|
||||
}
|
||||
placeholder={placeholder}
|
||||
rows={3}
|
||||
style={{ ...inputStyle, resize: "vertical" }}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/// Bot configuration page — form for Matrix and Slack credentials.
|
||||
export function BotConfigPage({ onBack }: BotConfigPageProps) {
|
||||
const [transport, setTransport] = useState<"matrix" | "slack">("matrix");
|
||||
const [enabled, setEnabled] = useState(false);
|
||||
const [homeserver, setHomeserver] = useState("");
|
||||
const [username, setUsername] = useState("");
|
||||
const [password, setPassword] = useState("");
|
||||
const [roomIds, setRoomIds] = useState<string[]>([]);
|
||||
const [slackBotToken, setSlackBotToken] = useState("");
|
||||
const [slackSigningSecret, setSlackSigningSecret] = useState("");
|
||||
const [slackChannelIds, setSlackChannelIds] = useState<string[]>([]);
|
||||
const [status, setStatus] = useState<"idle" | "saving" | "saved" | "error">(
|
||||
"idle",
|
||||
);
|
||||
const [errorMsg, setErrorMsg] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
botConfigApi
|
||||
.getConfig()
|
||||
.then((cfg) => {
|
||||
if (cfg.transport === "slack") setTransport("slack");
|
||||
setEnabled(cfg.enabled ?? false);
|
||||
setHomeserver(cfg.homeserver ?? "");
|
||||
setUsername(cfg.username ?? "");
|
||||
setPassword(cfg.password ?? "");
|
||||
setRoomIds(cfg.room_ids ?? []);
|
||||
setSlackBotToken(cfg.slack_bot_token ?? "");
|
||||
setSlackSigningSecret(cfg.slack_signing_secret ?? "");
|
||||
setSlackChannelIds(cfg.slack_channel_ids ?? []);
|
||||
})
|
||||
.catch(() => {});
|
||||
}, []);
|
||||
|
||||
function buildConfig(): BotConfig {
|
||||
return {
|
||||
transport,
|
||||
enabled,
|
||||
homeserver: homeserver || null,
|
||||
username: username || null,
|
||||
password: password || null,
|
||||
room_ids: roomIds.length > 0 ? roomIds : null,
|
||||
slack_bot_token: slackBotToken || null,
|
||||
slack_signing_secret: slackSigningSecret || null,
|
||||
slack_channel_ids: slackChannelIds.length > 0 ? slackChannelIds : null,
|
||||
};
|
||||
}
|
||||
|
||||
async function handleSave() {
|
||||
setStatus("saving");
|
||||
setErrorMsg(null);
|
||||
try {
|
||||
await botConfigApi.saveConfig(buildConfig());
|
||||
setStatus("saved");
|
||||
setTimeout(() => setStatus("idle"), 2000);
|
||||
} catch (e) {
|
||||
setStatus("error");
|
||||
setErrorMsg(e instanceof Error ? e.message : "Save failed");
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
height: "100%",
|
||||
backgroundColor: "#171717",
|
||||
color: "#ececec",
|
||||
overflow: "auto",
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
padding: "12px 24px",
|
||||
borderBottom: "1px solid #333",
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "16px",
|
||||
background: "#171717",
|
||||
flexShrink: 0,
|
||||
}}
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
onClick={onBack}
|
||||
style={{
|
||||
background: "transparent",
|
||||
border: "none",
|
||||
cursor: "pointer",
|
||||
color: "#888",
|
||||
fontSize: "0.9em",
|
||||
padding: "4px 8px",
|
||||
borderRadius: "4px",
|
||||
}}
|
||||
>
|
||||
← Back
|
||||
</button>
|
||||
<span style={{ fontWeight: 700, fontSize: "1em" }}>
|
||||
Bot Configuration
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div
|
||||
style={{
|
||||
flex: 1,
|
||||
padding: "24px",
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
gap: "20px",
|
||||
maxWidth: "600px",
|
||||
}}
|
||||
>
|
||||
<div style={sectionStyle}>
|
||||
<div style={sectionTitleStyle}>General</div>
|
||||
|
||||
<div style={fieldStyle}>
|
||||
<label style={labelStyle}>Transport</label>
|
||||
<select
|
||||
value={transport}
|
||||
onChange={(e) =>
|
||||
setTransport(e.target.value as "matrix" | "slack")
|
||||
}
|
||||
style={{ ...inputStyle, cursor: "pointer" }}
|
||||
>
|
||||
<option value="matrix">Matrix</option>
|
||||
<option value="slack">Slack</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<label
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "8px",
|
||||
cursor: "pointer",
|
||||
fontSize: "0.9em",
|
||||
color: "#ccc",
|
||||
}}
|
||||
>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={enabled}
|
||||
onChange={(e) => setEnabled(e.target.checked)}
|
||||
/>
|
||||
Enabled
|
||||
</label>
|
||||
</div>
|
||||
|
||||
{transport === "matrix" && (
|
||||
<div style={sectionStyle}>
|
||||
<div style={sectionTitleStyle}>Matrix Credentials</div>
|
||||
<Field
|
||||
label="Homeserver"
|
||||
value={homeserver}
|
||||
onChange={setHomeserver}
|
||||
placeholder="https://matrix.example.com"
|
||||
/>
|
||||
<Field
|
||||
label="Username"
|
||||
value={username}
|
||||
onChange={setUsername}
|
||||
placeholder="@botname:example.com"
|
||||
/>
|
||||
<Field
|
||||
label="Password"
|
||||
value={password}
|
||||
onChange={setPassword}
|
||||
placeholder="bot password"
|
||||
type="password"
|
||||
/>
|
||||
<ListField
|
||||
label="Room IDs"
|
||||
value={roomIds}
|
||||
onChange={setRoomIds}
|
||||
placeholder="!roomid:example.com"
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{transport === "slack" && (
|
||||
<div style={sectionStyle}>
|
||||
<div style={sectionTitleStyle}>Slack Credentials</div>
|
||||
<Field
|
||||
label="Bot Token"
|
||||
value={slackBotToken}
|
||||
onChange={setSlackBotToken}
|
||||
placeholder="xoxb-..."
|
||||
/>
|
||||
<Field
|
||||
label="Signing Secret"
|
||||
value={slackSigningSecret}
|
||||
onChange={setSlackSigningSecret}
|
||||
placeholder="signing secret"
|
||||
type="password"
|
||||
/>
|
||||
<ListField
|
||||
label="Channel IDs"
|
||||
value={slackChannelIds}
|
||||
onChange={setSlackChannelIds}
|
||||
placeholder="C01ABCDEF"
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ display: "flex", alignItems: "center", gap: "12px" }}>
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleSave}
|
||||
disabled={status === "saving"}
|
||||
style={{
|
||||
padding: "8px 24px",
|
||||
borderRadius: "6px",
|
||||
border: "none",
|
||||
background: status === "saved" ? "#1a5c2a" : "#2563eb",
|
||||
color: "#fff",
|
||||
cursor: status === "saving" ? "not-allowed" : "pointer",
|
||||
fontSize: "0.9em",
|
||||
fontWeight: 600,
|
||||
opacity: status === "saving" ? 0.7 : 1,
|
||||
}}
|
||||
>
|
||||
{status === "saving"
|
||||
? "Saving..."
|
||||
: status === "saved"
|
||||
? "Saved!"
|
||||
: "Save"}
|
||||
</button>
|
||||
{status === "error" && errorMsg && (
|
||||
<span style={{ color: "#f08080", fontSize: "0.85em" }}>
|
||||
{errorMsg}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import { useChatSend } from "../hooks/useChatSend";
|
||||
import { useChatWebSocket } from "../hooks/useChatWebSocket";
|
||||
import { estimateTokens, getContextWindowSize } from "../utils/chatUtils";
|
||||
import { ApiKeyDialog } from "./ApiKeyDialog";
|
||||
import { BotConfigPage } from "./BotConfigPage";
|
||||
import { ChatHeader } from "./ChatHeader";
|
||||
import type { ChatInputHandle } from "./ChatInput";
|
||||
import { ChatInput } from "./ChatInput";
|
||||
@@ -61,6 +62,7 @@ export function Chat({
|
||||
null,
|
||||
);
|
||||
const [showHelp, setShowHelp] = useState(false);
|
||||
const [view, setView] = useState<"chat" | "bot-config">("chat");
|
||||
const [queuedMessages, setQueuedMessages] = useState<
|
||||
{ id: string; text: string }[]
|
||||
>([]);
|
||||
@@ -373,12 +375,17 @@ export function Chat({
|
||||
onToggleTools={setEnableTools}
|
||||
wsConnected={wsConnected}
|
||||
oauthStatus={oauthStatus}
|
||||
onShowBotConfig={() => setView("bot-config")}
|
||||
/>
|
||||
|
||||
{view === "bot-config" && (
|
||||
<BotConfigPage onBack={() => setView("chat")} />
|
||||
)}
|
||||
|
||||
<div
|
||||
data-testid="chat-content-area"
|
||||
style={{
|
||||
display: "flex",
|
||||
display: view === "bot-config" ? "none" : "flex",
|
||||
flex: 1,
|
||||
minHeight: 0,
|
||||
flexDirection: isNarrowScreen ? "column" : "row",
|
||||
|
||||
@@ -34,6 +34,7 @@ interface ChatHeaderProps {
|
||||
onToggleTools: (enabled: boolean) => void;
|
||||
wsConnected: boolean;
|
||||
oauthStatus?: OAuthStatus | null;
|
||||
onShowBotConfig?: () => void;
|
||||
}
|
||||
|
||||
const getContextEmoji = (percentage: number): string => {
|
||||
@@ -58,6 +59,7 @@ export function ChatHeader({
|
||||
onToggleTools,
|
||||
wsConnected,
|
||||
oauthStatus = null,
|
||||
onShowBotConfig,
|
||||
}: ChatHeaderProps) {
|
||||
const hasModelOptions = availableModels.length > 0 || claudeModels.length > 0;
|
||||
const [showConfirm, setShowConfirm] = useState(false);
|
||||
@@ -513,6 +515,43 @@ export function ChatHeader({
|
||||
🔄 New Session
|
||||
</button>
|
||||
|
||||
{onShowBotConfig && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onShowBotConfig}
|
||||
title="Configure bot credentials"
|
||||
style={{
|
||||
padding: "6px 12px",
|
||||
borderRadius: "99px",
|
||||
border: "none",
|
||||
fontSize: "0.85em",
|
||||
backgroundColor: "#2f2f2f",
|
||||
color: "#888",
|
||||
cursor: "pointer",
|
||||
outline: "none",
|
||||
transition: "all 0.2s",
|
||||
}}
|
||||
onMouseOver={(e) => {
|
||||
e.currentTarget.style.backgroundColor = "#3f3f3f";
|
||||
e.currentTarget.style.color = "#ccc";
|
||||
}}
|
||||
onMouseOut={(e) => {
|
||||
e.currentTarget.style.backgroundColor = "#2f2f2f";
|
||||
e.currentTarget.style.color = "#888";
|
||||
}}
|
||||
onFocus={(e) => {
|
||||
e.currentTarget.style.backgroundColor = "#3f3f3f";
|
||||
e.currentTarget.style.color = "#ccc";
|
||||
}}
|
||||
onBlur={(e) => {
|
||||
e.currentTarget.style.backgroundColor = "#2f2f2f";
|
||||
e.currentTarget.style.color = "#888";
|
||||
}}
|
||||
>
|
||||
⚙ Bot
|
||||
</button>
|
||||
)}
|
||||
|
||||
{hasModelOptions ? (
|
||||
<select
|
||||
value={model}
|
||||
|
||||
@@ -0,0 +1,771 @@
|
||||
/// Gateway management panel shown when huskies runs in `--gateway` mode.
|
||||
///
|
||||
/// Provides:
|
||||
/// - A cross-project pipeline status view showing active stories per project.
|
||||
/// - Clicking a project card switches to it.
|
||||
/// - An "Add Agent" button that generates a one-time join token.
|
||||
/// - Instructions for running a build agent with the token.
|
||||
/// - A list of connected agents with per-agent status, project assignment, and "Remove" buttons.
|
||||
/// - Auto-refresh every 5 seconds so new agents and disconnections appear without a page reload.
|
||||
|
||||
import * as React from "react";
|
||||
import {
|
||||
gatewayApi,
|
||||
type JoinedAgent,
|
||||
type GatewayProject,
|
||||
type AllProjectsPipeline,
|
||||
type PipelineItem,
|
||||
} from "../api/gateway";
|
||||
|
||||
const { useCallback, useEffect, useRef, useState } = React;
|
||||
|
||||
/// Seconds of silence before an agent is considered disconnected.
|
||||
const DISCONNECT_THRESHOLD_SECS = 60;
|
||||
|
||||
/// Poll the agent list this often (milliseconds).
|
||||
const POLL_INTERVAL_MS = 5_000;
|
||||
|
||||
type AgentStatus = "idle" | "working" | "disconnected";
|
||||
|
||||
/// Derive an agent's display status from its last-seen timestamp and project assignment.
|
||||
function agentStatus(agent: JoinedAgent): AgentStatus {
|
||||
const nowSecs = Date.now() / 1000;
|
||||
if (nowSecs - agent.last_seen > DISCONNECT_THRESHOLD_SECS) {
|
||||
return "disconnected";
|
||||
}
|
||||
return agent.assigned_project ? "working" : "idle";
|
||||
}
|
||||
|
||||
const STATUS_COLORS: Record<AgentStatus, string> = {
|
||||
idle: "#6e7681",
|
||||
working: "#3fb950",
|
||||
disconnected: "#f85149",
|
||||
};
|
||||
|
||||
const STATUS_LABELS: Record<AgentStatus, string> = {
|
||||
idle: "Idle",
|
||||
working: "Working",
|
||||
disconnected: "Disconnected",
|
||||
};
|
||||
|
||||
const STAGE_COLORS: Record<string, string> = {
|
||||
current: "#3fb950",
|
||||
qa: "#d2a679",
|
||||
merge: "#79c0ff",
|
||||
done: "#6e7681",
|
||||
};
|
||||
|
||||
const STAGE_LABELS: Record<string, string> = {
|
||||
current: "In Progress",
|
||||
qa: "QA",
|
||||
merge: "Merging",
|
||||
done: "Done",
|
||||
};
|
||||
|
||||
/// A single story row inside a project pipeline card.
|
||||
function StoryRow({ item }: { item: PipelineItem }) {
|
||||
const color = STAGE_COLORS[item.stage] ?? "#8b949e";
|
||||
const label = STAGE_LABELS[item.stage] ?? item.stage;
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "8px",
|
||||
padding: "4px 0",
|
||||
fontSize: "0.82em",
|
||||
}}
|
||||
>
|
||||
<span
|
||||
style={{
|
||||
padding: "1px 6px",
|
||||
borderRadius: "10px",
|
||||
background: `${color}22`,
|
||||
color,
|
||||
border: `1px solid ${color}44`,
|
||||
whiteSpace: "nowrap",
|
||||
flexShrink: 0,
|
||||
}}
|
||||
>
|
||||
{label}
|
||||
</span>
|
||||
<span style={{ color: "#e6edf3", overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" }}>
|
||||
{item.name}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/// Pipeline status card for a single project.
|
||||
function ProjectPipelineCard({
|
||||
name,
|
||||
pipeline,
|
||||
isActive,
|
||||
onSwitch,
|
||||
}: {
|
||||
name: string;
|
||||
pipeline: AllProjectsPipeline["projects"][string];
|
||||
isActive: boolean;
|
||||
onSwitch: (name: string) => void;
|
||||
}) {
|
||||
const activeItems = pipeline.active ?? [];
|
||||
const backlogCount = pipeline.backlog_count ?? 0;
|
||||
const hasError = Boolean(pipeline.error);
|
||||
|
||||
return (
|
||||
<div
|
||||
data-testid={`pipeline-card-${name}`}
|
||||
onClick={() => onSwitch(name)}
|
||||
style={{
|
||||
padding: "12px 16px",
|
||||
background: "#161b22",
|
||||
border: `1px solid ${isActive ? "#238636" : "#30363d"}`,
|
||||
borderRadius: "8px",
|
||||
marginBottom: "8px",
|
||||
cursor: "pointer",
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "8px",
|
||||
marginBottom: activeItems.length > 0 ? "8px" : 0,
|
||||
}}
|
||||
>
|
||||
<span style={{ fontWeight: 600, color: "#e6edf3" }}>{name}</span>
|
||||
{isActive && (
|
||||
<span
|
||||
style={{
|
||||
fontSize: "0.7em",
|
||||
padding: "1px 6px",
|
||||
borderRadius: "10px",
|
||||
background: "#23863622",
|
||||
color: "#3fb950",
|
||||
border: "1px solid #23863644",
|
||||
}}
|
||||
>
|
||||
active
|
||||
</span>
|
||||
)}
|
||||
<span style={{ marginLeft: "auto", fontSize: "0.75em", color: "#6e7681" }}>
|
||||
{backlogCount > 0 ? `${backlogCount} in backlog` : ""}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{hasError ? (
|
||||
<div style={{ fontSize: "0.8em", color: "#f85149" }}>{pipeline.error}</div>
|
||||
) : activeItems.length === 0 ? (
|
||||
<div style={{ fontSize: "0.8em", color: "#6e7681" }}>No active stories</div>
|
||||
) : (
|
||||
<div>
|
||||
{activeItems.map((item) => (
|
||||
<StoryRow key={item.story_id} item={item} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function TokenDisplay({ token }: { token: string }) {
|
||||
const [copied, setCopied] = useState(false);
|
||||
|
||||
const envCmd = `HUSKIES_JOIN_TOKEN=${token} huskies agent --rendezvous <CRDT_SYNC_URL>`;
|
||||
const flagCmd = `huskies agent --rendezvous <CRDT_SYNC_URL> --join-token ${token}`;
|
||||
|
||||
const copyToClipboard = useCallback((text: string) => {
|
||||
void navigator.clipboard.writeText(text).then(() => {
|
||||
setCopied(true);
|
||||
setTimeout(() => setCopied(false), 2000);
|
||||
});
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
marginTop: "12px",
|
||||
padding: "12px 16px",
|
||||
background: "#161b22",
|
||||
border: "1px solid #238636",
|
||||
borderRadius: "8px",
|
||||
fontSize: "0.85em",
|
||||
}}
|
||||
>
|
||||
<div style={{ color: "#3fb950", fontWeight: 600, marginBottom: "8px" }}>
|
||||
Token generated — run the build agent with one of:
|
||||
</div>
|
||||
<div style={{ marginBottom: "6px" }}>
|
||||
<code
|
||||
style={{
|
||||
display: "block",
|
||||
background: "#0d1117",
|
||||
padding: "8px 10px",
|
||||
borderRadius: "4px",
|
||||
color: "#e6edf3",
|
||||
wordBreak: "break-all",
|
||||
}}
|
||||
>
|
||||
{envCmd}
|
||||
</code>
|
||||
</div>
|
||||
<div>
|
||||
<code
|
||||
style={{
|
||||
display: "block",
|
||||
background: "#0d1117",
|
||||
padding: "8px 10px",
|
||||
borderRadius: "4px",
|
||||
color: "#e6edf3",
|
||||
wordBreak: "break-all",
|
||||
}}
|
||||
>
|
||||
{flagCmd}
|
||||
</code>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => copyToClipboard(flagCmd)}
|
||||
style={{
|
||||
marginTop: "8px",
|
||||
fontSize: "0.8em",
|
||||
padding: "3px 10px",
|
||||
borderRadius: "4px",
|
||||
border: "1px solid #444",
|
||||
background: "none",
|
||||
color: "#aaa",
|
||||
cursor: "pointer",
|
||||
}}
|
||||
>
|
||||
{copied ? "Copied!" : "Copy flag command"}
|
||||
</button>
|
||||
<div style={{ marginTop: "8px", color: "#666", fontSize: "0.85em" }}>
|
||||
This token is single-use. Generate a new one for each agent.
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function AgentRow({
|
||||
agent,
|
||||
projects,
|
||||
onRemove,
|
||||
onAssign,
|
||||
}: {
|
||||
agent: JoinedAgent;
|
||||
projects: GatewayProject[];
|
||||
onRemove: (id: string) => void;
|
||||
onAssign: (id: string, project: string | null) => void;
|
||||
}) {
|
||||
const registeredAt = new Date(agent.registered_at * 1000).toLocaleString();
|
||||
const status = agentStatus(agent);
|
||||
const statusColor = STATUS_COLORS[status];
|
||||
const statusLabel = STATUS_LABELS[status];
|
||||
|
||||
return (
|
||||
<div
|
||||
data-testid={`agent-row-${agent.id}`}
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "12px",
|
||||
padding: "10px 14px",
|
||||
background: "#161b22",
|
||||
border: "1px solid #30363d",
|
||||
borderRadius: "8px",
|
||||
marginBottom: "8px",
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
width: "8px",
|
||||
height: "8px",
|
||||
borderRadius: "50%",
|
||||
background: statusColor,
|
||||
flexShrink: 0,
|
||||
}}
|
||||
title={statusLabel}
|
||||
/>
|
||||
<div style={{ flex: 1 }}>
|
||||
<div style={{ display: "flex", alignItems: "center", gap: "8px" }}>
|
||||
<span style={{ fontWeight: 600, color: "#e6edf3" }}>{agent.label}</span>
|
||||
<span
|
||||
data-testid={`agent-status-${agent.id}`}
|
||||
style={{
|
||||
fontSize: "0.75em",
|
||||
padding: "1px 6px",
|
||||
borderRadius: "10px",
|
||||
background: `${statusColor}22`,
|
||||
color: statusColor,
|
||||
border: `1px solid ${statusColor}44`,
|
||||
}}
|
||||
>
|
||||
{statusLabel}
|
||||
</span>
|
||||
</div>
|
||||
<div style={{ fontSize: "0.8em", color: "#8b949e" }}>
|
||||
{agent.address}
|
||||
</div>
|
||||
<div style={{ fontSize: "0.75em", color: "#6e7681" }}>
|
||||
Registered {registeredAt}
|
||||
{agent.assigned_project && (
|
||||
<span style={{ marginLeft: "8px", color: "#8b949e" }}>
|
||||
· Project: {agent.assigned_project}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<select
|
||||
data-testid={`assign-agent-${agent.id}`}
|
||||
value={agent.assigned_project ?? ""}
|
||||
onChange={(e) =>
|
||||
onAssign(agent.id, e.target.value === "" ? null : e.target.value)
|
||||
}
|
||||
style={{
|
||||
fontSize: "0.8em",
|
||||
padding: "4px 8px",
|
||||
borderRadius: "4px",
|
||||
border: "1px solid #30363d",
|
||||
background: "#0d1117",
|
||||
color: "#e6edf3",
|
||||
cursor: "pointer",
|
||||
}}
|
||||
>
|
||||
<option value="">— unassigned —</option>
|
||||
{projects.map((p) => (
|
||||
<option key={p.name} value={p.name}>
|
||||
{p.name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
<button
|
||||
type="button"
|
||||
data-testid={`remove-agent-${agent.id}`}
|
||||
onClick={() => onRemove(agent.id)}
|
||||
style={{
|
||||
fontSize: "0.8em",
|
||||
padding: "4px 10px",
|
||||
borderRadius: "4px",
|
||||
border: "1px solid #f85149",
|
||||
background: "none",
|
||||
color: "#f85149",
|
||||
cursor: "pointer",
|
||||
}}
|
||||
>
|
||||
Remove
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/// Gateway management panel — rendered when running in `--gateway` mode.
///
/// Loads agents, projects, and the cross-project pipeline once on mount,
/// then polls every POLL_INTERVAL_MS so the dashboard updates as agents
/// connect/disconnect and stories move through pipelines. Also hosts the
/// one-time join-token generator and the add/remove project form.
export function GatewayPanel() {
  const [agents, setAgents] = useState<JoinedAgent[]>([]);
  const [projects, setProjects] = useState<GatewayProject[]>([]);
  // Most recently generated join token (single-use), or null when none shown.
  const [token, setToken] = useState<string | null>(null);
  const [generating, setGenerating] = useState(false);
  // Last error from any gateway API call; rendered in the banner at the bottom.
  const [error, setError] = useState<string | null>(null);
  const [pipeline, setPipeline] = useState<AllProjectsPipeline | null>(null);

  // Add-project form state
  const [newProjectName, setNewProjectName] = useState("");
  const [newProjectUrl, setNewProjectUrl] = useState("");
  const [addingProject, setAddingProject] = useState(false);

  // Keep stable refs so polling intervals don't recreate on state changes.
  // (useState setters are already stable; the refs make that explicit for
  // the interval closure below.)
  const setAgentsRef = useRef(setAgents);
  setAgentsRef.current = setAgents;
  const setPipelineRef = useRef(setPipeline);
  setPipelineRef.current = setPipeline;

  useEffect(() => {
    // Initial load. Failures degrade to empty lists rather than surfacing
    // an error — the poll below will retry.
    gatewayApi
      .listAgents()
      .then(setAgents)
      .catch(() => setAgents([]));
    gatewayApi
      .getGatewayInfo()
      .then((info) => setProjects(info.projects))
      .catch(() => setProjects([]));
    gatewayApi
      .getAllProjectsPipeline()
      .then(setPipeline)
      .catch(() => setPipeline(null));

    // Poll so the dashboard auto-updates as agents connect/disconnect and
    // stories move through pipelines. Poll errors are deliberately ignored
    // (stale data is better than flashing an error every tick).
    const timer = setInterval(() => {
      gatewayApi
        .listAgents()
        .then((updated) => setAgentsRef.current(updated))
        .catch(() => {});
      gatewayApi
        .getAllProjectsPipeline()
        .then((updated) => setPipelineRef.current(updated))
        .catch(() => {});
    }, POLL_INTERVAL_MS);

    return () => clearInterval(timer);
  }, []);

  // Generate a fresh single-use join token; clears any previous token/error.
  const handleAddAgent = useCallback(async () => {
    setGenerating(true);
    setError(null);
    setToken(null);
    try {
      const result = await gatewayApi.generateToken();
      setToken(result.token);
    } catch (e) {
      setError(e instanceof Error ? e.message : String(e));
    } finally {
      setGenerating(false);
    }
  }, []);

  // Remove an agent server-side, then drop it from local state on success.
  const handleRemoveAgent = useCallback(async (id: string) => {
    try {
      await gatewayApi.removeAgent(id);
      setAgents((prev) => prev.filter((a) => a.id !== id));
    } catch (e) {
      setError(e instanceof Error ? e.message : String(e));
    }
  }, []);

  // Assign (or unassign, with project === null) an agent to a project and
  // splice the updated record into local state.
  const handleAssignAgent = useCallback(
    async (id: string, project: string | null) => {
      try {
        const updated = await gatewayApi.assignAgent(id, project);
        setAgents((prev) =>
          prev.map((a) => (a.id === updated.id ? updated : a)),
        );
      } catch (e) {
        setError(e instanceof Error ? e.message : String(e));
      }
    },
    [],
  );

  // Create a project from the form fields; resets the form on success.
  const handleAddProject = useCallback(async () => {
    const name = newProjectName.trim();
    const url = newProjectUrl.trim();
    if (!name || !url) return;
    setAddingProject(true);
    setError(null);
    try {
      const created = await gatewayApi.addProject(name, url);
      setProjects((prev) => [...prev, created]);
      setNewProjectName("");
      setNewProjectUrl("");
    } catch (e) {
      setError(e instanceof Error ? e.message : String(e));
    } finally {
      setAddingProject(false);
    }
  }, [newProjectName, newProjectUrl]);

  // Switch the gateway's active project. The API reports soft failures via
  // result.ok rather than throwing, so both paths are handled.
  const handleSwitchProject = useCallback(async (name: string) => {
    setError(null);
    try {
      const result = await gatewayApi.switchProject(name);
      if (!result.ok) {
        setError(result.error ?? "Failed to switch project");
        return;
      }
      // Refresh pipeline to reflect new active project.
      const updated = await gatewayApi.getAllProjectsPipeline();
      setPipeline(updated);
    } catch (e) {
      setError(e instanceof Error ? e.message : String(e));
    }
  }, []);

  // Remove a project after a native confirm dialog; destructive, no undo.
  const handleRemoveProject = useCallback(async (name: string) => {
    if (!window.confirm(`Remove project "${name}"? This cannot be undone.`)) {
      return;
    }
    setError(null);
    try {
      await gatewayApi.removeProject(name);
      setProjects((prev) => prev.filter((p) => p.name !== name));
    } catch (e) {
      setError(e instanceof Error ? e.message : String(e));
    }
  }, []);

  return (
    <div
      style={{
        minHeight: "100vh",
        background: "#0d1117",
        color: "#e6edf3",
        padding: "32px",
        fontFamily: "-apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif",
      }}
    >
      <div style={{ maxWidth: "720px", margin: "0 auto" }}>
        <h1 style={{ fontSize: "1.5em", fontWeight: 700, marginBottom: "4px" }}>
          Huskies Gateway
        </h1>
        <p style={{ color: "#8b949e", marginBottom: "32px" }}>
          Manage build agents connected to this gateway.
        </p>

        {/* Cross-project pipeline status */}
        <section style={{ marginBottom: "32px" }}>
          <h2
            style={{
              fontSize: "1.1em",
              fontWeight: 600,
              marginBottom: "12px",
              borderBottom: "1px solid #21262d",
              paddingBottom: "8px",
            }}
          >
            Pipeline Status
          </h2>
          {pipeline ? (
            Object.entries(pipeline.projects).map(([name, status]) => (
              <ProjectPipelineCard
                key={name}
                name={name}
                pipeline={status}
                isActive={name === pipeline.active}
                onSwitch={handleSwitchProject}
              />
            ))
          ) : (
            <p style={{ color: "#6e7681" }}>Loading pipeline status…</p>
          )}
        </section>

        {/* Add Agent */}
        <section style={{ marginBottom: "32px" }}>
          <h2
            style={{
              fontSize: "1.1em",
              fontWeight: 600,
              marginBottom: "12px",
              borderBottom: "1px solid #21262d",
              paddingBottom: "8px",
            }}
          >
            Add Agent
          </h2>
          <button
            type="button"
            data-testid="add-agent-button"
            onClick={handleAddAgent}
            disabled={generating}
            style={{
              padding: "8px 18px",
              borderRadius: "6px",
              border: "1px solid #238636",
              background: generating ? "#1a2f1a" : "#238636",
              color: "#fff",
              cursor: generating ? "not-allowed" : "pointer",
              fontWeight: 600,
              fontSize: "0.9em",
            }}
          >
            {generating ? "Generating…" : "Add Agent"}
          </button>
          {token && <TokenDisplay token={token} />}
        </section>

        {/* Agent list */}
        <section>
          <h2
            style={{
              fontSize: "1.1em",
              fontWeight: 600,
              marginBottom: "12px",
              borderBottom: "1px solid #21262d",
              paddingBottom: "8px",
            }}
          >
            Connected Agents{" "}
            {agents.length > 0 && (
              <span
                style={{
                  fontSize: "0.8em",
                  color: "#8b949e",
                  fontWeight: 400,
                }}
              >
                ({agents.length})
              </span>
            )}
          </h2>
          {agents.length === 0 ? (
            <p style={{ color: "#6e7681" }}>
              No agents connected yet. Click "Add Agent" to generate a join
              token.
            </p>
          ) : (
            <div>
              {agents.map((agent) => (
                <AgentRow
                  key={agent.id}
                  agent={agent}
                  projects={projects}
                  onRemove={handleRemoveAgent}
                  onAssign={handleAssignAgent}
                />
              ))}
            </div>
          )}
        </section>

        {/* Project management */}
        <section style={{ marginTop: "32px" }}>
          <h2
            style={{
              fontSize: "1.1em",
              fontWeight: 600,
              marginBottom: "12px",
              borderBottom: "1px solid #21262d",
              paddingBottom: "8px",
            }}
          >
            Projects{" "}
            {projects.length > 0 && (
              <span style={{ fontSize: "0.8em", color: "#8b949e", fontWeight: 400 }}>
                ({projects.length})
              </span>
            )}
          </h2>

          {/* Existing projects list */}
          {projects.map((p) => (
            <div
              key={p.name}
              data-testid={`project-row-${p.name}`}
              style={{
                display: "flex",
                alignItems: "center",
                gap: "12px",
                padding: "10px 14px",
                background: "#161b22",
                border: "1px solid #30363d",
                borderRadius: "8px",
                marginBottom: "8px",
              }}
            >
              <div style={{ flex: 1 }}>
                <div style={{ fontWeight: 600, color: "#e6edf3" }}>{p.name}</div>
                <div style={{ fontSize: "0.8em", color: "#8b949e" }}>{p.url}</div>
              </div>
              <button
                type="button"
                data-testid={`remove-project-${p.name}`}
                onClick={() => handleRemoveProject(p.name)}
                style={{
                  fontSize: "0.8em",
                  padding: "4px 10px",
                  borderRadius: "4px",
                  border: "1px solid #f85149",
                  background: "none",
                  color: "#f85149",
                  cursor: "pointer",
                }}
              >
                Remove
              </button>
            </div>
          ))}

          {/* Add project form */}
          <div
            style={{
              marginTop: "12px",
              display: "flex",
              gap: "8px",
              alignItems: "flex-end",
              flexWrap: "wrap",
            }}
          >
            <div style={{ flex: "1 1 140px" }}>
              <div style={{ fontSize: "0.75em", color: "#8b949e", marginBottom: "4px" }}>
                Name
              </div>
              <input
                data-testid="new-project-name"
                type="text"
                placeholder="my-project"
                value={newProjectName}
                onChange={(e) => setNewProjectName(e.target.value)}
                style={{
                  width: "100%",
                  padding: "6px 10px",
                  borderRadius: "4px",
                  border: "1px solid #30363d",
                  background: "#0d1117",
                  color: "#e6edf3",
                  fontSize: "0.85em",
                }}
              />
            </div>
            <div style={{ flex: "2 1 200px" }}>
              <div style={{ fontSize: "0.75em", color: "#8b949e", marginBottom: "4px" }}>
                Container URL
              </div>
              <input
                data-testid="new-project-url"
                type="text"
                placeholder="http://localhost:3001"
                value={newProjectUrl}
                onChange={(e) => setNewProjectUrl(e.target.value)}
                style={{
                  width: "100%",
                  padding: "6px 10px",
                  borderRadius: "4px",
                  border: "1px solid #30363d",
                  background: "#0d1117",
                  color: "#e6edf3",
                  fontSize: "0.85em",
                }}
              />
            </div>
            <button
              type="button"
              data-testid="add-project-button"
              onClick={handleAddProject}
              disabled={addingProject || !newProjectName.trim() || !newProjectUrl.trim()}
              style={{
                padding: "6px 14px",
                borderRadius: "4px",
                border: "1px solid #238636",
                background: addingProject ? "#1a2f1a" : "#238636",
                color: "#fff",
                cursor: addingProject ? "not-allowed" : "pointer",
                fontWeight: 600,
                fontSize: "0.85em",
                whiteSpace: "nowrap",
              }}
            >
              {addingProject ? "Adding…" : "Add Project"}
            </button>
          </div>
        </section>

        {error && (
          <div
            style={{
              marginTop: "16px",
              padding: "10px 14px",
              background: "#f8514911",
              border: "1px solid #f85149",
              borderRadius: "6px",
              color: "#f85149",
              fontSize: "0.875em",
            }}
          >
            {error}
          </div>
        )}
      </div>
    </div>
  );
}
|
||||
@@ -10,7 +10,8 @@ export const SLASH_COMMANDS: SlashCommand[] = [
|
||||
},
|
||||
{
|
||||
name: "/backlog",
|
||||
description: "Show all items in the backlog with dependency satisfaction status.",
|
||||
description:
|
||||
"Show all items in the backlog with dependency satisfaction status.",
|
||||
},
|
||||
{
|
||||
name: "/status",
|
||||
|
||||
@@ -60,6 +60,7 @@ export default defineConfig(() => {
|
||||
build: {
|
||||
outDir: "dist",
|
||||
emptyOutDir: true,
|
||||
chunkSizeWarningLimit: 1100,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
+1
-1
@@ -6,7 +6,7 @@ PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
|
||||
echo "=== Checking Rust formatting ==="
|
||||
if cargo fmt --version &>/dev/null; then
|
||||
cargo fmt --manifest-path "$PROJECT_ROOT/Cargo.toml" --check
|
||||
cargo fmt --manifest-path "$PROJECT_ROOT/Cargo.toml" --all --check
|
||||
else
|
||||
echo "Skipping Rust formatting check (rustfmt not installed)"
|
||||
fi
|
||||
|
||||
+1
-1
@@ -16,7 +16,7 @@ fi
|
||||
|
||||
echo "=== Checking Rust formatting ==="
|
||||
if cargo fmt --version &>/dev/null; then
|
||||
cargo fmt --manifest-path "$PROJECT_ROOT/Cargo.toml" --check
|
||||
cargo fmt --manifest-path "$PROJECT_ROOT/Cargo.toml" --all --check
|
||||
else
|
||||
echo "Skipping Rust formatting check (rustfmt not installed)"
|
||||
fi
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "huskies"
|
||||
version = "0.10.0"
|
||||
version = "0.10.2"
|
||||
edition = "2024"
|
||||
build = "build.rs"
|
||||
|
||||
|
||||
@@ -1,858 +0,0 @@
|
||||
//! Pipeline state machine — design sketch (story 520) — BARE version.
|
||||
//!
|
||||
//! This is a SCRATCH EXPERIMENT, not wired into anything else in the codebase.
|
||||
//! "Bare" version: hand-rolled with plain Rust enums and pattern matching,
|
||||
//! no external state-machine library. See `pipeline_state_sketch_statig.rs`
|
||||
//! for a parallel version using the `statig` crate.
|
||||
//!
|
||||
//! Run with:
|
||||
//! cargo run --example pipeline_state_sketch_bare -p huskies
|
||||
//! Test with:
|
||||
//! cargo test --example pipeline_state_sketch_bare -p huskies
|
||||
//!
|
||||
//! Goal: demonstrate the typed pipeline state machine that should replace
|
||||
//! huskies's stringly-typed CRDT state. It is intentionally standalone —
|
||||
//! no integration with crdt_state, no persistence, no events escape this
|
||||
//! file. Once we agree on the shape, this becomes the foundation for the
|
||||
//! real implementation in src/pipeline_state.rs.
|
||||
//!
|
||||
//! The point of this version is to show that the Rust type system alone is
|
||||
//! enough to make impossible states unrepresentable, without needing any
|
||||
//! state-machine framework.
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use std::num::NonZeroU32;
|
||||
|
||||
// ── Newtypes ─────────────────────────────────────────────────────────────────
|
||||
//
|
||||
// Each of these is a "wrapper around String" today, but the wrapping itself
|
||||
// is the point: a function that takes a `BranchName` cannot accidentally be
|
||||
// called with a `StoryId`. Validation can be added later (e.g. `BranchName::new`
|
||||
// returns `Result<Self, BranchNameError>` and the inner `String` is private)
|
||||
// without changing call sites.
|
||||
|
||||
/// Opaque identifier for a story.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StoryId(pub String);

/// Name of a git branch (e.g. a story's feature branch).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BranchName(pub String);

/// A git commit identifier. No validation yet; full vs. abbreviated SHA is
/// not enforced here — TODO confirm when this moves out of the sketch.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GitSha(pub String);

/// Name of a build agent.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AgentName(pub String);

/// 32-byte public key identifying a node in the mesh (raw bytes; the key
/// scheme is not specified in this sketch).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct NodePubkey(pub [u8; 32]);
|
||||
|
||||
// ── Synced pipeline stage (lives in CRDT, converges across nodes) ────────────
|
||||
//
|
||||
// This is the SHARED state — every node sees the same Stage for a given story
|
||||
// after CRDT convergence. Local-only state (which agent is running, retry
|
||||
// count, rate-limit timers) lives separately in `ExecutionState` below, keyed
|
||||
// by node pubkey.
|
||||
//
|
||||
// Notice what is NOT a field on Stage:
|
||||
// - `agent` — that's local execution state, not pipeline state
|
||||
// - `retry_count` — also local
|
||||
// - `blocked` — folded into `Archived { reason: Blocked { .. } }`
|
||||
//
|
||||
// And notice what IS a field, by construction:
|
||||
// - Stage::Merge requires a non-zero commits_ahead (silent no-op merge is unrepresentable)
|
||||
// - Stage::Done requires a merge_commit (a "done" story without merge metadata is unrepresentable)
|
||||
// - Stage::Archived always carries a reason (no "archived but we don't know why")
|
||||
|
||||
/// The CRDT-shared pipeline stage of a single story. Every node converges
/// to the same `Stage`; per-node details live in [`ExecutionState`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Stage {
    /// Story exists, waiting for dependencies or auto-assign promotion.
    Backlog,

    /// Story is being actively coded somewhere in the mesh.
    /// (Which node is local — see ExecutionState.)
    Coding,

    /// Coder has run; gates are running.
    Qa,

    /// Gates passed (or were skipped); ready to merge.
    /// `commits_ahead: NonZeroU32` makes "Merge with nothing to merge" structurally impossible.
    /// This single field eliminates today's bug 519 (silent mergemaster no-op).
    Merge {
        feature_branch: BranchName,
        commits_ahead: NonZeroU32,
    },

    /// Mergemaster squashed to master. Always carries the merge metadata,
    /// so a "done" story is provably reachable from master.
    Done {
        merged_at: DateTime<Utc>,
        merge_commit: GitSha,
    },

    /// Out of the active flow. The reason explains why.
    Archived {
        archived_at: DateTime<Utc>,
        reason: ArchiveReason,
    },
}
|
||||
|
||||
/// Why a story left the active pipeline. Every `Stage::Archived` carries
/// exactly one of these, so "archived for no recorded reason" cannot exist.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ArchiveReason {
    /// Normal happy-path completion: accepted and filed away.
    Completed,
    /// User explicitly abandoned the story.
    Abandoned,
    /// Replaced by another story.
    Superseded { by: StoryId },
    /// Manually blocked, awaiting human resolution. Was bug-436's `blocked: true`.
    Blocked { reason: String },
    /// Mergemaster failed beyond the retry budget. Was bug-436's `merge_failure`.
    MergeFailed { reason: String },
    /// Held in review at human request. Was bug-436's `review_hold`.
    ReviewHeld { reason: String },
}
|
||||
|
||||
// ── Per-node execution state (lives in CRDT under node_pubkey key) ───────────
|
||||
//
|
||||
// LOCAL-AUTHORED but GLOBALLY-READABLE. Each node only writes to entries where
|
||||
// node_pubkey == self, so there are no inter-author CRDT merge conflicts. Other
|
||||
// nodes can READ all entries to know what's happening across the mesh.
|
||||
//
|
||||
// In the real CRDT document, this would be stored as something like:
|
||||
// crdt.execution_state: { node_pubkey -> { story_id -> ExecutionState } }
|
||||
//
|
||||
// Why this matters operationally:
|
||||
// - Cross-node observability: matrix bot can show "node A is running coder-1
|
||||
// on story X, node B is rate-limited on story Y"
|
||||
// - Heartbeat detection: if `last_heartbeat` is stale > N min, the entry is
|
||||
// dead (laptop closed, OOM, segfault). Other nodes can take over (story 479).
|
||||
// - Foundation for CRDT-based work claiming (story 479).
|
||||
|
||||
/// What one node is doing about one story right now. Written only by the
/// owning node (keyed by its pubkey in the CRDT), readable by all nodes.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ExecutionState {
    /// No agent on this node is currently working on this story.
    Idle,

    /// An agent has been requested but hasn't started its subprocess yet.
    Pending {
        agent: AgentName,
        since: DateTime<Utc>,
    },

    /// An agent's subprocess is alive on this node.
    /// `last_heartbeat` is updated periodically; if stale, the process probably died.
    Running {
        agent: AgentName,
        started_at: DateTime<Utc>,
        last_heartbeat: DateTime<Utc>,
    },

    /// Agent hit a rate limit; will resume at the given time.
    RateLimited {
        agent: AgentName,
        resume_at: DateTime<Utc>,
    },

    /// Agent finished. exit_code disambiguates clean exit / panic / etc.
    Completed {
        agent: AgentName,
        exit_code: i32,
        completed_at: DateTime<Utc>,
    },
}
|
||||
|
||||
// ── Pipeline events ──────────────────────────────────────────────────────────
|
||||
//
|
||||
// Events drive Stage transitions. Each event carries any data needed to
|
||||
// construct the destination state, so the type signature of `transition`
|
||||
// guarantees we can never accidentally land in an underspecified state.
|
||||
//
|
||||
// (Compare with today's stringly-typed code, where you call
|
||||
// `move_story_to_merge(story_id)` and the destination state is built from
|
||||
// whatever happens to be in scope at the time.)
|
||||
|
||||
/// Events that drive [`Stage`] transitions via [`transition`]. Each event
/// carries the data needed to build its destination stage, so an
/// underspecified destination state cannot be constructed.
#[derive(Debug, Clone)]
pub enum PipelineEvent {
    /// All depends_on stories are in Done or Archived; promotion fires.
    DepsMet,

    /// Coder is going to start running gates.
    GatesStarted,

    /// Gates passed normally — ready to merge. Carries the data needed to
    /// construct Stage::Merge, so the transition can't produce a malformed merge state.
    GatesPassed {
        feature_branch: BranchName,
        commits_ahead: NonZeroU32,
    },

    /// Gates failed; coder will retry.
    GatesFailed { reason: String },

    /// QA mode is "server" — skip QA and go straight to merge.
    QaSkipped {
        feature_branch: BranchName,
        commits_ahead: NonZeroU32,
    },

    /// Mergemaster successfully squashed and pushed to master.
    MergeSucceeded { merge_commit: GitSha },

    /// Mergemaster gave up after the retry budget.
    MergeFailedFinal { reason: String },

    /// User accepted a Done story (or auto-accept fired).
    Accepted,

    /// User explicitly blocked the story.
    Block { reason: String },

    /// User explicitly unblocked.
    Unblock,

    /// User explicitly abandoned.
    Abandon,

    /// User marked the story as superseded by another.
    Supersede { by: StoryId },

    /// User put the story on review hold.
    ReviewHold { reason: String },
}
|
||||
|
||||
// ── Transition errors ────────────────────────────────────────────────────────
|
||||
|
||||
/// Error returned by [`transition`] (and `execution_transition`) when an
/// event is not legal in the current state. Labels are plain strings so
/// the error is cheap to display and log.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TransitionError {
    /// The current stage doesn't accept this event.
    InvalidTransition {
        /// Human-readable label of the state the machine was in.
        from_stage: String,
        /// Human-readable label of the rejected event.
        event: String,
    },
}
|
||||
|
||||
// ── The transition function ──────────────────────────────────────────────────
|
||||
//
|
||||
// Pure function. Takes the current Stage and an event, returns the new Stage
|
||||
// or a TransitionError. The compiler enforces that every constructed Stage
|
||||
// has all required fields, so impossible destination states are unrepresentable.
|
||||
//
|
||||
// "What about the *side effects* of a transition?" — they don't go in here.
|
||||
// transition() is pure. Side effects (matrix bot notifications, file writes,
|
||||
// agent spawns, web UI broadcasts) are dispatched by an event bus that watches
|
||||
// the (before, after) tuple. See the `EventBus` sketch further down.
|
||||
|
||||
pub fn transition(state: Stage, event: PipelineEvent) -> Result<Stage, TransitionError> {
|
||||
use PipelineEvent::*;
|
||||
use Stage::*;
|
||||
|
||||
let stage_label = stage_label(&state);
|
||||
let event_label = event_label(&event);
|
||||
let invalid = || TransitionError::InvalidTransition {
|
||||
from_stage: stage_label.to_string(),
|
||||
event: event_label.to_string(),
|
||||
};
|
||||
|
||||
let now = Utc::now();
|
||||
|
||||
match (state, event) {
|
||||
// ── Forward path: backlog → current → (qa →) merge → done ──────────
|
||||
(Backlog, DepsMet) => Ok(Coding),
|
||||
(Coding, GatesStarted) => Ok(Qa),
|
||||
(Coding, QaSkipped { feature_branch, commits_ahead }) => Ok(Merge {
|
||||
feature_branch,
|
||||
commits_ahead,
|
||||
}),
|
||||
(Qa, GatesPassed { feature_branch, commits_ahead }) => Ok(Merge {
|
||||
feature_branch,
|
||||
commits_ahead,
|
||||
}),
|
||||
// Gates failed → back to Coding for retry. (Retry-budget enforcement
|
||||
// lives outside this function — it's accounting on the local side.)
|
||||
(Qa, GatesFailed { .. }) => Ok(Coding),
|
||||
(Merge { .. }, MergeSucceeded { merge_commit }) => Ok(Done {
|
||||
merged_at: now,
|
||||
merge_commit,
|
||||
}),
|
||||
|
||||
// ── Done → Archived(Completed) ─────────────────────────────────────
|
||||
(Done { .. }, Accepted) => Ok(Archived {
|
||||
archived_at: now,
|
||||
reason: ArchiveReason::Completed,
|
||||
}),
|
||||
|
||||
// ── Stuck states (any active stage → Archived with a reason) ──────
|
||||
(Backlog, Block { reason })
|
||||
| (Coding, Block { reason })
|
||||
| (Qa, Block { reason })
|
||||
| (Merge { .. }, Block { reason }) => Ok(Archived {
|
||||
archived_at: now,
|
||||
reason: ArchiveReason::Blocked { reason },
|
||||
}),
|
||||
|
||||
(Backlog, ReviewHold { reason })
|
||||
| (Coding, ReviewHold { reason })
|
||||
| (Qa, ReviewHold { reason })
|
||||
| (Merge { .. }, ReviewHold { reason }) => Ok(Archived {
|
||||
archived_at: now,
|
||||
reason: ArchiveReason::ReviewHeld { reason },
|
||||
}),
|
||||
|
||||
(Merge { .. }, MergeFailedFinal { reason }) => Ok(Archived {
|
||||
archived_at: now,
|
||||
reason: ArchiveReason::MergeFailed { reason },
|
||||
}),
|
||||
|
||||
// ── Abandon / supersede from any active or done stage ──────────────
|
||||
(Backlog, Abandon)
|
||||
| (Coding, Abandon)
|
||||
| (Qa, Abandon)
|
||||
| (Merge { .. }, Abandon)
|
||||
| (Done { .. }, Abandon) => Ok(Archived {
|
||||
archived_at: now,
|
||||
reason: ArchiveReason::Abandoned,
|
||||
}),
|
||||
|
||||
(Backlog, Supersede { by })
|
||||
| (Coding, Supersede { by })
|
||||
| (Qa, Supersede { by })
|
||||
| (Merge { .. }, Supersede { by })
|
||||
| (Done { .. }, Supersede { by }) => Ok(Archived {
|
||||
archived_at: now,
|
||||
reason: ArchiveReason::Superseded { by },
|
||||
}),
|
||||
|
||||
// ── Unblock: only from Archived(Blocked) → Backlog ─────────────────
|
||||
(
|
||||
Archived {
|
||||
reason: ArchiveReason::Blocked { .. },
|
||||
..
|
||||
},
|
||||
Unblock,
|
||||
) => Ok(Backlog),
|
||||
|
||||
// ── Everything else is invalid ─────────────────────────────────────
|
||||
_ => Err(invalid()),
|
||||
}
|
||||
}
|
||||
|
||||
fn stage_label(s: &Stage) -> &'static str {
|
||||
match s {
|
||||
Stage::Backlog => "Backlog",
|
||||
Stage::Coding => "Coding",
|
||||
Stage::Qa => "Qa",
|
||||
Stage::Merge { .. } => "Merge",
|
||||
Stage::Done { .. } => "Done",
|
||||
Stage::Archived { .. } => "Archived",
|
||||
}
|
||||
}
|
||||
|
||||
fn event_label(e: &PipelineEvent) -> &'static str {
|
||||
match e {
|
||||
PipelineEvent::DepsMet => "DepsMet",
|
||||
PipelineEvent::GatesStarted => "GatesStarted",
|
||||
PipelineEvent::GatesPassed { .. } => "GatesPassed",
|
||||
PipelineEvent::GatesFailed { .. } => "GatesFailed",
|
||||
PipelineEvent::QaSkipped { .. } => "QaSkipped",
|
||||
PipelineEvent::MergeSucceeded { .. } => "MergeSucceeded",
|
||||
PipelineEvent::MergeFailedFinal { .. } => "MergeFailedFinal",
|
||||
PipelineEvent::Accepted => "Accepted",
|
||||
PipelineEvent::Block { .. } => "Block",
|
||||
PipelineEvent::Unblock => "Unblock",
|
||||
PipelineEvent::Abandon => "Abandon",
|
||||
PipelineEvent::Supersede { .. } => "Supersede",
|
||||
PipelineEvent::ReviewHold { .. } => "ReviewHold",
|
||||
}
|
||||
}
|
||||
|
||||
// ── Per-node execution state machine ─────────────────────────────────────────
|
||||
//
|
||||
// Independent of the pipeline stage machine. Tracks "what is THIS node doing
|
||||
// about this story right now." Multiple nodes can have different ExecutionState
|
||||
// for the same story_id at the same time — and that's fine, because each node
|
||||
// owns its own subspace in the CRDT.
|
||||
|
||||
/// Events that drive [`ExecutionState`] transitions on a single node via
/// `execution_transition`.
#[derive(Debug, Clone)]
pub enum ExecutionEvent {
    /// An agent was requested for this story (subprocess not started yet).
    SpawnRequested { agent: AgentName },
    /// The agent's subprocess started successfully.
    SpawnedSuccessfully,
    /// Periodic liveness ping from the running agent.
    Heartbeat,
    /// The agent hit a rate limit; it will resume at the given time.
    HitRateLimit { resume_at: DateTime<Utc> },
    /// The agent's subprocess exited with the given code.
    Exited { exit_code: i32 },
    /// The agent was stopped.
    Stopped,
    /// Forcibly reset to Idle, from any state.
    Reset,
}
|
||||
|
||||
pub fn execution_transition(
|
||||
state: ExecutionState,
|
||||
event: ExecutionEvent,
|
||||
) -> Result<ExecutionState, TransitionError> {
|
||||
use ExecutionEvent::*;
|
||||
use ExecutionState::*;
|
||||
|
||||
let now = Utc::now();
|
||||
|
||||
match (state, event) {
|
||||
(Idle, SpawnRequested { agent }) => Ok(Pending { agent, since: now }),
|
||||
|
||||
(Pending { agent, .. }, SpawnedSuccessfully) => Ok(Running {
|
||||
agent,
|
||||
started_at: now,
|
||||
last_heartbeat: now,
|
||||
}),
|
||||
|
||||
(
|
||||
Running {
|
||||
agent, started_at, ..
|
||||
},
|
||||
Heartbeat,
|
||||
) => Ok(Running {
|
||||
agent,
|
||||
started_at,
|
||||
last_heartbeat: now,
|
||||
}),
|
||||
|
||||
(Running { agent, .. }, HitRateLimit { resume_at })
|
||||
| (Pending { agent, .. }, HitRateLimit { resume_at }) => Ok(RateLimited { agent, resume_at }),
|
||||
|
||||
(RateLimited { agent, .. }, SpawnedSuccessfully) => Ok(Running {
|
||||
agent,
|
||||
started_at: now,
|
||||
last_heartbeat: now,
|
||||
}),
|
||||
|
||||
(Running { agent, .. }, Exited { exit_code })
|
||||
| (Pending { agent, .. }, Exited { exit_code })
|
||||
| (RateLimited { agent, .. }, Exited { exit_code }) => Ok(Completed {
|
||||
agent,
|
||||
exit_code,
|
||||
completed_at: now,
|
||||
}),
|
||||
|
||||
// Stop and Reset always return to Idle, from anywhere.
|
||||
(_, Stopped) | (_, Reset) => Ok(Idle),
|
||||
|
||||
_ => Err(TransitionError::InvalidTransition {
|
||||
from_stage: "ExecutionState".to_string(),
|
||||
event: "<exec event>".to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
// ── Event bus sketch ─────────────────────────────────────────────────────────
|
||||
//
|
||||
// This is intentionally tiny — the goal is to show that the side-effect dispatch
|
||||
// is *separable* from the transition function. Real implementation would use
|
||||
// tokio broadcast channels or a proper event bus, but the pattern is the same.
|
||||
|
||||
/// Payload delivered to every [`TransitionSubscriber`] when a pipeline
/// transition is applied successfully.
#[derive(Debug, Clone)]
pub struct TransitionFired {
    /// Story the transition applies to.
    pub story_id: StoryId,
    /// Stage before the transition.
    pub before: Stage,
    /// Stage after the transition.
    pub after: Stage,
    /// The event that caused the transition.
    pub event: PipelineEvent,
    /// When the transition was applied.
    pub at: DateTime<Utc>,
}
|
||||
|
||||
/// A side-effect handler notified after each pipeline transition.
/// `Send + Sync` so implementations can be shared across threads.
pub trait TransitionSubscriber: Send + Sync {
    /// Short, stable identifier for this subscriber (e.g. for logging).
    fn name(&self) -> &'static str;
    /// Called once for every transition fired on the [`EventBus`].
    fn on_transition(&self, fired: &TransitionFired);
}
|
||||
|
||||
/// Minimal synchronous fan-out bus: holds boxed subscribers and notifies
/// every one of them on each fired transition.
pub struct EventBus {
    // Notified in registration order by `fire`.
    subscribers: Vec<Box<dyn TransitionSubscriber>>,
}
|
||||
|
||||
impl EventBus {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
subscribers: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn subscribe<S: TransitionSubscriber + 'static>(&mut self, subscriber: S) {
|
||||
self.subscribers.push(Box::new(subscriber));
|
||||
}
|
||||
|
||||
pub fn fire(&self, event: TransitionFired) {
|
||||
for sub in &self.subscribers {
|
||||
sub.on_transition(&event);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for EventBus {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
// Example subscribers (just println! for the sketch):
|
||||
|
||||
pub struct MatrixBotSub;
|
||||
impl TransitionSubscriber for MatrixBotSub {
|
||||
fn name(&self) -> &'static str {
|
||||
"matrix-bot"
|
||||
}
|
||||
fn on_transition(&self, f: &TransitionFired) {
|
||||
println!(
|
||||
" [matrix-bot] #{}: {} → {}",
|
||||
f.story_id.0,
|
||||
stage_label(&f.before),
|
||||
stage_label(&f.after)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FileRendererSub;
|
||||
impl TransitionSubscriber for FileRendererSub {
|
||||
fn name(&self) -> &'static str {
|
||||
"filesystem"
|
||||
}
|
||||
fn on_transition(&self, f: &TransitionFired) {
|
||||
println!(
|
||||
" [filesystem] re-rendering .huskies/work/{}/{}.md",
|
||||
stage_dir_name(&f.after),
|
||||
f.story_id.0
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PipelineItemsSub;
|
||||
impl TransitionSubscriber for PipelineItemsSub {
|
||||
fn name(&self) -> &'static str {
|
||||
"pipeline-items"
|
||||
}
|
||||
fn on_transition(&self, f: &TransitionFired) {
|
||||
println!(
|
||||
" [pipeline-items] UPDATE pipeline_items SET stage = '{}' WHERE id = '{}'",
|
||||
stage_dir_name(&f.after),
|
||||
f.story_id.0
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn stage_dir_name(s: &Stage) -> &'static str {
|
||||
match s {
|
||||
Stage::Backlog => "1_backlog",
|
||||
Stage::Coding => "2_current",
|
||||
Stage::Qa => "3_qa",
|
||||
Stage::Merge { .. } => "4_merge",
|
||||
Stage::Done { .. } => "5_done",
|
||||
Stage::Archived { .. } => "6_archived",
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Shorthand: a NonZeroU32 from a known-nonzero literal.
    fn nz(n: u32) -> NonZeroU32 {
        NonZeroU32::new(n).unwrap()
    }
    /// Shorthand: a BranchName from a &str.
    fn fb(name: &str) -> BranchName {
        BranchName(name.to_string())
    }
    /// Shorthand: a GitSha from a &str.
    fn sha(s: &str) -> GitSha {
        GitSha(s.to_string())
    }

    // ── Happy path ─────────────────────────────────────────────────────────

    #[test]
    fn happy_path_backlog_through_done() {
        // Backlog → Coding → Merge (QA skipped) → Done → Archived(Completed).
        let s = Stage::Backlog;
        let s = transition(s, PipelineEvent::DepsMet).unwrap();
        assert!(matches!(s, Stage::Coding));

        let s = transition(
            s,
            PipelineEvent::QaSkipped {
                feature_branch: fb("feature/story-1"),
                commits_ahead: nz(3),
            },
        )
        .unwrap();
        assert!(matches!(s, Stage::Merge { .. }));

        let s = transition(
            s,
            PipelineEvent::MergeSucceeded {
                merge_commit: sha("abc123"),
            },
        )
        .unwrap();
        assert!(matches!(s, Stage::Done { .. }));

        let s = transition(s, PipelineEvent::Accepted).unwrap();
        assert!(matches!(
            s,
            Stage::Archived {
                reason: ArchiveReason::Completed,
                ..
            }
        ));
    }

    #[test]
    fn qa_retry_loop() {
        // Coding → Qa → (gates fail) → back to Coding.
        let s = Stage::Coding;
        let s = transition(s, PipelineEvent::GatesStarted).unwrap();
        assert!(matches!(s, Stage::Qa));

        let s = transition(
            s,
            PipelineEvent::GatesFailed {
                reason: "tests failed".into(),
            },
        )
        .unwrap();
        assert!(matches!(s, Stage::Coding));
    }

    // ── Bug 519 made unrepresentable: Merge with zero commits ahead ────────

    #[test]
    fn merge_with_zero_commits_is_unrepresentable() {
        // NonZeroU32::new(0) returns None — the type system literally refuses
        // to construct a Merge state with no commits ahead of master. This is
        // bug 519's "silent mergemaster no-op" gone, structurally.
        assert!(NonZeroU32::new(0).is_none());
    }

    // ── Bug 502 made unrepresentable: agent on the wrong stage ─────────────
    //
    // There's nothing to test here at the *Stage* level, because Stage doesn't
    // have an `agent` field at all. Agent assignment is per-node ExecutionState.
    // The "coder agent on a Merge stage" failure mode from bug 502 cannot be
    // expressed in this type system: a coder can attach to a story (writing to
    // its node-local ExecutionState), but the Stage::Merge variant has no slot
    // for an agent. The "wrong-stage agent" error is gone because the wrong
    // state is unrepresentable.

    // ── Invalid transitions return errors ──────────────────────────────────

    #[test]
    fn cannot_jump_from_backlog_to_done() {
        // Accepted is only valid from Done.
        let s = Stage::Backlog;
        let result = transition(s, PipelineEvent::Accepted);
        assert!(matches!(
            result,
            Err(TransitionError::InvalidTransition { .. })
        ));
    }

    #[test]
    fn cannot_unblock_a_done_story() {
        // Unblock is only valid from Archived(Blocked), not from Done.
        let s = Stage::Done {
            merged_at: Utc::now(),
            merge_commit: sha("abc"),
        };
        let result = transition(s, PipelineEvent::Unblock);
        assert!(matches!(
            result,
            Err(TransitionError::InvalidTransition { .. })
        ));
    }

    #[test]
    fn cannot_unblock_a_review_held_story() {
        // Unblock is specifically for Blocked, not for any Archived variant.
        let s = Stage::Archived {
            archived_at: Utc::now(),
            reason: ArchiveReason::ReviewHeld {
                reason: "TBD".into(),
            },
        };
        let result = transition(s, PipelineEvent::Unblock);
        assert!(matches!(
            result,
            Err(TransitionError::InvalidTransition { .. })
        ));
    }

    // ── Block from any active stage ────────────────────────────────────────

    #[test]
    fn block_from_any_active_stage() {
        // Payload-free active stages first…
        for s in [Stage::Backlog, Stage::Coding, Stage::Qa] {
            let result = transition(
                s.clone(),
                PipelineEvent::Block {
                    reason: "stuck".into(),
                },
            );
            assert!(matches!(
                result,
                Ok(Stage::Archived {
                    reason: ArchiveReason::Blocked { .. },
                    ..
                })
            ));
        }

        // Also from Merge:
        let m = Stage::Merge {
            feature_branch: fb("f"),
            commits_ahead: nz(1),
        };
        let result = transition(
            m,
            PipelineEvent::Block {
                reason: "stuck".into(),
            },
        );
        assert!(matches!(
            result,
            Ok(Stage::Archived {
                reason: ArchiveReason::Blocked { .. },
                ..
            })
        ));
    }

    #[test]
    fn unblock_returns_to_backlog() {
        // Archived(Blocked) is the one archived variant that can be revived.
        let s = Stage::Archived {
            archived_at: Utc::now(),
            reason: ArchiveReason::Blocked {
                reason: "test".into(),
            },
        };
        let result = transition(s, PipelineEvent::Unblock).unwrap();
        assert!(matches!(result, Stage::Backlog));
    }

    // ── Execution state ────────────────────────────────────────────────────

    #[test]
    fn execution_happy_path() {
        // Idle → Pending → Running → (heartbeat) → Completed(exit 0).
        let e = ExecutionState::Idle;
        let e = execution_transition(
            e,
            ExecutionEvent::SpawnRequested {
                agent: AgentName("coder-1".into()),
            },
        )
        .unwrap();
        assert!(matches!(e, ExecutionState::Pending { .. }));

        let e = execution_transition(e, ExecutionEvent::SpawnedSuccessfully).unwrap();
        assert!(matches!(e, ExecutionState::Running { .. }));

        let e = execution_transition(e, ExecutionEvent::Heartbeat).unwrap();
        assert!(matches!(e, ExecutionState::Running { .. }));

        let e = execution_transition(e, ExecutionEvent::Exited { exit_code: 0 }).unwrap();
        assert!(matches!(
            e,
            ExecutionState::Completed { exit_code: 0, .. }
        ));
    }

    #[test]
    fn execution_rate_limit_then_resume() {
        // Running → RateLimited → Running once the window clears.
        let e = ExecutionState::Running {
            agent: AgentName("coder-1".into()),
            started_at: Utc::now(),
            last_heartbeat: Utc::now(),
        };
        let e = execution_transition(
            e,
            ExecutionEvent::HitRateLimit {
                resume_at: Utc::now() + chrono::Duration::minutes(5),
            },
        )
        .unwrap();
        assert!(matches!(e, ExecutionState::RateLimited { .. }));

        let e = execution_transition(e, ExecutionEvent::SpawnedSuccessfully).unwrap();
        assert!(matches!(e, ExecutionState::Running { .. }));
    }

    #[test]
    fn execution_stop_from_anywhere_returns_idle() {
        // Stopped is a universal escape hatch back to Idle.
        let e = ExecutionState::Running {
            agent: AgentName("coder-1".into()),
            started_at: Utc::now(),
            last_heartbeat: Utc::now(),
        };
        let e = execution_transition(e, ExecutionEvent::Stopped).unwrap();
        assert!(matches!(e, ExecutionState::Idle));
    }
}
|
||||
|
||||
// ── main: a quick interactive demo ───────────────────────────────────────────
|
||||
|
||||
/// Interactive demo: walks one story through the happy path, firing the
/// event bus at each transition, then shows an invalid transition failing.
fn main() {
    println!("─── Pipeline state machine sketch (story 520) ───\n");

    // Set up the event bus with three subscribers — one for each side effect.
    let mut bus = EventBus::new();
    bus.subscribe(MatrixBotSub);
    bus.subscribe(PipelineItemsSub);
    bus.subscribe(FileRendererSub);

    let story_id = StoryId("100_story_demo".into());

    // Helper to apply a transition + fire the bus.
    // Captures `story_id` by reference; mutates `stage` in place only when
    // the transition succeeds (errors propagate via `?` before the write).
    let mut current_stage = Stage::Backlog;
    let step = |bus: &EventBus,
                stage: &mut Stage,
                event: PipelineEvent|
     -> Result<(), TransitionError> {
        let before = stage.clone();
        let after = transition(stage.clone(), event.clone())?;
        bus.fire(TransitionFired {
            story_id: story_id.clone(),
            before,
            after: after.clone(),
            event,
            at: Utc::now(),
        });
        *stage = after;
        Ok(())
    };

    println!("Initial: {current_stage:?}\n");

    println!("→ DepsMet");
    step(&bus, &mut current_stage, PipelineEvent::DepsMet).unwrap();
    println!();

    println!("→ QaSkipped (qa: server, gates auto-pass)");
    step(
        &bus,
        &mut current_stage,
        PipelineEvent::QaSkipped {
            feature_branch: BranchName("feature/story-100".into()),
            commits_ahead: NonZeroU32::new(3).unwrap(),
        },
    )
    .unwrap();
    println!();

    println!("→ MergeSucceeded");
    step(
        &bus,
        &mut current_stage,
        PipelineEvent::MergeSucceeded {
            merge_commit: GitSha("abc1234".into()),
        },
    )
    .unwrap();
    println!();

    println!("→ Accepted");
    step(&bus, &mut current_stage, PipelineEvent::Accepted).unwrap();
    println!();

    println!("Final: {current_stage:?}\n");

    // Demonstrate the error path: Unblock is not valid from Archived(Completed).
    println!("─── Trying an invalid transition: Done → Unblock ───");
    let invalid_result = transition(current_stage.clone(), PipelineEvent::Unblock);
    println!("Result: {invalid_result:?}");
}
|
||||
@@ -1,785 +0,0 @@
|
||||
//! Pipeline state machine — design sketch (story 520) — STATIG version.
|
||||
//!
|
||||
//! Parallel to `pipeline_state_sketch_bare.rs`. Same domain types, same
|
||||
//! transitions, same event semantics — but the state machine is built using
|
||||
//! the `statig` crate (https://crates.io/crates/statig) instead of being
|
||||
//! hand-rolled.
|
||||
//!
|
||||
//! Run with:
|
||||
//! cargo run --example pipeline_state_sketch_statig -p huskies
|
||||
//! Test with:
|
||||
//! cargo test --example pipeline_state_sketch_statig -p huskies
|
||||
//!
|
||||
//! Why both versions?
|
||||
//!
|
||||
//! - The **bare** version shows that plain Rust enums + a transition function
|
||||
//! are *enough* to make impossible states unrepresentable. No framework.
|
||||
//! - The **statig** version shows what we'd gain by adopting a state-machine
|
||||
//! crate: hierarchical states (the `active` superstate factors out the
|
||||
//! cross-cutting Block/ReviewHold/Abandon/Supersede transitions, which the
|
||||
//! bare version had to duplicate inline with `|` patterns), generated
|
||||
//! `State` enum with type-safe data-carrying constructors, and stateful
|
||||
//! `handle(&event)` dispatch. Type safety is preserved either way:
|
||||
//! `State::merge(BranchName, NonZeroU32)` requires both args at the
|
||||
//! constructor, just like `Stage::Merge { feature_branch, commits_ahead }`
|
||||
//! in the bare version.
|
||||
//!
|
||||
//! Trade-off: statig adds a dependency and a proc-macro layer, which makes
|
||||
//! the code harder to read for someone unfamiliar with the crate. The
|
||||
//! framework-free version is more transparent but requires manual
|
||||
//! pattern-matching and inline duplication for cross-cutting transitions.
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use statig::prelude::*;
|
||||
use std::num::NonZeroU32;
|
||||
|
||||
// ── Newtypes (same as bare version) ──────────────────────────────────────────
|
||||
|
||||
/// Story identifier (slug-style string, e.g. "100_story_demo").
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StoryId(pub String);

/// Git branch name (e.g. "feature/story-1").
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BranchName(pub String);

/// Git commit SHA as a hex string.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GitSha(pub String);

/// Name of an agent process (e.g. "coder-1").
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AgentName(pub String);

/// 32-byte node public key — keys each node's CRDT subspace.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct NodePubkey(pub [u8; 32]);
|
||||
|
||||
// ── Archive reason (same as bare version) ────────────────────────────────────
|
||||
|
||||
/// Why a story ended up archived. Only `Blocked` is revivable (via
/// `PipelineEvent::Unblock`, which returns the story to Backlog).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ArchiveReason {
    /// Merged and accepted — the normal terminal outcome.
    Completed,
    /// Explicitly given up on.
    Abandoned,
    /// Replaced by another story.
    Superseded { by: StoryId },
    /// Stuck; can return to Backlog via Unblock.
    Blocked { reason: String },
    /// Merge failed with no further retries.
    MergeFailed { reason: String },
    /// Held back during review.
    ReviewHeld { reason: String },
}
|
||||
|
||||
// ── Pipeline events (same as bare version) ───────────────────────────────────
|
||||
|
||||
/// Everything that can happen to a story in the pipeline. Data-carrying
/// variants force the caller to supply exactly what the target state needs.
#[derive(Debug, Clone)]
pub enum PipelineEvent {
    /// All dependencies satisfied: Backlog → Coding.
    DepsMet,
    /// Quality gates kicked off: Coding → Qa.
    GatesStarted,
    /// Gates passed: Qa → Merge (branch + nonzero commit count required).
    GatesPassed {
        feature_branch: BranchName,
        commits_ahead: NonZeroU32,
    },
    /// Gates failed: Qa → Coding for another attempt.
    GatesFailed {
        reason: String,
    },
    /// Gates auto-passed / skipped: Coding → Merge directly.
    QaSkipped {
        feature_branch: BranchName,
        commits_ahead: NonZeroU32,
    },
    /// Merge landed: Merge → Done.
    MergeSucceeded {
        merge_commit: GitSha,
    },
    /// Merge permanently failed: Merge → Archived(MergeFailed).
    MergeFailedFinal {
        reason: String,
    },
    /// Done → Archived(Completed).
    Accepted,
    /// Any active stage → Archived(Blocked).
    Block {
        reason: String,
    },
    /// Archived(Blocked) → Backlog.
    Unblock,
    /// Active stage or Done → Archived(Abandoned).
    Abandon,
    /// Active stage or Done → Archived(Superseded).
    Supersede {
        by: StoryId,
    },
    /// Any active stage → Archived(ReviewHeld).
    ReviewHold {
        reason: String,
    },
}
|
||||
|
||||
// ── The state machine ────────────────────────────────────────────────────────
|
||||
//
|
||||
// statig requires a "context" struct (the `Self` of the impl block). For us
|
||||
// it's empty — all per-state data lives ON the state itself, carried forward
|
||||
// by the auto-generated `State::xxx(...)` constructors.
|
||||
|
||||
/// Context type for the statig pipeline machine. Deliberately empty: all
/// per-state data lives on the generated `State` variants themselves.
#[derive(Default)]
pub struct PipelineMachine;
|
||||
|
||||
#[state_machine(
    initial = "State::backlog()",
    state(derive(Debug, Clone, PartialEq, Eq))
)]
impl PipelineMachine {
    // ── Active stages: backlog, coding, qa, merge ────────────────────────
    //
    // Each is a child of the `active` superstate, which handles the
    // cross-cutting transitions (Block / ReviewHold / Abandon / Supersede)
    // exactly once instead of being duplicated per state.
    //
    // NOTE(review): statig derives the State enum from these fn signatures,
    // so the fn names and argument lists ARE the public state shapes.

    // Backlog: waiting on dependencies. DepsMet moves the story into Coding.
    #[state(superstate = "active")]
    fn backlog(event: &PipelineEvent) -> Response<State> {
        match event {
            PipelineEvent::DepsMet => Transition(State::coding()),
            _ => Super, // defer to `active` (and ultimately to "unhandled")
        }
    }

    // Coding: active development. Either enters QA or skips straight to Merge.
    #[state(superstate = "active")]
    fn coding(event: &PipelineEvent) -> Response<State> {
        match event {
            PipelineEvent::GatesStarted => Transition(State::qa()),
            PipelineEvent::QaSkipped {
                feature_branch,
                commits_ahead,
            } => Transition(State::merge(feature_branch.clone(), *commits_ahead)),
            _ => Super,
        }
    }

    // Qa: gates running. Pass → Merge; fail → back to Coding (retry loop).
    #[state(superstate = "active")]
    fn qa(event: &PipelineEvent) -> Response<State> {
        match event {
            PipelineEvent::GatesPassed {
                feature_branch,
                commits_ahead,
            } => Transition(State::merge(feature_branch.clone(), *commits_ahead)),
            PipelineEvent::GatesFailed { .. } => Transition(State::coding()),
            _ => Super,
        }
    }

    #[state(superstate = "active")]
    fn merge(
        _feature_branch: &mut BranchName,
        _commits_ahead: &mut NonZeroU32,
        event: &PipelineEvent,
    ) -> Response<State> {
        // Note: the type signature of this state function REQUIRES both
        // _feature_branch and _commits_ahead. There is no way to construct
        // a Merge state without them. NonZeroU32 makes "merge with zero
        // commits ahead" structurally unrepresentable (bug 519 fixed by
        // construction, same as the bare version).
        //
        // The fields are prefixed with `_` because this state function only
        // transitions forward and doesn't read them — but they're available
        // to inspect via the State::Merge variant generated by the macro.
        match event {
            PipelineEvent::MergeSucceeded { merge_commit } => Transition(State::done(
                Utc::now(),
                merge_commit.clone(),
            )),
            PipelineEvent::MergeFailedFinal { reason } => Transition(State::archived(
                Utc::now(),
                ArchiveReason::MergeFailed {
                    reason: reason.clone(),
                },
            )),
            _ => Super,
        }
    }

    // ── Cross-cutting superstate ─────────────────────────────────────────
    //
    // This is the statig payoff: ONE place defines what Block/ReviewHold/
    // Abandon/Supersede do across all four active stages. The bare version
    // had to duplicate this with `|` patterns. Adding a new active stage
    // here means just adding it as a child of `active`; the cross-cutting
    // transitions come for free.

    #[superstate]
    fn active(event: &PipelineEvent) -> Response<State> {
        let now = Utc::now();
        match event {
            PipelineEvent::Block { reason } => Transition(State::archived(
                now,
                ArchiveReason::Blocked {
                    reason: reason.clone(),
                },
            )),
            PipelineEvent::ReviewHold { reason } => Transition(State::archived(
                now,
                ArchiveReason::ReviewHeld {
                    reason: reason.clone(),
                },
            )),
            PipelineEvent::Abandon => {
                Transition(State::archived(now, ArchiveReason::Abandoned))
            }
            PipelineEvent::Supersede { by } => Transition(State::archived(
                now,
                ArchiveReason::Superseded { by: by.clone() },
            )),
            _ => Handled, // unhandled events are silently ignored
        }
    }

    // ── Done is special: it's not a child of `active` because Block and ──
    // ── ReviewHold are NOT valid from Done (per the bare version's rules).
    // ── Abandon and Supersede ARE valid, so we have to handle them inline.

    #[state]
    fn done(
        merged_at: &mut DateTime<Utc>,
        merge_commit: &mut GitSha,
        event: &PipelineEvent,
    ) -> Response<State> {
        let now = Utc::now();
        let _ = merged_at; // currently unused; available for queries
        let _ = merge_commit;
        match event {
            PipelineEvent::Accepted => {
                Transition(State::archived(now, ArchiveReason::Completed))
            }
            PipelineEvent::Abandon => {
                Transition(State::archived(now, ArchiveReason::Abandoned))
            }
            PipelineEvent::Supersede { by } => Transition(State::archived(
                now,
                ArchiveReason::Superseded { by: by.clone() },
            )),
            _ => Handled,
        }
    }

    // ── Archived is terminal except for Unblock from Blocked → Backlog ───

    #[state]
    fn archived(
        archived_at: &mut DateTime<Utc>,
        reason: &mut ArchiveReason,
        event: &PipelineEvent,
    ) -> Response<State> {
        let _ = archived_at;
        match event {
            PipelineEvent::Unblock => {
                if matches!(reason, ArchiveReason::Blocked { .. }) {
                    Transition(State::backlog())
                } else {
                    Handled // unblock only valid from Blocked
                }
            }
            _ => Handled,
        }
    }
}
|
||||
|
||||
// ── Per-node execution state machine ─────────────────────────────────────────
|
||||
//
|
||||
// Independent of the pipeline stage machine. Tracks "what is THIS node doing
|
||||
// about this story right now." Lives in its own sub-module so its generated
|
||||
// `State` enum doesn't collide with `PipelineMachine`'s.
|
||||
//
|
||||
// In a real implementation, multiple nodes can have different ExecutionState
|
||||
// for the same story_id at the same time — and that's fine, because each
|
||||
// node owns its own subspace in the CRDT (keyed by node pubkey).
|
||||
|
||||
/// Events fed into the per-node `ExecutionMachine` below: everything that can
/// happen to the agent process a node is running for one story.
#[derive(Debug, Clone)]
pub enum ExecutionEvent {
    /// A spawn of `agent` was requested on this node (idle → pending).
    SpawnRequested { agent: AgentName },
    /// The agent subprocess actually started (pending/rate_limited → running).
    SpawnedSuccessfully,
    /// Liveness ping from a running agent; refreshes `last_heartbeat`.
    Heartbeat,
    /// Upstream API rate limit hit; back off until `resume_at`.
    HitRateLimit { resume_at: DateTime<Utc> },
    /// The agent subprocess terminated with `exit_code` (→ completed).
    Exited { exit_code: i32 },
    /// Operator stop — returns to idle from any state.
    Stopped,
    /// Administrative reset — returns to idle from any state.
    Reset,
}
|
||||
|
||||
pub mod execution {
    use super::{AgentName, DateTime, ExecutionEvent, Utc};
    use statig::prelude::*;

    /// Context type for the per-node execution machine; empty for the same
    /// reason as `PipelineMachine` — all data rides on the `State` variants.
    #[derive(Default)]
    pub struct ExecutionMachine;

    #[state_machine(
        initial = "State::idle()",
        state(derive(Debug, Clone, PartialEq, Eq))
    )]
    impl ExecutionMachine {
        // ── Idle: no agent on this node is working on this story ──────────

        #[state(superstate = "any")]
        fn idle(event: &ExecutionEvent) -> Response<State> {
            match event {
                ExecutionEvent::SpawnRequested { agent } => {
                    Transition(State::pending(agent.clone(), Utc::now()))
                }
                _ => Super,
            }
        }

        // ── Pending: agent has been requested but hasn't started yet ──────

        #[state(superstate = "any")]
        fn pending(
            agent: &mut AgentName,
            _since: &mut DateTime<Utc>,
            event: &ExecutionEvent,
        ) -> Response<State> {
            match event {
                ExecutionEvent::SpawnedSuccessfully => {
                    let now = Utc::now();
                    Transition(State::running(agent.clone(), now, now))
                }
                ExecutionEvent::HitRateLimit { resume_at } => {
                    Transition(State::rate_limited(agent.clone(), *resume_at))
                }
                ExecutionEvent::Exited { exit_code } => Transition(State::completed(
                    agent.clone(),
                    *exit_code,
                    Utc::now(),
                )),
                _ => Super,
            }
        }

        // ── Running: agent's subprocess is alive ──────────────────────────
        //
        // Heartbeat is a self-transition: we update last_heartbeat in-place
        // via the &mut reference and return `Handled` (no actual stage change).
        // This is statig's idiomatic way to mutate state-local data without
        // transitioning.

        #[state(superstate = "any")]
        fn running(
            agent: &mut AgentName,
            _started_at: &mut DateTime<Utc>,
            last_heartbeat: &mut DateTime<Utc>,
            event: &ExecutionEvent,
        ) -> Response<State> {
            match event {
                ExecutionEvent::Heartbeat => {
                    *last_heartbeat = Utc::now();
                    Handled
                }
                ExecutionEvent::HitRateLimit { resume_at } => {
                    Transition(State::rate_limited(agent.clone(), *resume_at))
                }
                ExecutionEvent::Exited { exit_code } => Transition(State::completed(
                    agent.clone(),
                    *exit_code,
                    Utc::now(),
                )),
                _ => Super,
            }
        }

        // ── RateLimited: waiting for the API rate-limit window to clear ───

        #[state(superstate = "any")]
        fn rate_limited(
            agent: &mut AgentName,
            _resume_at: &mut DateTime<Utc>,
            event: &ExecutionEvent,
        ) -> Response<State> {
            match event {
                ExecutionEvent::SpawnedSuccessfully => {
                    let now = Utc::now();
                    Transition(State::running(agent.clone(), now, now))
                }
                ExecutionEvent::Exited { exit_code } => Transition(State::completed(
                    agent.clone(),
                    *exit_code,
                    Utc::now(),
                )),
                _ => Super,
            }
        }

        // ── Completed: agent finished, exit code captured ─────────────────

        #[state(superstate = "any")]
        fn completed(
            agent: &mut AgentName,
            exit_code: &mut i32,
            completed_at: &mut DateTime<Utc>,
            event: &ExecutionEvent,
        ) -> Response<State> {
            // Completed is mostly terminal; only Stopped/Reset (handled by
            // the `any` superstate) returns to Idle. Field names are kept
            // un-underscored so the generated State::Completed variant
            // exposes them as `exit_code` etc. for test pattern matching.
            let _ = (agent, exit_code, completed_at, event);
            Super
        }

        // ── Cross-cutting: Stopped and Reset return to Idle from anywhere ─

        #[superstate]
        fn any(event: &ExecutionEvent) -> Response<State> {
            match event {
                ExecutionEvent::Stopped | ExecutionEvent::Reset => {
                    Transition(State::idle())
                }
                _ => Handled,
            }
        }
    }
}
|
||||
|
||||
// ── Side effects via statig's entry/exit actions (alternative to EventBus) ───
|
||||
//
|
||||
// The bare version uses an explicit EventBus + Subscriber trait + per-state
|
||||
// publish-on-transition pattern. statig has a more native equivalent:
|
||||
// `#[action]`-tagged functions that fire on state entry / exit / transition.
|
||||
//
|
||||
// We don't include a full action-based example here — it would roughly look
|
||||
// like adding `entry_action = "log_entry"` to each #[state] attribute and
|
||||
// defining `fn log_entry(...)` in the impl block. The trade-off is that
|
||||
// statig's actions are tightly coupled to the state machine impl block,
|
||||
// while the bare version's EventBus allows arbitrary external subscribers
|
||||
// to plug in without touching the state machine code. Both patterns are
|
||||
// valid; pick based on whether you want side-effect dispatch INSIDE the
|
||||
// machine (statig actions) or OUTSIDE it (bare EventBus).
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn nz(n: u32) -> NonZeroU32 {
|
||||
NonZeroU32::new(n).unwrap()
|
||||
}
|
||||
fn fb(name: &str) -> BranchName {
|
||||
BranchName(name.to_string())
|
||||
}
|
||||
fn sha(s: &str) -> GitSha {
|
||||
GitSha(s.to_string())
|
||||
}
|
||||
|
||||
// ── Happy path ─────────────────────────────────────────────────────────
|
||||
|
||||
#[test]
|
||||
fn happy_path_backlog_through_done() {
|
||||
let mut sm = PipelineMachine.state_machine();
|
||||
assert!(matches!(sm.state(), State::Backlog {}));
|
||||
|
||||
sm.handle(&PipelineEvent::DepsMet);
|
||||
assert!(matches!(sm.state(), State::Coding {}));
|
||||
|
||||
sm.handle(&PipelineEvent::QaSkipped {
|
||||
feature_branch: fb("feature/story-1"),
|
||||
commits_ahead: nz(3),
|
||||
});
|
||||
assert!(matches!(sm.state(), State::Merge { .. }));
|
||||
|
||||
sm.handle(&PipelineEvent::MergeSucceeded {
|
||||
merge_commit: sha("abc123"),
|
||||
});
|
||||
assert!(matches!(sm.state(), State::Done { .. }));
|
||||
|
||||
sm.handle(&PipelineEvent::Accepted);
|
||||
assert!(matches!(
|
||||
sm.state(),
|
||||
State::Archived {
|
||||
reason: ArchiveReason::Completed,
|
||||
..
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn qa_retry_loop() {
|
||||
let mut sm = PipelineMachine.state_machine();
|
||||
sm.handle(&PipelineEvent::DepsMet);
|
||||
sm.handle(&PipelineEvent::GatesStarted);
|
||||
assert!(matches!(sm.state(), State::Qa {}));
|
||||
|
||||
sm.handle(&PipelineEvent::GatesFailed {
|
||||
reason: "tests failed".into(),
|
||||
});
|
||||
assert!(matches!(sm.state(), State::Coding {}));
|
||||
}
|
||||
|
||||
// ── Bug 519 unrepresentability: Merge with zero commits ahead ──────────
|
||||
|
||||
#[test]
|
||||
fn merge_with_zero_commits_is_unrepresentable() {
|
||||
// Identical to the bare version: NonZeroU32::new(0) returns None,
|
||||
// so a State::merge(branch, ZERO) literally cannot be constructed.
|
||||
assert!(NonZeroU32::new(0).is_none());
|
||||
}
|
||||
|
||||
// ── Cross-cutting Block from any active stage (superstate proves it) ───
|
||||
|
||||
#[test]
|
||||
fn block_from_backlog_via_superstate() {
|
||||
let mut sm = PipelineMachine.state_machine();
|
||||
sm.handle(&PipelineEvent::Block {
|
||||
reason: "stuck".into(),
|
||||
});
|
||||
assert!(matches!(
|
||||
sm.state(),
|
||||
State::Archived {
|
||||
reason: ArchiveReason::Blocked { .. },
|
||||
..
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn block_from_coding_via_superstate() {
|
||||
let mut sm = PipelineMachine.state_machine();
|
||||
sm.handle(&PipelineEvent::DepsMet);
|
||||
sm.handle(&PipelineEvent::Block {
|
||||
reason: "stuck".into(),
|
||||
});
|
||||
assert!(matches!(
|
||||
sm.state(),
|
||||
State::Archived {
|
||||
reason: ArchiveReason::Blocked { .. },
|
||||
..
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn block_from_qa_via_superstate() {
|
||||
let mut sm = PipelineMachine.state_machine();
|
||||
sm.handle(&PipelineEvent::DepsMet);
|
||||
sm.handle(&PipelineEvent::GatesStarted);
|
||||
sm.handle(&PipelineEvent::Block {
|
||||
reason: "stuck".into(),
|
||||
});
|
||||
assert!(matches!(
|
||||
sm.state(),
|
||||
State::Archived {
|
||||
reason: ArchiveReason::Blocked { .. },
|
||||
..
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
fn block_from_merge_via_superstate() {
    // Reach the Merge state by skipping QA, then block from there; the
    // superstate handler must still archive the story.
    let mut machine = PipelineMachine.state_machine();
    machine.handle(&PipelineEvent::DepsMet);
    let skip_qa = PipelineEvent::QaSkipped {
        feature_branch: fb("f"),
        commits_ahead: nz(1),
    };
    machine.handle(&skip_qa);
    machine.handle(&PipelineEvent::Block { reason: "stuck".into() });
    assert!(matches!(
        machine.state(),
        State::Archived { reason: ArchiveReason::Blocked { .. }, .. }
    ));
}
|
||||
|
||||
// ── Block from Done is NOT valid (Done isn't a child of `active`) ──────
|
||||
|
||||
#[test]
fn block_from_done_is_ignored() {
    // Drive the pipeline all the way to Done via the QA-skip path...
    let mut machine = PipelineMachine.state_machine();
    machine.handle(&PipelineEvent::DepsMet);
    machine.handle(&PipelineEvent::QaSkipped {
        feature_branch: fb("f"),
        commits_ahead: nz(1),
    });
    machine.handle(&PipelineEvent::MergeSucceeded { merge_commit: sha("abc") });

    // ...then verify Block is a no-op: Done is not a child of `active`,
    // so the superstate handler never sees the event.
    machine.handle(&PipelineEvent::Block { reason: "stuck".into() });
    assert!(matches!(machine.state(), State::Done { .. }));
}
|
||||
|
||||
// ── Abandon from Done IS valid (handled inline in done()) ──────────────
|
||||
|
||||
#[test]
fn abandon_from_done_works() {
    // Unlike Block, Abandon IS valid from Done (handled inline in done())
    // and archives the story with the Abandoned reason.
    let mut machine = PipelineMachine.state_machine();
    machine.handle(&PipelineEvent::DepsMet);
    machine.handle(&PipelineEvent::QaSkipped {
        feature_branch: fb("f"),
        commits_ahead: nz(1),
    });
    machine.handle(&PipelineEvent::MergeSucceeded { merge_commit: sha("abc") });
    machine.handle(&PipelineEvent::Abandon);
    let abandoned = matches!(
        machine.state(),
        State::Archived { reason: ArchiveReason::Abandoned, .. }
    );
    assert!(abandoned);
}
|
||||
|
||||
// ── Unblock from Archived(Blocked) → Backlog ───────────────────────────
|
||||
|
||||
#[test]
fn unblock_returns_to_backlog() {
    // Block from Backlog lands in Archived(Blocked)...
    let mut machine = PipelineMachine.state_machine();
    machine.handle(&PipelineEvent::Block { reason: "test".into() });
    assert!(matches!(
        machine.state(),
        State::Archived { reason: ArchiveReason::Blocked { .. }, .. }
    ));

    // ...and Unblock reverses the archive, dropping the story back into
    // Backlog to be re-picked-up.
    machine.handle(&PipelineEvent::Unblock);
    assert!(matches!(machine.state(), State::Backlog {}));
}
|
||||
|
||||
#[test]
fn unblock_from_review_held_does_nothing() {
    // Unblock targets Archived(Blocked) specifically — a story archived
    // under ReviewHeld must not be released by it.
    let mut machine = PipelineMachine.state_machine();
    machine.handle(&PipelineEvent::ReviewHold { reason: "TBD".into() });

    // Now in Archived(ReviewHeld); Unblock should leave the state untouched.
    machine.handle(&PipelineEvent::Unblock);
    assert!(matches!(
        machine.state(),
        State::Archived { reason: ArchiveReason::ReviewHeld { .. }, .. }
    ));
}
|
||||
|
||||
// ── ExecutionMachine tests ─────────────────────────────────────────────
|
||||
|
||||
use super::execution::{ExecutionMachine, State as ExecState};
|
||||
|
||||
#[test]
fn execution_happy_path() {
    // Idle → Pending → Running → Completed, with a heartbeat along the way.
    let mut exec = ExecutionMachine.state_machine();
    assert!(matches!(exec.state(), ExecState::Idle {}));

    exec.handle(&ExecutionEvent::SpawnRequested {
        agent: AgentName("coder-1".into()),
    });
    assert!(matches!(exec.state(), ExecState::Pending { .. }));

    exec.handle(&ExecutionEvent::SpawnedSuccessfully);
    assert!(matches!(exec.state(), ExecState::Running { .. }));

    // A heartbeat refreshes last_heartbeat in place; the state stays Running.
    exec.handle(&ExecutionEvent::Heartbeat);
    assert!(matches!(exec.state(), ExecState::Running { .. }));

    exec.handle(&ExecutionEvent::Exited { exit_code: 0 });
    assert!(matches!(exec.state(), ExecState::Completed { exit_code: 0, .. }));
}
|
||||
|
||||
#[test]
fn execution_rate_limit_then_resume() {
    // Running → RateLimited on HitRateLimit; a later successful spawn
    // resumes the agent back into Running.
    let mut exec = ExecutionMachine.state_machine();
    exec.handle(&ExecutionEvent::SpawnRequested {
        agent: AgentName("coder-1".into()),
    });
    exec.handle(&ExecutionEvent::SpawnedSuccessfully);

    let resume_at = Utc::now() + chrono::Duration::minutes(5);
    exec.handle(&ExecutionEvent::HitRateLimit { resume_at });
    assert!(matches!(exec.state(), ExecState::RateLimited { .. }));

    exec.handle(&ExecutionEvent::SpawnedSuccessfully);
    assert!(matches!(exec.state(), ExecState::Running { .. }));
}
|
||||
|
||||
#[test]
fn execution_stop_from_running_returns_idle_via_superstate() {
    // Stopped is handled by the shared superstate, so a Running agent
    // drops straight back to Idle.
    let mut exec = ExecutionMachine.state_machine();
    exec.handle(&ExecutionEvent::SpawnRequested {
        agent: AgentName("coder-1".into()),
    });
    exec.handle(&ExecutionEvent::SpawnedSuccessfully);
    exec.handle(&ExecutionEvent::Stopped);
    assert!(matches!(exec.state(), ExecState::Idle {}));
}
|
||||
|
||||
#[test]
fn execution_stop_from_pending_returns_idle_via_superstate() {
    // Even before the spawn completes, Stopped is caught by the superstate
    // and returns the machine to Idle.
    let mut exec = ExecutionMachine.state_machine();
    exec.handle(&ExecutionEvent::SpawnRequested {
        agent: AgentName("coder-1".into()),
    });
    exec.handle(&ExecutionEvent::Stopped);
    assert!(matches!(exec.state(), ExecState::Idle {}));
}
|
||||
|
||||
#[test]
fn execution_stop_from_rate_limited_returns_idle_via_superstate() {
    // A rate-limited agent can also be stopped; the superstate handler
    // takes it back to Idle rather than waiting out the limit.
    let mut exec = ExecutionMachine.state_machine();
    exec.handle(&ExecutionEvent::SpawnRequested {
        agent: AgentName("coder-1".into()),
    });
    exec.handle(&ExecutionEvent::SpawnedSuccessfully);
    let resume_at = Utc::now() + chrono::Duration::minutes(5);
    exec.handle(&ExecutionEvent::HitRateLimit { resume_at });
    exec.handle(&ExecutionEvent::Stopped);
    assert!(matches!(exec.state(), ExecState::Idle {}));
}
|
||||
|
||||
#[test]
fn nodepubkey_type_is_constructible() {
    // Touch the NodePubkey newtype so the compiler doesn't flag it as dead
    // code. In a real implementation it would key the per-node
    // ExecutionState map inside the CRDT.
    let zero_key = NodePubkey([0u8; 32]);
    drop(zero_key);
}
|
||||
}
|
||||
|
||||
// ── main: a quick interactive demo ───────────────────────────────────────────
|
||||
|
||||
fn main() {
|
||||
println!("─── Pipeline state machine sketch (story 520) — STATIG version ───\n");
|
||||
|
||||
let mut sm = PipelineMachine.state_machine();
|
||||
println!("Initial: {:?}\n", sm.state());
|
||||
|
||||
println!("→ DepsMet");
|
||||
sm.handle(&PipelineEvent::DepsMet);
|
||||
println!(" state: {:?}\n", sm.state());
|
||||
|
||||
println!("→ QaSkipped");
|
||||
sm.handle(&PipelineEvent::QaSkipped {
|
||||
feature_branch: BranchName("feature/story-100".into()),
|
||||
commits_ahead: NonZeroU32::new(3).unwrap(),
|
||||
});
|
||||
println!(" state: {:?}\n", sm.state());
|
||||
|
||||
println!("→ MergeSucceeded");
|
||||
sm.handle(&PipelineEvent::MergeSucceeded {
|
||||
merge_commit: GitSha("abc1234".into()),
|
||||
});
|
||||
println!(" state: {:?}\n", sm.state());
|
||||
|
||||
println!("→ Accepted");
|
||||
sm.handle(&PipelineEvent::Accepted);
|
||||
println!(" state: {:?}\n", sm.state());
|
||||
|
||||
println!("─── Trying invalid transition: Done → Unblock ───");
|
||||
let mut sm2 = PipelineMachine.state_machine();
|
||||
sm2.handle(&PipelineEvent::DepsMet);
|
||||
sm2.handle(&PipelineEvent::QaSkipped {
|
||||
feature_branch: BranchName("feature/story-101".into()),
|
||||
commits_ahead: NonZeroU32::new(2).unwrap(),
|
||||
});
|
||||
sm2.handle(&PipelineEvent::MergeSucceeded {
|
||||
merge_commit: GitSha("def5678".into()),
|
||||
});
|
||||
println!(" before Unblock: {:?}", sm2.state());
|
||||
sm2.handle(&PipelineEvent::Unblock); // silently ignored — no transition
|
||||
println!(" after Unblock: {:?} (no change — Unblock is a no-op from Done)", sm2.state());
|
||||
}
|
||||
+45
-58
@@ -6,7 +6,6 @@ use std::fs::{self, File, OpenOptions};
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
|
||||
/// A single line in the agent log file (JSONL format).
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct LogEntry {
|
||||
@@ -72,10 +71,7 @@ impl AgentLogWriter {
|
||||
|
||||
/// Return the log directory for a story.
|
||||
fn log_dir(project_root: &Path, story_id: &str) -> PathBuf {
|
||||
project_root
|
||||
.join(".huskies")
|
||||
.join("logs")
|
||||
.join(story_id)
|
||||
project_root.join(".huskies").join("logs").join(story_id)
|
||||
}
|
||||
|
||||
/// Return the path to a specific log file.
|
||||
@@ -102,8 +98,8 @@ pub fn read_log(path: &Path) -> Result<Vec<LogEntry>, String> {
|
||||
if trimmed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let entry: LogEntry = serde_json::from_str(trimmed)
|
||||
.map_err(|e| format!("Failed to parse log entry: {e}"))?;
|
||||
let entry: LogEntry =
|
||||
serde_json::from_str(trimmed).map_err(|e| format!("Failed to parse log entry: {e}"))?;
|
||||
entries.push(entry);
|
||||
}
|
||||
|
||||
@@ -197,10 +193,7 @@ pub fn format_log_entry_as_text(timestamp: &str, event: &serde_json::Value) -> O
|
||||
Some("done") => Some(format!("{pfx} DONE")),
|
||||
Some("status") => {
|
||||
// Skip trivial running/started noise
|
||||
let status = event
|
||||
.get("status")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("?");
|
||||
let status = event.get("status").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
match status {
|
||||
"running" | "started" => None,
|
||||
_ => Some(format!("{pfx} STATUS: {status}")),
|
||||
@@ -211,10 +204,7 @@ pub fn format_log_entry_as_text(timestamp: &str, event: &serde_json::Value) -> O
|
||||
match data.get("type").and_then(|v| v.as_str()) {
|
||||
Some("assistant") => {
|
||||
let mut parts: Vec<String> = Vec::new();
|
||||
if let Some(arr) = data
|
||||
.pointer("/message/content")
|
||||
.and_then(|v| v.as_array())
|
||||
{
|
||||
if let Some(arr) = data.pointer("/message/content").and_then(|v| v.as_array()) {
|
||||
for item in arr {
|
||||
match item.get("type").and_then(|v| v.as_str()) {
|
||||
Some("text") => {
|
||||
@@ -228,15 +218,11 @@ pub fn format_log_entry_as_text(timestamp: &str, event: &serde_json::Value) -> O
|
||||
}
|
||||
}
|
||||
Some("tool_use") => {
|
||||
let name = item
|
||||
.get("name")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("?");
|
||||
let name =
|
||||
item.get("name").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let input = item
|
||||
.get("input")
|
||||
.map(|v| {
|
||||
serde_json::to_string(v).unwrap_or_default()
|
||||
})
|
||||
.map(|v| serde_json::to_string(v).unwrap_or_default())
|
||||
.unwrap_or_default();
|
||||
let display = if input.len() > 200 {
|
||||
format!("{}...", &input[..200])
|
||||
@@ -257,14 +243,9 @@ pub fn format_log_entry_as_text(timestamp: &str, event: &serde_json::Value) -> O
|
||||
}
|
||||
Some("user") => {
|
||||
let mut parts: Vec<String> = Vec::new();
|
||||
if let Some(arr) = data
|
||||
.pointer("/message/content")
|
||||
.and_then(|v| v.as_array())
|
||||
{
|
||||
if let Some(arr) = data.pointer("/message/content").and_then(|v| v.as_array()) {
|
||||
for item in arr {
|
||||
if item.get("type").and_then(|v| v.as_str())
|
||||
!= Some("tool_result")
|
||||
{
|
||||
if item.get("type").and_then(|v| v.as_str()) != Some("tool_result") {
|
||||
continue;
|
||||
}
|
||||
let content_str = match item.get("content") {
|
||||
@@ -316,11 +297,7 @@ pub fn read_log_as_readable_lines(path: &Path) -> Result<Vec<String>, String> {
|
||||
///
|
||||
/// Scans `.huskies/logs/{story_id}/` for files matching `{agent_name}-*.log`
|
||||
/// and returns the one with the most recent modification time.
|
||||
pub fn find_latest_log(
|
||||
project_root: &Path,
|
||||
story_id: &str,
|
||||
agent_name: &str,
|
||||
) -> Option<PathBuf> {
|
||||
pub fn find_latest_log(project_root: &Path, story_id: &str, agent_name: &str) -> Option<PathBuf> {
|
||||
let dir = log_dir(project_root, story_id);
|
||||
if !dir.is_dir() {
|
||||
return None;
|
||||
@@ -362,8 +339,7 @@ mod tests {
|
||||
let tmp = tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
let _writer =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-abc123").unwrap();
|
||||
let _writer = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-abc123").unwrap();
|
||||
|
||||
let expected_path = root
|
||||
.join(".huskies")
|
||||
@@ -378,8 +354,7 @@ mod tests {
|
||||
let tmp = tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
let mut writer =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-001").unwrap();
|
||||
let mut writer = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-001").unwrap();
|
||||
|
||||
let event = AgentEvent::Status {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
@@ -426,8 +401,7 @@ mod tests {
|
||||
let tmp = tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
let mut writer =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-002").unwrap();
|
||||
let mut writer = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-002").unwrap();
|
||||
|
||||
let events = vec![
|
||||
AgentEvent::Status {
|
||||
@@ -472,10 +446,8 @@ mod tests {
|
||||
let tmp = tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
let mut writer1 =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-aaa").unwrap();
|
||||
let mut writer2 =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-bbb").unwrap();
|
||||
let mut writer1 = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-aaa").unwrap();
|
||||
let mut writer2 = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-bbb").unwrap();
|
||||
|
||||
writer1
|
||||
.write_event(&AgentEvent::Output {
|
||||
@@ -496,7 +468,10 @@ mod tests {
|
||||
let path1 = log_file_path(root, "42_story_foo", "coder-1", "sess-aaa");
|
||||
let path2 = log_file_path(root, "42_story_foo", "coder-1", "sess-bbb");
|
||||
|
||||
assert_ne!(path1, path2, "Different sessions should use different files");
|
||||
assert_ne!(
|
||||
path1, path2,
|
||||
"Different sessions should use different files"
|
||||
);
|
||||
|
||||
let entries1 = read_log(&path1).unwrap();
|
||||
let entries2 = read_log(&path2).unwrap();
|
||||
@@ -513,8 +488,7 @@ mod tests {
|
||||
let root = tmp.path();
|
||||
|
||||
// Create two log files with a small delay
|
||||
let mut writer1 =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-old").unwrap();
|
||||
let mut writer1 = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-old").unwrap();
|
||||
writer1
|
||||
.write_event(&AgentEvent::Output {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
@@ -527,8 +501,7 @@ mod tests {
|
||||
// Touch the second file to ensure it's newer
|
||||
std::thread::sleep(std::time::Duration::from_millis(50));
|
||||
|
||||
let mut writer2 =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-new").unwrap();
|
||||
let mut writer2 = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-new").unwrap();
|
||||
writer2
|
||||
.write_event(&AgentEvent::Output {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
@@ -568,8 +541,7 @@ mod tests {
|
||||
drop(w1);
|
||||
std::thread::sleep(std::time::Duration::from_millis(10));
|
||||
|
||||
let mut w2 =
|
||||
AgentLogWriter::new(root, "42_story_foo", "mergemaster", "sess-bbb").unwrap();
|
||||
let mut w2 = AgentLogWriter::new(root, "42_story_foo", "mergemaster", "sess-bbb").unwrap();
|
||||
w2.write_event(&AgentEvent::Output {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "mergemaster".to_string(),
|
||||
@@ -601,8 +573,7 @@ mod tests {
|
||||
.unwrap();
|
||||
drop(w1);
|
||||
|
||||
let mut w2 =
|
||||
AgentLogWriter::new(root, "42_story_foo", "mergemaster", "sess-b").unwrap();
|
||||
let mut w2 = AgentLogWriter::new(root, "42_story_foo", "mergemaster", "sess-b").unwrap();
|
||||
w2.write_event(&AgentEvent::Output {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "mergemaster".to_string(),
|
||||
@@ -704,7 +675,10 @@ mod tests {
|
||||
}
|
||||
});
|
||||
let result = format_log_entry_as_text(ts, &event).unwrap();
|
||||
assert!(result.contains("TOOL: Read"), "should show tool call: {result}");
|
||||
assert!(
|
||||
result.contains("TOOL: Read"),
|
||||
"should show tool call: {result}"
|
||||
);
|
||||
assert!(result.contains("file_path"), "should show input: {result}");
|
||||
}
|
||||
|
||||
@@ -728,7 +702,10 @@ mod tests {
|
||||
}
|
||||
});
|
||||
let result = format_log_entry_as_text(ts, &event).unwrap();
|
||||
assert!(result.contains("Now I will read the file."), "should show text: {result}");
|
||||
assert!(
|
||||
result.contains("Now I will read the file."),
|
||||
"should show text: {result}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -743,7 +720,10 @@ mod tests {
|
||||
"event": {"type": "content_block_delta", "delta": {"type": "text_delta", "text": "chunk"}}
|
||||
}
|
||||
});
|
||||
assert!(format_log_entry_as_text(ts, &event).is_none(), "stream events should be skipped");
|
||||
assert!(
|
||||
format_log_entry_as_text(ts, &event).is_none(),
|
||||
"stream events should be skipped"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -771,7 +751,11 @@ mod tests {
|
||||
let path = log_file_path(root, "42_story_foo", "coder-1", "sess-readable");
|
||||
let lines = read_log_as_readable_lines(&path).unwrap();
|
||||
assert_eq!(lines.len(), 2, "Should produce 2 readable lines");
|
||||
assert!(lines[0].contains("Let me read the file"), "first line: {}", lines[0]);
|
||||
assert!(
|
||||
lines[0].contains("Let me read the file"),
|
||||
"first line: {}",
|
||||
lines[0]
|
||||
);
|
||||
assert!(lines[1].contains("DONE"), "second line: {}", lines[1]);
|
||||
}
|
||||
|
||||
@@ -802,7 +786,10 @@ mod tests {
|
||||
};
|
||||
|
||||
// File should still exist and be readable
|
||||
assert!(path.exists(), "Log file should persist after writer is dropped");
|
||||
assert!(
|
||||
path.exists(),
|
||||
"Log file should persist after writer is dropped"
|
||||
);
|
||||
let entries = read_log(&path).unwrap();
|
||||
assert_eq!(entries.len(), 1);
|
||||
assert_eq!(entries[0].event["type"], "status");
|
||||
|
||||
@@ -36,10 +36,15 @@ const SCAN_INTERVAL_SECS: u64 = 15;
|
||||
///
|
||||
/// This function never returns under normal operation — it runs until the
|
||||
/// process is terminated (SIGINT/SIGTERM).
|
||||
///
|
||||
/// If `join_token` and `gateway_url` are both provided the agent will register
|
||||
/// itself with the gateway on startup using the one-time token.
|
||||
pub async fn run(
|
||||
project_root: Option<PathBuf>,
|
||||
rendezvous_url: String,
|
||||
port: u16,
|
||||
join_token: Option<String>,
|
||||
gateway_url: Option<String>,
|
||||
) -> Result<(), std::io::Error> {
|
||||
let project_root = match project_root {
|
||||
Some(r) => r,
|
||||
@@ -51,14 +56,20 @@ pub async fn run(
|
||||
|
||||
println!("\x1b[96;1m[agent-mode]\x1b[0m Starting headless build agent");
|
||||
println!("\x1b[96;1m[agent-mode]\x1b[0m Rendezvous: {rendezvous_url}");
|
||||
println!("\x1b[96;1m[agent-mode]\x1b[0m Project: {}", project_root.display());
|
||||
println!(
|
||||
"\x1b[96;1m[agent-mode]\x1b[0m Project: {}",
|
||||
project_root.display()
|
||||
);
|
||||
|
||||
// Validate project config.
|
||||
let config = ProjectConfig::load(&project_root).unwrap_or_else(|e| {
|
||||
eprintln!("error: invalid project config: {e}");
|
||||
std::process::exit(1);
|
||||
});
|
||||
slog!("[agent-mode] Loaded config with {} agents", config.agent.len());
|
||||
slog!(
|
||||
"[agent-mode] Loaded config with {} agents",
|
||||
config.agent.len()
|
||||
);
|
||||
|
||||
// Event bus for pipeline lifecycle events.
|
||||
let (watcher_tx, _) = broadcast::channel::<watcher::WatcherEvent>(1024);
|
||||
@@ -79,9 +90,7 @@ pub async fn run(
|
||||
{
|
||||
let story_id = evt.story_id.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
if let Err(e) =
|
||||
crate::worktree::prune_worktree_sync(&root, &story_id)
|
||||
{
|
||||
if let Err(e) = crate::worktree::prune_worktree_sync(&root, &story_id) {
|
||||
slog!("[agent-mode] worktree prune failed for {story_id}: {e}");
|
||||
}
|
||||
});
|
||||
@@ -113,9 +122,7 @@ pub async fn run(
|
||||
if let watcher::WatcherEvent::WorkItem { ref stage, .. } = event
|
||||
&& matches!(stage.as_str(), "2_current" | "3_qa" | "4_merge")
|
||||
{
|
||||
slog!(
|
||||
"[agent-mode] CRDT transition in {stage}/; triggering auto-assign."
|
||||
);
|
||||
slog!("[agent-mode] CRDT transition in {stage}/; triggering auto-assign.");
|
||||
auto_agents.auto_assign_available_work(&auto_root).await;
|
||||
}
|
||||
}
|
||||
@@ -125,6 +132,14 @@ pub async fn run(
|
||||
// Write initial heartbeat.
|
||||
write_heartbeat(&rendezvous_url, port);
|
||||
|
||||
// Register with gateway if a join token and gateway URL were provided.
|
||||
if let (Some(token), Some(url)) = (join_token, gateway_url) {
|
||||
let node_id = crdt_state::our_node_id().unwrap_or_else(|| "unknown".to_string());
|
||||
let label = format!("build-agent-{}", &node_id[..node_id.len().min(8)]);
|
||||
let address = format!("ws://0.0.0.0:{port}/crdt-sync");
|
||||
register_with_gateway(&url, &token, &label, &address).await;
|
||||
}
|
||||
|
||||
// Reconcile any committed work from a previous session.
|
||||
{
|
||||
let recon_agents = Arc::clone(&agents);
|
||||
@@ -425,6 +440,36 @@ fn push_feature_branch(worktree_path: &str, story_id: &str) -> Result<(), String
|
||||
}
|
||||
}
|
||||
|
||||
// ── Gateway registration ──────────────────────────────────────────────────
|
||||
|
||||
/// Register this build agent with a gateway using a one-time join token.
|
||||
///
|
||||
/// POSTs `{ token, label, address }` to `{gateway_url}/gateway/register`. On
|
||||
/// success the gateway stores the agent and it will appear in the gateway UI.
|
||||
async fn register_with_gateway(gateway_url: &str, token: &str, label: &str, address: &str) {
|
||||
let client = reqwest::Client::new();
|
||||
let url = format!("{}/gateway/register", gateway_url.trim_end_matches('/'));
|
||||
let body = serde_json::json!({
|
||||
"token": token,
|
||||
"label": label,
|
||||
"address": address,
|
||||
});
|
||||
match client.post(&url).json(&body).send().await {
|
||||
Ok(resp) if resp.status().is_success() => {
|
||||
slog!("[agent-mode] Registered with gateway at {gateway_url}");
|
||||
}
|
||||
Ok(resp) => {
|
||||
slog!(
|
||||
"[agent-mode] Gateway registration failed: HTTP {}",
|
||||
resp.status()
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
slog!("[agent-mode] Gateway registration error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -36,9 +36,7 @@ pub(crate) fn worktree_has_committed_work(wt_path: &Path) -> bool {
|
||||
.current_dir(wt_path)
|
||||
.output();
|
||||
match output {
|
||||
Ok(out) if out.status.success() => {
|
||||
!String::from_utf8_lossy(&out.stdout).trim().is_empty()
|
||||
}
|
||||
Ok(out) if out.status.success() => !String::from_utf8_lossy(&out.stdout).trim().is_empty(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
@@ -258,14 +256,21 @@ mod tests {
|
||||
let script_dir = path.join("script");
|
||||
fs::create_dir_all(&script_dir).unwrap();
|
||||
let script_test = script_dir.join("test");
|
||||
fs::write(&script_test, "#!/usr/bin/env bash\necho 'all tests passed'\nexit 0\n").unwrap();
|
||||
fs::write(
|
||||
&script_test,
|
||||
"#!/usr/bin/env bash\necho 'all tests passed'\nexit 0\n",
|
||||
)
|
||||
.unwrap();
|
||||
let mut perms = fs::metadata(&script_test).unwrap().permissions();
|
||||
perms.set_mode(0o755);
|
||||
fs::set_permissions(&script_test, perms).unwrap();
|
||||
|
||||
let (passed, output) = run_project_tests(path).unwrap();
|
||||
assert!(passed, "script/test exiting 0 should pass");
|
||||
assert!(output.contains("script/test"), "output should mention script/test");
|
||||
assert!(
|
||||
output.contains("script/test"),
|
||||
"output should mention script/test"
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
@@ -286,7 +291,10 @@ mod tests {
|
||||
|
||||
let (passed, output) = run_project_tests(path).unwrap();
|
||||
assert!(!passed, "script/test exiting 1 should fail");
|
||||
assert!(output.contains("script/test"), "output should mention script/test");
|
||||
assert!(
|
||||
output.contains("script/test"),
|
||||
"output should mention script/test"
|
||||
);
|
||||
}
|
||||
|
||||
// ── run_coverage_gate tests ───────────────────────────────────────────────
|
||||
@@ -347,7 +355,10 @@ mod tests {
|
||||
let script = script_dir.join("test_coverage");
|
||||
{
|
||||
let mut f = fs::File::create(&script).unwrap();
|
||||
f.write_all(b"#!/usr/bin/env bash\necho 'FAIL: Coverage 40% is below threshold 80%'\nexit 1\n").unwrap();
|
||||
f.write_all(
|
||||
b"#!/usr/bin/env bash\necho 'FAIL: Coverage 40% is below threshold 80%'\nexit 1\n",
|
||||
)
|
||||
.unwrap();
|
||||
f.sync_all().unwrap();
|
||||
}
|
||||
let mut perms = fs::metadata(&script).unwrap().permissions();
|
||||
|
||||
@@ -7,7 +7,7 @@ use crate::slog;
|
||||
|
||||
type ContentTransform = Option<Box<dyn Fn(&str) -> String>>;
|
||||
|
||||
pub(super) fn item_type_from_id(item_id: &str) -> &'static str {
|
||||
pub(crate) fn item_type_from_id(item_id: &str) -> &'static str {
|
||||
// New format: {digits}_{type}_{slug}
|
||||
let after_num = item_id.trim_start_matches(|c: char| c.is_ascii_digit());
|
||||
if after_num.starts_with("_bug_") {
|
||||
@@ -37,9 +37,7 @@ fn move_item<'a>(
|
||||
// Use the typed projection for compile-safe stage comparison.
|
||||
if let Ok(Some(typed_item)) = crate::pipeline_state::read_typed(story_id) {
|
||||
let current_dir = typed_item.stage.dir_name();
|
||||
if current_dir == target_dir
|
||||
|| extra_done_dirs.contains(¤t_dir)
|
||||
{
|
||||
if current_dir == target_dir || extra_done_dirs.contains(¤t_dir) {
|
||||
return Ok(None); // Idempotent: already there.
|
||||
}
|
||||
|
||||
@@ -77,11 +75,7 @@ fn move_item<'a>(
|
||||
}))
|
||||
};
|
||||
|
||||
crate::db::move_item_stage(
|
||||
story_id,
|
||||
target_dir,
|
||||
transform.as_ref().map(|f| f.as_ref()),
|
||||
);
|
||||
crate::db::move_item_stage(story_id, target_dir, transform.as_ref().map(|f| f.as_ref()));
|
||||
|
||||
slog!("[lifecycle] Moved '{story_id}' from work/{src_dir}/ to work/{target_dir}/");
|
||||
return Ok(Some(src_dir));
|
||||
@@ -121,7 +115,16 @@ fn move_item<'a>(
|
||||
/// that has already advanced past the coding stage.
|
||||
/// Idempotent: if already in `2_current/`, returns Ok. If not found, logs and returns Ok.
|
||||
pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(), String> {
|
||||
move_item(project_root, story_id, &["1_backlog"], "2_current", &[], true, &[]).map(|_| ())
|
||||
move_item(
|
||||
project_root,
|
||||
story_id,
|
||||
&["1_backlog"],
|
||||
"2_current",
|
||||
&[],
|
||||
true,
|
||||
&[],
|
||||
)
|
||||
.map(|_| ())
|
||||
}
|
||||
|
||||
/// Check whether a feature branch `feature/story-{story_id}` exists and has
|
||||
@@ -155,14 +158,15 @@ pub fn feature_branch_has_unmerged_changes(project_root: &Path, story_id: &str)
|
||||
}
|
||||
}
|
||||
|
||||
/// Move a story from `work/2_current/` or `work/4_merge/` to `work/5_done/`.
|
||||
/// Move a story from `work/2_current/`, `work/3_qa/`, or `work/4_merge/` to `work/5_done/`.
|
||||
///
|
||||
/// Idempotent if already in `5_done/` or `6_archived/`. Errors if not found in `2_current/` or `4_merge/`.
|
||||
/// Idempotent if already in `5_done/` or `6_archived/`. Errors if not found in any earlier stage.
|
||||
/// Spikes may transition directly from `3_qa/` to `5_done/`, skipping the merge stage.
|
||||
pub fn move_story_to_done(project_root: &Path, story_id: &str) -> Result<(), String> {
|
||||
move_item(
|
||||
project_root,
|
||||
story_id,
|
||||
&["2_current", "4_merge"],
|
||||
&["2_current", "3_qa", "4_merge"],
|
||||
"5_done",
|
||||
&["6_archived"],
|
||||
false,
|
||||
@@ -204,12 +208,25 @@ pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), Strin
|
||||
}
|
||||
|
||||
/// Move a story from `work/3_qa/` back to `work/2_current/`, clearing `review_hold` and writing notes.
|
||||
pub fn reject_story_from_qa(project_root: &Path, story_id: &str, notes: &str) -> Result<(), String> {
|
||||
let moved = move_item(project_root, story_id, &["3_qa"], "2_current", &[], false, &["review_hold"])?;
|
||||
pub fn reject_story_from_qa(
|
||||
project_root: &Path,
|
||||
story_id: &str,
|
||||
notes: &str,
|
||||
) -> Result<(), String> {
|
||||
let moved = move_item(
|
||||
project_root,
|
||||
story_id,
|
||||
&["3_qa"],
|
||||
"2_current",
|
||||
&[],
|
||||
false,
|
||||
&["review_hold"],
|
||||
)?;
|
||||
if moved.is_some() && !notes.is_empty() {
|
||||
// Append rejection notes to the stored content.
|
||||
if let Some(content) = crate::db::read_content(story_id) {
|
||||
let updated = crate::io::story_metadata::write_rejection_notes_to_content(&content, notes);
|
||||
let updated =
|
||||
crate::io::story_metadata::write_rejection_notes_to_content(&content, notes);
|
||||
crate::db::write_content(story_id, &updated);
|
||||
// Re-sync to DB.
|
||||
crate::db::write_item_with_content(story_id, "2_current", &updated);
|
||||
@@ -250,8 +267,16 @@ pub fn move_story_to_stage(
|
||||
|
||||
let all_dirs: Vec<&str> = STAGES.iter().map(|(_, dir)| *dir).collect();
|
||||
|
||||
match move_item(project_root, story_id, &all_dirs, target_dir, &[], false, &[])
|
||||
.map_err(|_| format!("Work item '{story_id}' not found in any pipeline stage."))?
|
||||
match move_item(
|
||||
project_root,
|
||||
story_id,
|
||||
&all_dirs,
|
||||
target_dir,
|
||||
&[],
|
||||
false,
|
||||
&[],
|
||||
)
|
||||
.map_err(|_| format!("Work item '{story_id}' not found in any pipeline stage."))?
|
||||
{
|
||||
Some(src_dir) => {
|
||||
let from_stage = STAGES
|
||||
|
||||
@@ -248,7 +248,9 @@ pub(crate) fn run_squash_merge(
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to check merge diff: {e}"))?;
|
||||
let changed_files = String::from_utf8_lossy(&diff_check.stdout);
|
||||
let has_code_changes = changed_files.lines().any(|f| !f.starts_with(".huskies/work/"));
|
||||
let has_code_changes = changed_files
|
||||
.lines()
|
||||
.any(|f| !f.starts_with(".huskies/work/"));
|
||||
if !has_code_changes {
|
||||
all_output.push_str(
|
||||
"=== Merge commit contains only .huskies/ file moves, no code changes ===\n",
|
||||
@@ -423,7 +425,14 @@ pub(crate) fn run_squash_merge(
|
||||
// Exclude .huskies/work/ (pipeline file moves) but keep .huskies/project.toml
|
||||
// and other config files which are legitimate deliverables.
|
||||
let diff_stat = Command::new("git")
|
||||
.args(["diff", "--stat", "HEAD~1..HEAD", "--", ".", ":(exclude).huskies/work"])
|
||||
.args([
|
||||
"diff",
|
||||
"--stat",
|
||||
"HEAD~1..HEAD",
|
||||
"--",
|
||||
".",
|
||||
":(exclude).huskies/work",
|
||||
])
|
||||
.current_dir(project_root)
|
||||
.output()
|
||||
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
|
||||
|
||||
@@ -15,7 +15,7 @@ use super::scan::{
|
||||
};
|
||||
use super::story_checks::{
|
||||
check_archived_dependencies, has_merge_failure, has_review_hold, has_unmet_dependencies,
|
||||
is_story_blocked, read_story_front_matter_agent,
|
||||
is_story_blocked, is_story_frozen, read_story_front_matter_agent,
|
||||
};
|
||||
|
||||
impl AgentPool {
|
||||
@@ -64,8 +64,7 @@ impl AgentPool {
|
||||
}
|
||||
// All deps met — promote from backlog to current.
|
||||
slog!("[auto-assign] Story '{story_id}' deps met; promoting from backlog to current.");
|
||||
if let Err(e) =
|
||||
crate::agents::lifecycle::move_story_to_current(project_root, story_id)
|
||||
if let Err(e) = crate::agents::lifecycle::move_story_to_current(project_root, story_id)
|
||||
{
|
||||
slog!("[auto-assign] Failed to promote '{story_id}' to current: {e}");
|
||||
}
|
||||
@@ -104,6 +103,12 @@ impl AgentPool {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip frozen stories — pipeline advancement is suspended.
|
||||
if is_story_frozen(project_root, stage_dir, story_id) {
|
||||
slog!("[auto-assign] Story '{story_id}' is frozen; skipping until unfrozen.");
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip blocked stories (retry limit exceeded).
|
||||
if is_story_blocked(project_root, stage_dir, story_id) {
|
||||
continue;
|
||||
@@ -160,10 +165,12 @@ impl AgentPool {
|
||||
);
|
||||
let _ = crate::io::story_metadata::write_blocked(&story_path);
|
||||
}
|
||||
let _ = self.watcher_tx.send(crate::io::watcher::WatcherEvent::StoryBlocked {
|
||||
story_id: story_id.to_string(),
|
||||
reason: empty_diff_reason.to_string(),
|
||||
});
|
||||
let _ = self
|
||||
.watcher_tx
|
||||
.send(crate::io::watcher::WatcherEvent::StoryBlocked {
|
||||
story_id: story_id.to_string(),
|
||||
reason: empty_diff_reason.to_string(),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -299,15 +306,15 @@ mod tests {
|
||||
async fn auto_assign_picks_up_story_queued_in_current() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let sk = tmp.path().join(".huskies");
|
||||
let current = sk.join("work/2_current");
|
||||
std::fs::create_dir_all(¤t).unwrap();
|
||||
std::fs::create_dir_all(&sk).unwrap();
|
||||
std::fs::write(
|
||||
sk.join("project.toml"),
|
||||
"[[agent]]\nname = \"coder-1\"\nstage = \"coder\"\n",
|
||||
)
|
||||
.unwrap();
|
||||
// Place the story in 2_current/ (simulating the "queued" state).
|
||||
std::fs::write(current.join("story-3.md"), "---\nname: Story 3\n---\n").unwrap();
|
||||
// Place the story in 2_current/ via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content("story-3", "2_current", "---\nname: Story 3\n---\n");
|
||||
|
||||
let pool = AgentPool::new_test(3001);
|
||||
// No agents are running — coder-1 is free.
|
||||
@@ -548,31 +555,33 @@ mod tests {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
let sk = root.join(".huskies");
|
||||
let current = sk.join("work/2_current");
|
||||
let done = sk.join("work/5_done");
|
||||
std::fs::create_dir_all(¤t).unwrap();
|
||||
std::fs::create_dir_all(&done).unwrap();
|
||||
std::fs::create_dir_all(&sk).unwrap();
|
||||
std::fs::write(
|
||||
sk.join("project.toml"),
|
||||
"[[agent]]\nname = \"coder-1\"\nstage = \"coder\"\n",
|
||||
)
|
||||
.unwrap();
|
||||
// Seed stories via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
// Dep 999 is now done.
|
||||
std::fs::write(done.join("999_story_dep.md"), "---\nname: Dep\n---\n").unwrap();
|
||||
crate::db::write_item_with_content("999_story_dep", "5_done", "---\nname: Dep\n---\n");
|
||||
// Story 10 depends on 999 which is done.
|
||||
std::fs::write(
|
||||
current.join("10_story_unblocked.md"),
|
||||
crate::db::write_item_with_content(
|
||||
"10_story_unblocked",
|
||||
"2_current",
|
||||
"---\nname: Unblocked\ndepends_on: [999]\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let pool = AgentPool::new_test(3001);
|
||||
pool.auto_assign_available_work(root).await;
|
||||
|
||||
let agents = pool.agents.lock().unwrap();
|
||||
let has_pending = agents
|
||||
.values()
|
||||
.any(|a| matches!(a.status, crate::agents::AgentStatus::Pending | crate::agents::AgentStatus::Running));
|
||||
let has_pending = agents.values().any(|a| {
|
||||
matches!(
|
||||
a.status,
|
||||
crate::agents::AgentStatus::Pending | crate::agents::AgentStatus::Running
|
||||
)
|
||||
});
|
||||
assert!(
|
||||
has_pending,
|
||||
"story with all deps done should be auto-assigned"
|
||||
|
||||
@@ -161,17 +161,19 @@ impl AgentPool {
|
||||
|
||||
match qa_mode {
|
||||
crate::io::story_metadata::QaMode::Server => {
|
||||
if let Err(e) =
|
||||
crate::agents::move_story_to_merge(project_root, story_id)
|
||||
{
|
||||
eprintln!("[startup:reconcile] Failed to move '{story_id}' to 4_merge/: {e}");
|
||||
if let Err(e) = crate::agents::move_story_to_merge(project_root, story_id) {
|
||||
eprintln!(
|
||||
"[startup:reconcile] Failed to move '{story_id}' to 4_merge/: {e}"
|
||||
);
|
||||
let _ = progress_tx.send(ReconciliationEvent {
|
||||
story_id: story_id.clone(),
|
||||
status: "failed".to_string(),
|
||||
message: format!("Failed to advance to merge: {e}"),
|
||||
});
|
||||
} else {
|
||||
eprintln!("[startup:reconcile] Moved '{story_id}' → 4_merge/ (qa: server).");
|
||||
eprintln!(
|
||||
"[startup:reconcile] Moved '{story_id}' → 4_merge/ (qa: server)."
|
||||
);
|
||||
let _ = progress_tx.send(ReconciliationEvent {
|
||||
story_id: story_id.clone(),
|
||||
status: "advanced".to_string(),
|
||||
@@ -180,10 +182,10 @@ impl AgentPool {
|
||||
}
|
||||
}
|
||||
crate::io::story_metadata::QaMode::Agent => {
|
||||
if let Err(e) =
|
||||
crate::agents::move_story_to_qa(project_root, story_id)
|
||||
{
|
||||
eprintln!("[startup:reconcile] Failed to move '{story_id}' to 3_qa/: {e}");
|
||||
if let Err(e) = crate::agents::move_story_to_qa(project_root, story_id) {
|
||||
eprintln!(
|
||||
"[startup:reconcile] Failed to move '{story_id}' to 3_qa/: {e}"
|
||||
);
|
||||
let _ = progress_tx.send(ReconciliationEvent {
|
||||
story_id: story_id.clone(),
|
||||
status: "failed".to_string(),
|
||||
@@ -199,10 +201,10 @@ impl AgentPool {
|
||||
}
|
||||
}
|
||||
crate::io::story_metadata::QaMode::Human => {
|
||||
if let Err(e) =
|
||||
crate::agents::move_story_to_qa(project_root, story_id)
|
||||
{
|
||||
eprintln!("[startup:reconcile] Failed to move '{story_id}' to 3_qa/: {e}");
|
||||
if let Err(e) = crate::agents::move_story_to_qa(project_root, story_id) {
|
||||
eprintln!(
|
||||
"[startup:reconcile] Failed to move '{story_id}' to 3_qa/: {e}"
|
||||
);
|
||||
let _ = progress_tx.send(ReconciliationEvent {
|
||||
story_id: story_id.clone(),
|
||||
status: "failed".to_string(),
|
||||
@@ -219,7 +221,9 @@ impl AgentPool {
|
||||
"[startup:reconcile] Failed to set review_hold on '{story_id}': {e}"
|
||||
);
|
||||
}
|
||||
eprintln!("[startup:reconcile] Moved '{story_id}' → 3_qa/ (qa: human — holding for review).");
|
||||
eprintln!(
|
||||
"[startup:reconcile] Moved '{story_id}' → 3_qa/ (qa: human — holding for review)."
|
||||
);
|
||||
let _ = progress_tx.send(ReconciliationEvent {
|
||||
story_id: story_id.clone(),
|
||||
status: "review_hold".to_string(),
|
||||
@@ -284,9 +288,7 @@ impl AgentPool {
|
||||
let story_path = project_root
|
||||
.join(".huskies/work/3_qa")
|
||||
.join(format!("{story_id}.md"));
|
||||
if let Err(e) =
|
||||
crate::io::story_metadata::write_review_hold(&story_path)
|
||||
{
|
||||
if let Err(e) = crate::io::story_metadata::write_review_hold(&story_path) {
|
||||
eprintln!(
|
||||
"[startup:reconcile] Failed to set review_hold on '{story_id}': {e}"
|
||||
);
|
||||
|
||||
@@ -18,30 +18,17 @@ pub(in crate::agents::pool) fn is_agent_free(
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn scan_stage_items(project_root: &Path, stage_dir: &str) -> Vec<String> {
|
||||
pub(super) fn scan_stage_items(_project_root: &Path, stage_dir: &str) -> Vec<String> {
|
||||
use std::collections::BTreeSet;
|
||||
let mut items = BTreeSet::new();
|
||||
|
||||
// Include CRDT items via the typed projection — the primary source of truth.
|
||||
// CRDT is the only source of truth — no filesystem fallback.
|
||||
for item in crate::pipeline_state::read_all_typed() {
|
||||
if item.stage.dir_name() == stage_dir {
|
||||
items.insert(item.story_id.0.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Also include filesystem items (backwards compat / migration fallback).
|
||||
let dir = project_root.join(".huskies").join("work").join(stage_dir);
|
||||
if dir.is_dir() && let Ok(entries) = std::fs::read_dir(&dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.extension().and_then(|e| e.to_str()) == Some("md")
|
||||
&& let Some(stem) = path.file_stem().and_then(|s| s.to_str())
|
||||
{
|
||||
items.insert(stem.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
items.into_iter().collect()
|
||||
}
|
||||
|
||||
@@ -165,6 +152,39 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
// ── Bug 556: stale filesystem shadow must not override CRDT stage ──────────
|
||||
//
|
||||
// A story file left in 1_backlog/ on disk but tracked as 6_archived in the
|
||||
// CRDT must NOT appear when scanning 1_backlog. Without the fix, the
|
||||
// filesystem fallback would add it, causing promote_ready_backlog_stories to
|
||||
// attempt to promote an archived story.
|
||||
#[test]
|
||||
fn scan_stage_items_skips_filesystem_item_known_to_crdt_at_different_stage() {
|
||||
crate::db::ensure_content_store();
|
||||
// Write the story into the CRDT as 6_archived.
|
||||
crate::db::write_item_with_content(
|
||||
"9970_story_archived",
|
||||
"6_archived",
|
||||
"---\nname: Archived\n---\n",
|
||||
);
|
||||
|
||||
// Also place a stale .md file in a temp 1_backlog/ dir.
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let backlog = tmp.path().join(".huskies/work/1_backlog");
|
||||
std::fs::create_dir_all(&backlog).unwrap();
|
||||
std::fs::write(
|
||||
backlog.join("9970_story_archived.md"),
|
||||
"---\nname: Archived\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let items = scan_stage_items(tmp.path(), "1_backlog");
|
||||
assert!(
|
||||
!items.contains(&"9970_story_archived".to_string()),
|
||||
"archived CRDT story must not appear in 1_backlog scan via stale filesystem shadow"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scan_stage_items_returns_empty_for_missing_dir() {
|
||||
// Use a unique stage name that no other test writes to, so
|
||||
@@ -576,7 +596,9 @@ stage = "coder"
|
||||
);
|
||||
|
||||
let count = count_active_agents_for_stage(&config, &agents, &PipelineStage::Coder);
|
||||
assert_eq!(count, 1, "Only Running coder should be counted, not Completed");
|
||||
assert_eq!(
|
||||
count, 1,
|
||||
"Only Running coder should be counted, not Completed"
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -52,18 +52,18 @@ pub(super) fn is_story_blocked(project_root: &Path, _stage_dir: &str, story_id:
|
||||
///
|
||||
/// Reads dependency state from the CRDT document first. Falls back to the
|
||||
/// filesystem when the CRDT layer is not initialised.
|
||||
pub(super) fn has_unmet_dependencies(
|
||||
project_root: &Path,
|
||||
stage_dir: &str,
|
||||
story_id: &str,
|
||||
) -> bool {
|
||||
pub(super) fn has_unmet_dependencies(project_root: &Path, stage_dir: &str, story_id: &str) -> bool {
|
||||
// Prefer CRDT-based check.
|
||||
let crdt_deps = crate::crdt_state::check_unmet_deps_crdt(story_id);
|
||||
if !crdt_deps.is_empty() {
|
||||
return true;
|
||||
}
|
||||
// If the CRDT had the item and returned empty deps, it means all are met.
|
||||
if crate::pipeline_state::read_typed(story_id).ok().flatten().is_some() {
|
||||
if crate::pipeline_state::read_typed(story_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.is_some()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
// Fallback: filesystem check (CRDT not initialised or item not yet in CRDT).
|
||||
@@ -82,13 +82,30 @@ pub(super) fn check_archived_dependencies(
|
||||
story_id: &str,
|
||||
) -> Vec<u32> {
|
||||
// Prefer CRDT-based check when the item is known to CRDT.
|
||||
if crate::pipeline_state::read_typed(story_id).ok().flatten().is_some() {
|
||||
if crate::pipeline_state::read_typed(story_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.is_some()
|
||||
{
|
||||
return crate::crdt_state::check_archived_deps_crdt(story_id);
|
||||
}
|
||||
// Fallback: filesystem.
|
||||
crate::io::story_metadata::check_archived_deps(project_root, stage_dir, story_id)
|
||||
}
|
||||
|
||||
/// Return `true` if the story file has `frozen: true` in its front matter.
|
||||
pub(super) fn is_story_frozen(project_root: &Path, _stage_dir: &str, story_id: &str) -> bool {
|
||||
use crate::io::story_metadata::parse_front_matter;
|
||||
let contents = match read_story_contents(project_root, story_id) {
|
||||
Some(c) => c,
|
||||
None => return false,
|
||||
};
|
||||
parse_front_matter(&contents)
|
||||
.ok()
|
||||
.and_then(|m| m.frozen)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Return `true` if the story file has a `merge_failure` field in its front matter.
|
||||
pub(super) fn has_merge_failure(project_root: &Path, _stage_dir: &str, story_id: &str) -> bool {
|
||||
use crate::io::story_metadata::parse_front_matter;
|
||||
@@ -146,7 +163,11 @@ mod tests {
|
||||
"---\nname: Blocked\ndepends_on: [999]\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
assert!(has_unmet_dependencies(tmp.path(), "2_current", "10_story_blocked"));
|
||||
assert!(has_unmet_dependencies(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"10_story_blocked"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -162,7 +183,11 @@ mod tests {
|
||||
"---\nname: Ok\ndepends_on: [999]\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
assert!(!has_unmet_dependencies(tmp.path(), "2_current", "10_story_ok"));
|
||||
assert!(!has_unmet_dependencies(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"10_story_ok"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -171,7 +196,11 @@ mod tests {
|
||||
let current = tmp.path().join(".huskies/work/2_current");
|
||||
std::fs::create_dir_all(¤t).unwrap();
|
||||
std::fs::write(current.join("5_story_free.md"), "---\nname: Free\n---\n").unwrap();
|
||||
assert!(!has_unmet_dependencies(tmp.path(), "2_current", "5_story_free"));
|
||||
assert!(!has_unmet_dependencies(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"5_story_free"
|
||||
));
|
||||
}
|
||||
|
||||
// ── Bug 503: archived-dep visibility ─────────────────────────────────────
|
||||
@@ -184,7 +213,11 @@ mod tests {
|
||||
let archived = tmp.path().join(".huskies/work/6_archived");
|
||||
std::fs::create_dir_all(&backlog).unwrap();
|
||||
std::fs::create_dir_all(&archived).unwrap();
|
||||
std::fs::write(archived.join("500_spike_crdt.md"), "---\nname: CRDT Spike\n---\n").unwrap();
|
||||
std::fs::write(
|
||||
archived.join("500_spike_crdt.md"),
|
||||
"---\nname: CRDT Spike\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
std::fs::write(
|
||||
backlog.join("503_story_dependent.md"),
|
||||
"---\nname: Dependent\ndepends_on: [500]\n---\n",
|
||||
|
||||
@@ -84,8 +84,8 @@ impl AgentPool {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use super::super::super::{AgentPool, composite_key};
|
||||
use super::*;
|
||||
|
||||
// ── check_orphaned_agents return value tests (bug 161) ──────────────────
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
//! Agent pool — manages the set of active agents across all pipeline stages.
|
||||
mod auto_assign;
|
||||
mod pipeline;
|
||||
mod start;
|
||||
mod stop;
|
||||
mod wait;
|
||||
mod process;
|
||||
mod query;
|
||||
mod start;
|
||||
mod stop;
|
||||
mod types;
|
||||
mod wait;
|
||||
mod worktree;
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -68,10 +68,15 @@ impl AgentPool {
|
||||
Err(broadcast::error::RecvError::Lagged(_)) => continue,
|
||||
};
|
||||
let (story_id, agent_name) = match &event {
|
||||
WatcherEvent::RateLimitWarning { story_id, agent_name }
|
||||
| WatcherEvent::RateLimitHardBlock { story_id, agent_name, .. } => {
|
||||
(story_id.clone(), agent_name.clone())
|
||||
WatcherEvent::RateLimitWarning {
|
||||
story_id,
|
||||
agent_name,
|
||||
}
|
||||
| WatcherEvent::RateLimitHardBlock {
|
||||
story_id,
|
||||
agent_name,
|
||||
..
|
||||
} => (story_id.clone(), agent_name.clone()),
|
||||
_ => continue,
|
||||
};
|
||||
let key = composite_key(&story_id, &agent_name);
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
//! Pipeline advance — moves stories forward through pipeline stages after agent completion.
|
||||
use crate::config::ProjectConfig;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use crate::slog;
|
||||
use crate::slog_error;
|
||||
use crate::slog_warn;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use super::super::super::{
|
||||
CompletionReport, PipelineStage,
|
||||
agent_config_stage, pipeline_stage,
|
||||
};
|
||||
use super::super::super::{CompletionReport, PipelineStage, agent_config_stage, pipeline_stage};
|
||||
use super::super::{AgentPool, StoryAgent};
|
||||
|
||||
impl AgentPool {
|
||||
@@ -43,6 +40,13 @@ impl AgentPool {
|
||||
.map(agent_config_stage)
|
||||
.unwrap_or_else(|| pipeline_stage(agent_name));
|
||||
|
||||
// If the story is frozen, do not advance the pipeline. The agent's work
|
||||
// is done but the story stays at its current stage.
|
||||
if crate::io::story_metadata::is_story_frozen_in_store(story_id) {
|
||||
slog!("[pipeline] Story '{story_id}' is frozen; pipeline advancement suppressed.");
|
||||
return;
|
||||
}
|
||||
|
||||
match stage {
|
||||
PipelineStage::Other => {
|
||||
// Supervisors and unknown agents do not advance the pipeline.
|
||||
@@ -66,14 +70,16 @@ impl AgentPool {
|
||||
"[pipeline] Coder '{agent_name}' passed gates for '{story_id}'. \
|
||||
qa: server — moving directly to merge."
|
||||
);
|
||||
if let Err(e) =
|
||||
crate::agents::lifecycle::move_story_to_merge(&project_root, story_id)
|
||||
{
|
||||
if let Err(e) = crate::agents::lifecycle::move_story_to_merge(
|
||||
&project_root,
|
||||
story_id,
|
||||
) {
|
||||
slog_error!(
|
||||
"[pipeline] Failed to move '{story_id}' to 4_merge/: {e}"
|
||||
);
|
||||
} else {
|
||||
self.start_mergemaster_or_block(&project_root, story_id).await;
|
||||
self.start_mergemaster_or_block(&project_root, story_id)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
crate::io::story_metadata::QaMode::Agent => {
|
||||
@@ -81,13 +87,17 @@ impl AgentPool {
|
||||
"[pipeline] Coder '{agent_name}' passed gates for '{story_id}'. \
|
||||
qa: agent — moving to QA."
|
||||
);
|
||||
if let Err(e) = crate::agents::lifecycle::move_story_to_qa(&project_root, story_id) {
|
||||
if let Err(e) =
|
||||
crate::agents::lifecycle::move_story_to_qa(&project_root, story_id)
|
||||
{
|
||||
slog_error!("[pipeline] Failed to move '{story_id}' to 3_qa/: {e}");
|
||||
} else if let Err(e) = self
|
||||
.start_agent(&project_root, story_id, Some("qa"), None, None)
|
||||
.await
|
||||
{
|
||||
slog_error!("[pipeline] Failed to start qa agent for '{story_id}': {e}");
|
||||
slog_error!(
|
||||
"[pipeline] Failed to start qa agent for '{story_id}': {e}"
|
||||
);
|
||||
}
|
||||
}
|
||||
crate::io::story_metadata::QaMode::Human => {
|
||||
@@ -95,7 +105,9 @@ impl AgentPool {
|
||||
"[pipeline] Coder '{agent_name}' passed gates for '{story_id}'. \
|
||||
qa: human — holding for human review."
|
||||
);
|
||||
if let Err(e) = crate::agents::lifecycle::move_story_to_qa(&project_root, story_id) {
|
||||
if let Err(e) =
|
||||
crate::agents::lifecycle::move_story_to_qa(&project_root, story_id)
|
||||
{
|
||||
slog_error!("[pipeline] Failed to move '{story_id}' to 3_qa/: {e}");
|
||||
} else {
|
||||
write_review_hold_to_store(story_id);
|
||||
@@ -104,7 +116,8 @@ impl AgentPool {
|
||||
}
|
||||
} else {
|
||||
// Increment retry count and check if blocked.
|
||||
if let Some(reason) = should_block_story(story_id, config.max_retries, "coder") {
|
||||
if let Some(reason) = should_block_story(story_id, config.max_retries, "coder")
|
||||
{
|
||||
// Story has exceeded retry limit — do not restart.
|
||||
let _ = self.watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: story_id.to_string(),
|
||||
@@ -144,13 +157,14 @@ impl AgentPool {
|
||||
.clone()
|
||||
.unwrap_or_else(|| project_root.clone());
|
||||
let cp = coverage_path.clone();
|
||||
let coverage_result =
|
||||
tokio::task::spawn_blocking(move || crate::agents::gates::run_coverage_gate(&cp))
|
||||
.await
|
||||
.unwrap_or_else(|e| {
|
||||
slog_warn!("[pipeline] Coverage gate task panicked: {e}");
|
||||
Ok((false, format!("Coverage gate task panicked: {e}")))
|
||||
});
|
||||
let coverage_result = tokio::task::spawn_blocking(move || {
|
||||
crate::agents::gates::run_coverage_gate(&cp)
|
||||
})
|
||||
.await
|
||||
.unwrap_or_else(|e| {
|
||||
slog_warn!("[pipeline] Coverage gate task panicked: {e}");
|
||||
Ok((false, format!("Coverage gate task panicked: {e}")))
|
||||
});
|
||||
let (coverage_passed, coverage_output) = match coverage_result {
|
||||
Ok(pair) => pair,
|
||||
Err(e) => (false, e),
|
||||
@@ -184,17 +198,21 @@ impl AgentPool {
|
||||
"[pipeline] QA passed gates and coverage for '{story_id}'. \
|
||||
Moving directly to merge."
|
||||
);
|
||||
if let Err(e) =
|
||||
crate::agents::lifecycle::move_story_to_merge(&project_root, story_id)
|
||||
{
|
||||
if let Err(e) = crate::agents::lifecycle::move_story_to_merge(
|
||||
&project_root,
|
||||
story_id,
|
||||
) {
|
||||
slog_error!(
|
||||
"[pipeline] Failed to move '{story_id}' to 4_merge/: {e}"
|
||||
);
|
||||
} else {
|
||||
self.start_mergemaster_or_block(&project_root, story_id).await;
|
||||
self.start_mergemaster_or_block(&project_root, story_id)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
} else if let Some(reason) = should_block_story(story_id, config.max_retries, "qa-coverage") {
|
||||
} else if let Some(reason) =
|
||||
should_block_story(story_id, config.max_retries, "qa-coverage")
|
||||
{
|
||||
// Story has exceeded retry limit — do not restart.
|
||||
let _ = self.watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: story_id.to_string(),
|
||||
@@ -217,7 +235,8 @@ impl AgentPool {
|
||||
slog_error!("[pipeline] Failed to restart qa for '{story_id}': {e}");
|
||||
}
|
||||
}
|
||||
} else if let Some(reason) = should_block_story(story_id, config.max_retries, "qa") {
|
||||
} else if let Some(reason) = should_block_story(story_id, config.max_retries, "qa")
|
||||
{
|
||||
// Story has exceeded retry limit — do not restart.
|
||||
let _ = self.watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: story_id.to_string(),
|
||||
@@ -272,13 +291,14 @@ impl AgentPool {
|
||||
"[pipeline] Mergemaster completed for '{story_id}'. Running post-merge tests on master."
|
||||
);
|
||||
let root = project_root.clone();
|
||||
let test_result =
|
||||
tokio::task::spawn_blocking(move || crate::agents::gates::run_project_tests(&root))
|
||||
.await
|
||||
.unwrap_or_else(|e| {
|
||||
slog_warn!("[pipeline] Post-merge test task panicked: {e}");
|
||||
Ok((false, format!("Test task panicked: {e}")))
|
||||
});
|
||||
let test_result = tokio::task::spawn_blocking(move || {
|
||||
crate::agents::gates::run_project_tests(&root)
|
||||
})
|
||||
.await
|
||||
.unwrap_or_else(|e| {
|
||||
slog_warn!("[pipeline] Post-merge test task panicked: {e}");
|
||||
Ok((false, format!("Test task panicked: {e}")))
|
||||
});
|
||||
let (passed, output) = match test_result {
|
||||
Ok(pair) => pair,
|
||||
Err(e) => (false, e),
|
||||
@@ -309,7 +329,9 @@ impl AgentPool {
|
||||
slog!(
|
||||
"[pipeline] Story '{story_id}' done. Worktree preserved for inspection."
|
||||
);
|
||||
} else if let Some(reason) = should_block_story(story_id, config.max_retries, "mergemaster") {
|
||||
} else if let Some(reason) =
|
||||
should_block_story(story_id, config.max_retries, "mergemaster")
|
||||
{
|
||||
// Story has exceeded retry limit — do not restart.
|
||||
let _ = self.watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: story_id.to_string(),
|
||||
@@ -564,7 +586,10 @@ mod tests {
|
||||
)
|
||||
.unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_content("9909_story_agent_qa", "---\nname: Test\nqa: agent\n---\ntest");
|
||||
crate::db::write_content(
|
||||
"9909_story_agent_qa",
|
||||
"---\nname: Test\nqa: agent\n---\ntest",
|
||||
);
|
||||
|
||||
let pool = AgentPool::new_test(3001);
|
||||
pool.run_pipeline_advance(
|
||||
@@ -672,14 +697,9 @@ mod tests {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
// Set up story in 2_current/
|
||||
let current = root.join(".huskies/work/2_current");
|
||||
fs::create_dir_all(¤t).unwrap();
|
||||
fs::write(current.join("173_story_test.md"), "test").unwrap();
|
||||
// Ensure 3_qa/ exists for the move target
|
||||
fs::create_dir_all(root.join(".huskies/work/3_qa")).unwrap();
|
||||
// Ensure 1_backlog/ exists (start_agent calls move_story_to_current)
|
||||
fs::create_dir_all(root.join(".huskies/work/1_backlog")).unwrap();
|
||||
// Seed story via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content("173_story_test", "2_current", "---\nname: test\n---\n");
|
||||
|
||||
// Write a project.toml with a qa agent so start_agent can resolve it.
|
||||
fs::create_dir_all(root.join(".huskies")).unwrap();
|
||||
@@ -758,10 +778,26 @@ stage = "qa"
|
||||
let root = tmp.path();
|
||||
|
||||
// Init a bare git repo on master with one empty commit.
|
||||
Command::new("git").args(["init"]).current_dir(root).output().unwrap();
|
||||
Command::new("git").args(["config", "user.email", "test@test.com"]).current_dir(root).output().unwrap();
|
||||
Command::new("git").args(["config", "user.name", "Test"]).current_dir(root).output().unwrap();
|
||||
Command::new("git").args(["commit", "--allow-empty", "-m", "init"]).current_dir(root).output().unwrap();
|
||||
Command::new("git")
|
||||
.args(["init"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
Command::new("git")
|
||||
.args(["config", "user.email", "test@test.com"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
Command::new("git")
|
||||
.args(["config", "user.name", "Test"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
Command::new("git")
|
||||
.args(["commit", "--allow-empty", "-m", "init"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
// Create a feature branch that points at master HEAD (zero commits ahead).
|
||||
// This replicates the incident where the worktree was reset to master.
|
||||
@@ -775,7 +811,11 @@ stage = "qa"
|
||||
let current = root.join(".huskies/work/2_current");
|
||||
fs::create_dir_all(¤t).unwrap();
|
||||
fs::create_dir_all(root.join(".huskies/work/4_merge")).unwrap();
|
||||
fs::write(current.join("9919_story_no_commits.md"), "---\nname: Test\n---\n").unwrap();
|
||||
fs::write(
|
||||
current.join("9919_story_no_commits.md"),
|
||||
"---\nname: Test\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_content("9919_story_no_commits", "---\nname: Test\n---\n");
|
||||
|
||||
@@ -835,15 +875,14 @@ stage = "qa"
|
||||
|
||||
#[tokio::test]
|
||||
async fn pipeline_advance_picks_up_waiting_qa_stories_after_completion() {
|
||||
use std::fs;
|
||||
use super::super::super::auto_assign::is_agent_free;
|
||||
use std::fs;
|
||||
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
let sk = root.join(".huskies");
|
||||
let qa_dir = sk.join("work/3_qa");
|
||||
fs::create_dir_all(&qa_dir).unwrap();
|
||||
fs::create_dir_all(&sk).unwrap();
|
||||
|
||||
// Configure a single QA agent.
|
||||
fs::write(
|
||||
@@ -856,19 +895,21 @@ stage = "qa"
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Seed stories via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
// Story 292 is in QA with QA agent running (will "complete" via
|
||||
// run_pipeline_advance below). Story 293 is in QA with NO agent —
|
||||
// simulating the "stuck" state from bug 295.
|
||||
fs::write(
|
||||
qa_dir.join("292_story_first.md"),
|
||||
crate::db::write_item_with_content(
|
||||
"292_story_first",
|
||||
"3_qa",
|
||||
"---\nname: First\nqa: human\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
fs::write(
|
||||
qa_dir.join("293_story_second.md"),
|
||||
);
|
||||
crate::db::write_item_with_content(
|
||||
"293_story_second",
|
||||
"3_qa",
|
||||
"---\nname: Second\nqa: human\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let pool = AgentPool::new_test(3001);
|
||||
// QA is currently running on story 292.
|
||||
@@ -908,8 +949,7 @@ stage = "qa"
|
||||
// After pipeline advance, auto_assign should have started QA on story 293.
|
||||
let agents = pool.agents.lock().unwrap();
|
||||
let qa_on_293 = agents.values().any(|a| {
|
||||
a.agent_name == "qa"
|
||||
&& matches!(a.status, AgentStatus::Pending | AgentStatus::Running)
|
||||
a.agent_name == "qa" && matches!(a.status, AgentStatus::Pending | AgentStatus::Running)
|
||||
});
|
||||
assert!(
|
||||
qa_on_293,
|
||||
@@ -940,10 +980,26 @@ stage = "qa"
|
||||
let root = tmp.path();
|
||||
|
||||
// Init a git repo so post-merge tests would pass if they ran.
|
||||
Command::new("git").args(["init"]).current_dir(root).output().unwrap();
|
||||
Command::new("git").args(["config", "user.email", "test@test.com"]).current_dir(root).output().unwrap();
|
||||
Command::new("git").args(["config", "user.name", "Test"]).current_dir(root).output().unwrap();
|
||||
Command::new("git").args(["commit", "--allow-empty", "-m", "init"]).current_dir(root).output().unwrap();
|
||||
Command::new("git")
|
||||
.args(["init"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
Command::new("git")
|
||||
.args(["config", "user.email", "test@test.com"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
Command::new("git")
|
||||
.args(["config", "user.name", "Test"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
Command::new("git")
|
||||
.args(["commit", "--allow-empty", "-m", "init"])
|
||||
.current_dir(root)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
// Set up pipeline dirs.
|
||||
fs::create_dir_all(root.join(".huskies/work/5_done")).unwrap();
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
//! Agent completion handling — processes exit results and triggers pipeline advancement.
|
||||
use crate::slog;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use crate::slog;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use super::super::super::{AgentEvent, AgentStatus, CompletionReport, PipelineStage, pipeline_stage};
|
||||
use super::super::super::{
|
||||
AgentEvent, AgentStatus, CompletionReport, PipelineStage, pipeline_stage,
|
||||
};
|
||||
use super::super::{AgentPool, StoryAgent, composite_key};
|
||||
use super::advance::spawn_pipeline_advance;
|
||||
|
||||
@@ -207,7 +209,10 @@ pub(in crate::agents::pool) async fn run_server_owned_completion(
|
||||
// hold the build lock while gates try to run.
|
||||
if let Some(wt_path) = worktree_path.as_ref()
|
||||
&& let Ok(output) = std::process::Command::new("pgrep")
|
||||
.args(["-f", &format!("--manifest-path {}/Cargo.toml", wt_path.display())])
|
||||
.args([
|
||||
"-f",
|
||||
&format!("--manifest-path {}/Cargo.toml", wt_path.display()),
|
||||
])
|
||||
.output()
|
||||
{
|
||||
let pids = String::from_utf8_lossy(&output.stdout);
|
||||
@@ -216,7 +221,9 @@ pub(in crate::agents::pool) async fn run_server_owned_completion(
|
||||
crate::slog!(
|
||||
"[agents] Killing stale cargo process (pid {pid}) for '{story_id}' before running gates"
|
||||
);
|
||||
unsafe { libc::kill(pid, libc::SIGKILL); }
|
||||
unsafe {
|
||||
libc::kill(pid, libc::SIGKILL);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -311,8 +318,8 @@ pub(in crate::agents::pool) async fn run_server_owned_completion(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use super::super::super::AgentPool;
|
||||
use super::*;
|
||||
use crate::agents::{AgentEvent, AgentStatus, CompletionReport};
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
@@ -24,16 +24,23 @@ impl AgentPool {
|
||||
project_root: &Path,
|
||||
story_id: &str,
|
||||
) -> Result<(), String> {
|
||||
// Guard against double-starts.
|
||||
// Guard against double-starts; clear any completed/failed entry so the
|
||||
// caller can retry without needing to call a separate cleanup step.
|
||||
{
|
||||
let jobs = self.merge_jobs.lock().map_err(|e| e.to_string())?;
|
||||
if let Some(job) = jobs.get(story_id)
|
||||
&& matches!(job.status, crate::agents::merge::MergeJobStatus::Running)
|
||||
{
|
||||
return Err(format!(
|
||||
"Merge already in progress for '{story_id}'. \
|
||||
Use get_merge_status to poll for completion."
|
||||
));
|
||||
let mut jobs = self.merge_jobs.lock().map_err(|e| e.to_string())?;
|
||||
if let Some(job) = jobs.get(story_id) {
|
||||
match &job.status {
|
||||
crate::agents::merge::MergeJobStatus::Running => {
|
||||
return Err(format!(
|
||||
"Merge already in progress for '{story_id}'. \
|
||||
Use get_merge_status to poll for completion."
|
||||
));
|
||||
}
|
||||
// Completed or Failed: clear stale entry so we can start fresh.
|
||||
_ => {
|
||||
jobs.remove(story_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,10 +92,11 @@ impl AgentPool {
|
||||
let sid = story_id.to_string();
|
||||
let br = branch.clone();
|
||||
|
||||
let merge_result =
|
||||
tokio::task::spawn_blocking(move || crate::agents::merge::run_squash_merge(&root, &br, &sid))
|
||||
.await
|
||||
.map_err(|e| format!("Merge task panicked: {e}"))??;
|
||||
let merge_result = tokio::task::spawn_blocking(move || {
|
||||
crate::agents::merge::run_squash_merge(&root, &br, &sid)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Merge task panicked: {e}"))??;
|
||||
|
||||
if !merge_result.success {
|
||||
return Ok(crate::agents::merge::MergeReport {
|
||||
@@ -185,8 +193,8 @@ impl AgentPool {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use super::super::super::AgentPool;
|
||||
use super::*;
|
||||
use crate::agents::merge::{MergeJob, MergeJobStatus};
|
||||
use std::process::Command;
|
||||
|
||||
|
||||
@@ -34,7 +34,11 @@ impl AgentPool {
|
||||
|
||||
/// Test helper: inject a child killer into the registry.
|
||||
#[cfg(test)]
|
||||
pub fn inject_child_killer(&self, key: &str, killer: Box<dyn portable_pty::ChildKiller + Send + Sync>) {
|
||||
pub fn inject_child_killer(
|
||||
&self,
|
||||
key: &str,
|
||||
killer: Box<dyn portable_pty::ChildKiller + Send + Sync>,
|
||||
) {
|
||||
let mut killers = self.child_killers.lock().unwrap();
|
||||
killers.insert(key.to_string(), killer);
|
||||
}
|
||||
|
||||
@@ -4,8 +4,8 @@ use std::path::PathBuf;
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use super::super::{AgentEvent, AgentInfo, AgentStatus, PipelineStage, agent_config_stage};
|
||||
use super::types::{agent_info_from_entry, composite_key};
|
||||
use super::AgentPool;
|
||||
use super::types::{agent_info_from_entry, composite_key};
|
||||
|
||||
impl AgentPool {
|
||||
/// Return the names of configured agents for `stage` that are not currently
|
||||
|
||||
@@ -6,14 +6,15 @@ use std::path::Path;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use super::super::runtime::{
|
||||
AgentRuntime, ClaudeCodeRuntime, GeminiRuntime, OpenAiRuntime, RuntimeContext,
|
||||
};
|
||||
use super::super::{
|
||||
AgentEvent, AgentInfo, AgentStatus, PipelineStage, agent_config_stage,
|
||||
pipeline_stage,
|
||||
AgentEvent, AgentInfo, AgentStatus, PipelineStage, agent_config_stage, pipeline_stage,
|
||||
};
|
||||
use super::types::{PendingGuard, StoryAgent, composite_key};
|
||||
use super::{AgentPool, auto_assign};
|
||||
use super::worktree::find_active_story_stage;
|
||||
use super::super::runtime::{AgentRuntime, ClaudeCodeRuntime, GeminiRuntime, OpenAiRuntime, RuntimeContext};
|
||||
use super::{AgentPool, auto_assign};
|
||||
|
||||
impl AgentPool {
|
||||
/// Start an agent for a story: load config, create worktree, spawn agent.
|
||||
@@ -102,7 +103,9 @@ impl AgentPool {
|
||||
// the auto_assign path (bug 379).
|
||||
let front_matter_agent: Option<String> = if agent_name.is_none() {
|
||||
crate::db::read_content(story_id).and_then(|contents| {
|
||||
crate::io::story_metadata::parse_front_matter(&contents).ok()?.agent
|
||||
crate::io::story_metadata::parse_front_matter(&contents)
|
||||
.ok()?
|
||||
.agent
|
||||
})
|
||||
} else {
|
||||
None
|
||||
@@ -446,7 +449,10 @@ impl AgentPool {
|
||||
|
||||
let run_result = match runtime_name {
|
||||
"claude-code" => {
|
||||
let runtime = ClaudeCodeRuntime::new(child_killers_clone.clone(), watcher_tx_clone.clone());
|
||||
let runtime = ClaudeCodeRuntime::new(
|
||||
child_killers_clone.clone(),
|
||||
watcher_tx_clone.clone(),
|
||||
);
|
||||
let ctx = RuntimeContext {
|
||||
story_id: sid.clone(),
|
||||
agent_name: aname.clone(),
|
||||
@@ -514,7 +520,10 @@ impl AgentPool {
|
||||
.find_agent(&aname)
|
||||
.and_then(|a| a.model.clone());
|
||||
let record = crate::agents::token_usage::build_record(
|
||||
&sid, &aname, model, usage.clone(),
|
||||
&sid,
|
||||
&aname,
|
||||
model,
|
||||
usage.clone(),
|
||||
);
|
||||
if let Err(e) = crate::agents::token_usage::append_record(pr, &record) {
|
||||
slog_error!(
|
||||
@@ -568,15 +577,13 @@ impl AgentPool {
|
||||
// re-dispatches a new mergemaster if the story still needs
|
||||
// merging. This avoids an async call to start_agent inside
|
||||
// a tokio::spawn (which would require Send).
|
||||
let _ = watcher_tx_clone.send(
|
||||
crate::io::watcher::WatcherEvent::WorkItem {
|
||||
stage: "4_merge".to_string(),
|
||||
item_id: sid.clone(),
|
||||
action: "reassign".to_string(),
|
||||
commit_msg: String::new(),
|
||||
from_stage: None,
|
||||
},
|
||||
);
|
||||
let _ = watcher_tx_clone.send(crate::io::watcher::WatcherEvent::WorkItem {
|
||||
stage: "4_merge".to_string(),
|
||||
item_id: sid.clone(),
|
||||
action: "reassign".to_string(),
|
||||
commit_msg: String::new(),
|
||||
from_stage: None,
|
||||
});
|
||||
} else {
|
||||
// Server-owned completion: run acceptance gates automatically
|
||||
// when the agent process exits normally.
|
||||
@@ -712,7 +719,9 @@ stage = "coder"
|
||||
pool.inject_test_agent("story-1", "coder-1", AgentStatus::Running);
|
||||
pool.inject_test_agent("story-2", "coder-2", AgentStatus::Pending);
|
||||
|
||||
let result = pool.start_agent(tmp.path(), "story-3", None, None, None).await;
|
||||
let result = pool
|
||||
.start_agent(tmp.path(), "story-3", None, None, None)
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
let err = result.unwrap_err();
|
||||
assert!(
|
||||
@@ -744,7 +753,9 @@ stage = "coder"
|
||||
let pool = AgentPool::new_test(3001);
|
||||
pool.inject_test_agent("story-1", "coder-1", AgentStatus::Running);
|
||||
|
||||
let result = pool.start_agent(tmp.path(), "story-3", None, None, None).await;
|
||||
let result = pool
|
||||
.start_agent(tmp.path(), "story-3", None, None, None)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
let err = result.unwrap_err();
|
||||
@@ -782,7 +793,9 @@ stage = "coder"
|
||||
|
||||
let pool = AgentPool::new_test(3001);
|
||||
|
||||
let result = pool.start_agent(tmp.path(), "story-5", None, None, None).await;
|
||||
let result = pool
|
||||
.start_agent(tmp.path(), "story-5", None, None, None)
|
||||
.await;
|
||||
match result {
|
||||
Ok(_) => {}
|
||||
Err(e) => {
|
||||
@@ -843,7 +856,9 @@ stage = "coder"
|
||||
let pool = AgentPool::new_test(3001);
|
||||
pool.inject_test_agent("story-a", "qa", AgentStatus::Running);
|
||||
|
||||
let result = pool.start_agent(root, "story-b", Some("qa"), None, None).await;
|
||||
let result = pool
|
||||
.start_agent(root, "story-b", Some("qa"), None, None)
|
||||
.await;
|
||||
|
||||
assert!(
|
||||
result.is_err(),
|
||||
@@ -870,7 +885,9 @@ stage = "coder"
|
||||
let pool = AgentPool::new_test(3001);
|
||||
pool.inject_test_agent("story-a", "qa", AgentStatus::Completed);
|
||||
|
||||
let result = pool.start_agent(root, "story-b", Some("qa"), None, None).await;
|
||||
let result = pool
|
||||
.start_agent(root, "story-b", Some("qa"), None, None)
|
||||
.await;
|
||||
|
||||
if let Err(ref e) = result {
|
||||
assert!(
|
||||
@@ -962,7 +979,9 @@ stage = "coder"
|
||||
let pool = AgentPool::new_test(3099);
|
||||
pool.inject_test_agent("story-x", "qa", AgentStatus::Running);
|
||||
|
||||
let result = pool.start_agent(root, "story-y", Some("qa"), None, None).await;
|
||||
let result = pool
|
||||
.start_agent(root, "story-y", Some("qa"), None, None)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
let err = result.unwrap_err();
|
||||
@@ -1247,11 +1266,7 @@ stage = "coder"
|
||||
)
|
||||
.unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"310_story_foo",
|
||||
"2_current",
|
||||
"---\nname: Foo\n---\n",
|
||||
);
|
||||
crate::db::write_item_with_content("310_story_foo", "2_current", "---\nname: Foo\n---\n");
|
||||
|
||||
let pool = AgentPool::new_test(3099);
|
||||
let result = pool
|
||||
@@ -1323,11 +1338,7 @@ stage = "coder"
|
||||
)
|
||||
.unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"55_story_baz",
|
||||
"4_merge",
|
||||
"---\nname: Baz\n---\n",
|
||||
);
|
||||
crate::db::write_item_with_content("55_story_baz", "4_merge", "---\nname: Baz\n---\n");
|
||||
|
||||
let pool = AgentPool::new_test(3099);
|
||||
let result = pool
|
||||
@@ -1459,7 +1470,13 @@ stage = "coder"
|
||||
|
||||
let pool = AgentPool::new_test(3098);
|
||||
let result = pool
|
||||
.start_agent(root, "502_story_split_brain", Some("mergemaster"), None, None)
|
||||
.start_agent(
|
||||
root,
|
||||
"502_story_split_brain",
|
||||
Some("mergemaster"),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Stage check must not reject mergemaster.
|
||||
@@ -1475,11 +1492,15 @@ stage = "coder"
|
||||
// Before the fix, line 53 of start.rs would have demoted it to
|
||||
// 2_current/ via move_story_to_current finding the 1_backlog shadow.
|
||||
assert!(
|
||||
sk_dir.join("work/4_merge/502_story_split_brain.md").exists(),
|
||||
sk_dir
|
||||
.join("work/4_merge/502_story_split_brain.md")
|
||||
.exists(),
|
||||
"story must still be in 4_merge/ after start_agent(mergemaster, ...)"
|
||||
);
|
||||
assert!(
|
||||
!sk_dir.join("work/2_current/502_story_split_brain.md").exists(),
|
||||
!sk_dir
|
||||
.join("work/2_current/502_story_split_brain.md")
|
||||
.exists(),
|
||||
"story must NOT have been demoted to 2_current/ — that's bug 502"
|
||||
);
|
||||
}
|
||||
@@ -1564,11 +1585,7 @@ stage = "coder"
|
||||
)
|
||||
.unwrap();
|
||||
let story_content = "---\nname: Test Story\nagent: coder-opus\n---\n# Story 368\n";
|
||||
std::fs::write(
|
||||
backlog.join("368_story_test.md"),
|
||||
story_content,
|
||||
)
|
||||
.unwrap();
|
||||
std::fs::write(backlog.join("368_story_test.md"), story_content).unwrap();
|
||||
// Also write to the filesystem current dir and content store so that
|
||||
// start_agent reads the correct front matter even when another test has
|
||||
// left a stale entry for "368_story_test" in the global CRDT.
|
||||
@@ -1583,7 +1600,10 @@ stage = "coder"
|
||||
let result = pool
|
||||
.start_agent(tmp.path(), "368_story_test", None, None, None)
|
||||
.await;
|
||||
assert!(result.is_err(), "expected error when preferred agent is busy");
|
||||
assert!(
|
||||
result.is_err(),
|
||||
"expected error when preferred agent is busy"
|
||||
);
|
||||
let err = result.unwrap_err();
|
||||
assert!(
|
||||
err.contains("coder-opus"),
|
||||
|
||||
@@ -4,8 +4,8 @@ use crate::slog_error;
|
||||
use std::path::Path;
|
||||
|
||||
use super::super::{AgentEvent, AgentStatus};
|
||||
use super::types::composite_key;
|
||||
use super::AgentPool;
|
||||
use super::types::composite_key;
|
||||
|
||||
impl AgentPool {
|
||||
/// Stop a running agent. Worktree is preserved for inspection.
|
||||
|
||||
@@ -5,8 +5,8 @@ use std::sync::{Arc, Mutex};
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use super::super::{AgentEvent, AgentStatus, CompletionReport};
|
||||
use super::types::{StoryAgent, composite_key};
|
||||
use super::AgentPool;
|
||||
use super::types::{StoryAgent, composite_key};
|
||||
|
||||
impl AgentPool {
|
||||
/// Test helper: inject a pre-built agent entry so unit tests can exercise
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
//! Agent wait — blocks until an agent reaches a terminal state with optional timeout.
|
||||
use super::super::{AgentEvent, AgentInfo, AgentStatus};
|
||||
use super::types::{agent_info_from_entry, composite_key};
|
||||
use super::AgentPool;
|
||||
use super::types::{agent_info_from_entry, composite_key};
|
||||
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
|
||||
@@ -23,7 +23,10 @@ impl AgentPool {
|
||||
|
||||
/// Return the active pipeline stage directory name for `story_id`, or `None` if the
|
||||
/// story is not in any active stage (`2_current/`, `3_qa/`, `4_merge/`).
|
||||
pub(super) fn find_active_story_stage(_project_root: &Path, story_id: &str) -> Option<&'static str> {
|
||||
pub(super) fn find_active_story_stage(
|
||||
_project_root: &Path,
|
||||
story_id: &str,
|
||||
) -> Option<&'static str> {
|
||||
if let Ok(Some(item)) = crate::pipeline_state::read_typed(story_id)
|
||||
&& item.stage.is_active()
|
||||
{
|
||||
@@ -39,11 +42,7 @@ mod tests {
|
||||
#[test]
|
||||
fn find_active_story_stage_detects_current() {
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"10_story_test",
|
||||
"2_current",
|
||||
"---\nname: Test\n---\n",
|
||||
);
|
||||
crate::db::write_item_with_content("10_story_test", "2_current", "---\nname: Test\n---\n");
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
assert_eq!(
|
||||
find_active_story_stage(tmp.path(), "10_story_test"),
|
||||
@@ -54,23 +53,18 @@ mod tests {
|
||||
#[test]
|
||||
fn find_active_story_stage_detects_qa() {
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"11_story_test",
|
||||
"3_qa",
|
||||
"---\nname: Test\n---\n",
|
||||
);
|
||||
crate::db::write_item_with_content("11_story_test", "3_qa", "---\nname: Test\n---\n");
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
assert_eq!(find_active_story_stage(tmp.path(), "11_story_test"), Some("3_qa"));
|
||||
assert_eq!(
|
||||
find_active_story_stage(tmp.path(), "11_story_test"),
|
||||
Some("3_qa")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn find_active_story_stage_detects_merge() {
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"12_story_test",
|
||||
"4_merge",
|
||||
"---\nname: Test\n---\n",
|
||||
);
|
||||
crate::db::write_item_with_content("12_story_test", "4_merge", "---\nname: Test\n---\n");
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
assert_eq!(
|
||||
find_active_story_stage(tmp.path(), "12_story_test"),
|
||||
|
||||
+20
-18
@@ -237,10 +237,23 @@ fn run_agent_pty_blocking(
|
||||
story_id.replace(['_', '.'], "-")
|
||||
);
|
||||
let session_count = std::fs::read_dir(&session_dir)
|
||||
.map(|d| d.filter(|e| e.as_ref().map(|e| e.path().extension().is_some_and(|ext| ext == "jsonl")).unwrap_or(false)).count())
|
||||
.map(|d| {
|
||||
d.filter(|e| {
|
||||
e.as_ref()
|
||||
.map(|e| e.path().extension().is_some_and(|ext| ext == "jsonl"))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.count()
|
||||
})
|
||||
.unwrap_or(0);
|
||||
let session_bytes: u64 = std::fs::read_dir(&session_dir)
|
||||
.map(|d| d.filter_map(|e| e.ok()).filter(|e| e.path().extension().is_some_and(|ext| ext == "jsonl")).filter_map(|e| e.metadata().ok()).map(|m| m.len()).sum())
|
||||
.map(|d| {
|
||||
d.filter_map(|e| e.ok())
|
||||
.filter(|e| e.path().extension().is_some_and(|ext| ext == "jsonl"))
|
||||
.filter_map(|e| e.metadata().ok())
|
||||
.map(|m| m.len())
|
||||
.sum()
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
slog!(
|
||||
@@ -373,12 +386,7 @@ fn run_agent_pty_blocking(
|
||||
"stream_event" => {
|
||||
if let Some(event) = json.get("event") {
|
||||
handle_agent_stream_event(
|
||||
event,
|
||||
story_id,
|
||||
agent_name,
|
||||
tx,
|
||||
event_log,
|
||||
log_writer,
|
||||
event, story_id, agent_name, tx, event_log, log_writer,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -409,8 +417,7 @@ fn run_agent_pty_blocking(
|
||||
t
|
||||
}
|
||||
None => {
|
||||
let default = chrono::Utc::now()
|
||||
+ chrono::Duration::minutes(5);
|
||||
let default = chrono::Utc::now() + chrono::Duration::minutes(5);
|
||||
slog!(
|
||||
"[agent:{story_id}:{agent_name}] API rate limit hard block \
|
||||
(status={status}); no reset_at in rate_limit_info, \
|
||||
@@ -469,14 +476,10 @@ fn run_agent_pty_blocking(
|
||||
let wait_result = child.wait();
|
||||
match &wait_result {
|
||||
Ok(status) => {
|
||||
slog!(
|
||||
"[agent:{story_id}:{agent_name}] Child exited: {status:?}"
|
||||
);
|
||||
slog!("[agent:{story_id}:{agent_name}] Child exited: {status:?}");
|
||||
}
|
||||
Err(e) => {
|
||||
slog!(
|
||||
"[agent:{story_id}:{agent_name}] Child wait error: {e}"
|
||||
);
|
||||
slog!("[agent:{story_id}:{agent_name}] Child wait error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -709,8 +712,7 @@ mod tests {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let root = tmp.path();
|
||||
|
||||
let log_writer =
|
||||
AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-emit").unwrap();
|
||||
let log_writer = AgentLogWriter::new(root, "42_story_foo", "coder-1", "sess-emit").unwrap();
|
||||
let log_mutex = Mutex::new(log_writer);
|
||||
|
||||
let (tx, _rx) = broadcast::channel::<AgentEvent>(64);
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::sync::{Arc, Mutex};
|
||||
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{json, Value};
|
||||
use serde_json::{Value, json};
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use crate::agent_log::AgentLogWriter;
|
||||
@@ -135,14 +135,15 @@ impl AgentRuntime for GeminiRuntime {
|
||||
});
|
||||
}
|
||||
|
||||
slog!("[gemini] Turn {turn} for {}:{}", ctx.story_id, ctx.agent_name);
|
||||
|
||||
let request_body = build_generate_content_request(
|
||||
&system_instruction,
|
||||
&contents,
|
||||
&gemini_tools,
|
||||
slog!(
|
||||
"[gemini] Turn {turn} for {}:{}",
|
||||
ctx.story_id,
|
||||
ctx.agent_name
|
||||
);
|
||||
|
||||
let request_body =
|
||||
build_generate_content_request(&system_instruction, &contents, &gemini_tools);
|
||||
|
||||
let url = format!(
|
||||
"https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={api_key}"
|
||||
);
|
||||
@@ -201,8 +202,7 @@ impl AgentRuntime for GeminiRuntime {
|
||||
text_parts.push(text.to_string());
|
||||
}
|
||||
if let Some(fc) = part.get("functionCall")
|
||||
&& let (Some(name), Some(args)) =
|
||||
(fc["name"].as_str(), fc.get("args"))
|
||||
&& let (Some(name), Some(args)) = (fc["name"].as_str(), fc.get("args"))
|
||||
{
|
||||
function_calls.push(GeminiFunctionCall {
|
||||
name: name.to_string(),
|
||||
@@ -263,18 +263,14 @@ impl AgentRuntime for GeminiRuntime {
|
||||
text: format!("\n[Tool call: {}]\n", fc.name),
|
||||
});
|
||||
|
||||
let tool_result =
|
||||
call_mcp_tool(&client, &mcp_base, &fc.name, &fc.args).await;
|
||||
let tool_result = call_mcp_tool(&client, &mcp_base, &fc.name, &fc.args).await;
|
||||
|
||||
let response_value = match &tool_result {
|
||||
Ok(result) => {
|
||||
emit(AgentEvent::Output {
|
||||
story_id: ctx.story_id.clone(),
|
||||
agent_name: ctx.agent_name.clone(),
|
||||
text: format!(
|
||||
"[Tool result: {} chars]\n",
|
||||
result.len()
|
||||
),
|
||||
text: format!("[Tool result: {} chars]\n", result.len()),
|
||||
});
|
||||
json!({ "result": result })
|
||||
}
|
||||
@@ -453,7 +449,10 @@ async fn fetch_and_convert_mcp_tools(
|
||||
});
|
||||
}
|
||||
|
||||
slog!("[gemini] Loaded {} MCP tools as function declarations", declarations.len());
|
||||
slog!(
|
||||
"[gemini] Loaded {} MCP tools as function declarations",
|
||||
declarations.len()
|
||||
);
|
||||
Ok(declarations)
|
||||
}
|
||||
|
||||
@@ -560,10 +559,7 @@ async fn call_mcp_tool(
|
||||
// MCP tools/call returns { result: { content: [{ type: "text", text: "..." }] } }
|
||||
let content = &body["result"]["content"];
|
||||
if let Some(arr) = content.as_array() {
|
||||
let texts: Vec<&str> = arr
|
||||
.iter()
|
||||
.filter_map(|c| c["text"].as_str())
|
||||
.collect();
|
||||
let texts: Vec<&str> = arr.iter().filter_map(|c| c["text"].as_str()).collect();
|
||||
if !texts.is_empty() {
|
||||
return Ok(texts.join("\n"));
|
||||
}
|
||||
@@ -747,7 +743,10 @@ mod tests {
|
||||
|
||||
let body = build_generate_content_request(&system, &contents, &tools);
|
||||
assert!(body["tools"][0]["functionDeclarations"].is_array());
|
||||
assert_eq!(body["tools"][0]["functionDeclarations"][0]["name"], "my_tool");
|
||||
assert_eq!(
|
||||
body["tools"][0]["functionDeclarations"][0]["name"],
|
||||
"my_tool"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -151,8 +151,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn claude_code_runtime_get_status_returns_idle() {
|
||||
use std::collections::HashMap;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use std::collections::HashMap;
|
||||
let killers = Arc::new(Mutex::new(HashMap::new()));
|
||||
let (watcher_tx, _) = broadcast::channel::<WatcherEvent>(16);
|
||||
let runtime = ClaudeCodeRuntime::new(killers, watcher_tx);
|
||||
@@ -161,8 +161,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn claude_code_runtime_stream_events_empty() {
|
||||
use std::collections::HashMap;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use std::collections::HashMap;
|
||||
let killers = Arc::new(Mutex::new(HashMap::new()));
|
||||
let (watcher_tx, _) = broadcast::channel::<WatcherEvent>(16);
|
||||
let runtime = ClaudeCodeRuntime::new(killers, watcher_tx);
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use reqwest::Client;
|
||||
use serde_json::{json, Value};
|
||||
use serde_json::{Value, json};
|
||||
use tokio::sync::broadcast;
|
||||
|
||||
use crate::agent_log::AgentLogWriter;
|
||||
@@ -471,10 +471,7 @@ async fn call_mcp_tool(
|
||||
// MCP tools/call returns { result: { content: [{ type: "text", text: "..." }] } }
|
||||
let content = &body["result"]["content"];
|
||||
if let Some(arr) = content.as_array() {
|
||||
let texts: Vec<&str> = arr
|
||||
.iter()
|
||||
.filter_map(|c| c["text"].as_str())
|
||||
.collect();
|
||||
let texts: Vec<&str> = arr.iter().filter_map(|c| c["text"].as_str()).collect();
|
||||
if !texts.is_empty() {
|
||||
return Ok(texts.join("\n"));
|
||||
}
|
||||
|
||||
@@ -69,7 +69,10 @@ mod tests {
|
||||
// "timmy ambient on" — bot name mentioned but not @-prefixed, so
|
||||
// is_addressed is false; strip_bot_mention still strips "timmy ".
|
||||
let result = try_handle_command(&dispatch, "timmy ambient on");
|
||||
assert!(result.is_some(), "ambient on should fire even when is_addressed=false");
|
||||
assert!(
|
||||
result.is_some(),
|
||||
"ambient on should fire even when is_addressed=false"
|
||||
);
|
||||
assert!(
|
||||
ambient_rooms.lock().unwrap().contains(&room_id),
|
||||
"room should be in ambient_rooms after ambient on"
|
||||
@@ -92,7 +95,10 @@ mod tests {
|
||||
};
|
||||
// Bare "ambient off" in an ambient room (is_addressed=false).
|
||||
let result = try_handle_command(&dispatch, "ambient off");
|
||||
assert!(result.is_some(), "bare ambient off should be handled without LLM");
|
||||
assert!(
|
||||
result.is_some(),
|
||||
"bare ambient off should be handled without LLM"
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(
|
||||
output.contains("Ambient mode off"),
|
||||
@@ -161,7 +167,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn ambient_invalid_args_returns_usage() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy ambient");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy ambient",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(
|
||||
output.contains("Usage"),
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
//! Handler for the `backlog` command — shows only Stage::Backlog items.
|
||||
|
||||
use crate::pipeline_state::{PipelineItem, Stage};
|
||||
use super::CommandContext;
|
||||
use super::status::{story_short_label, unmet_deps_from_items};
|
||||
use crate::pipeline_state::{PipelineItem, Stage};
|
||||
|
||||
pub(super) fn handle_backlog(_ctx: &CommandContext) -> Option<String> {
|
||||
Some(build_backlog_output())
|
||||
@@ -94,16 +94,29 @@ mod tests {
|
||||
make_item("30_story_in_qa", "In QA", Stage::Qa),
|
||||
];
|
||||
let output = build_backlog_from_items(&items);
|
||||
assert!(output.contains("In Backlog"), "should show backlog item: {output}");
|
||||
assert!(!output.contains("In Progress"), "should not show coding items: {output}");
|
||||
assert!(!output.contains("In QA"), "should not show QA items: {output}");
|
||||
assert!(
|
||||
output.contains("In Backlog"),
|
||||
"should show backlog item: {output}"
|
||||
);
|
||||
assert!(
|
||||
!output.contains("In Progress"),
|
||||
"should not show coding items: {output}"
|
||||
);
|
||||
assert!(
|
||||
!output.contains("In QA"),
|
||||
"should not show QA items: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
// -- AC: shows number, type, name -----------------------------------------
|
||||
|
||||
#[test]
|
||||
fn backlog_shows_number_type_and_name() {
|
||||
let items = vec![make_item("42_story_my_feature", "My Feature", Stage::Backlog)];
|
||||
let items = vec![make_item(
|
||||
"42_story_my_feature",
|
||||
"My Feature",
|
||||
Stage::Backlog,
|
||||
)];
|
||||
let output = build_backlog_from_items(&items);
|
||||
assert!(
|
||||
output.contains("42 [story] — My Feature"),
|
||||
@@ -116,7 +129,12 @@ mod tests {
|
||||
#[test]
|
||||
fn backlog_shows_waiting_on_for_unmet_deps() {
|
||||
let items = vec![
|
||||
make_item_with_deps("10_story_waiting", "Waiting Story", Stage::Backlog, vec![999]),
|
||||
make_item_with_deps(
|
||||
"10_story_waiting",
|
||||
"Waiting Story",
|
||||
Stage::Backlog,
|
||||
vec![999],
|
||||
),
|
||||
make_item("999_story_dep", "Dep Story", Stage::Backlog),
|
||||
];
|
||||
let output = build_backlog_from_items(&items);
|
||||
@@ -150,16 +168,17 @@ mod tests {
|
||||
fn backlog_no_waiting_on_when_no_deps() {
|
||||
let items = vec![make_item("5_story_nodeps", "No Deps", Stage::Backlog)];
|
||||
let output = build_backlog_from_items(&items);
|
||||
assert!(!output.contains("waiting on"), "no dep suffix when no deps: {output}");
|
||||
assert!(
|
||||
!output.contains("waiting on"),
|
||||
"no dep suffix when no deps: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
// -- AC: command is registered in the registry ----------------------------
|
||||
|
||||
#[test]
|
||||
fn backlog_command_in_registry() {
|
||||
let found = super::super::commands()
|
||||
.iter()
|
||||
.any(|c| c.name == "backlog");
|
||||
let found = super::super::commands().iter().any(|c| c.name == "backlog");
|
||||
assert!(found, "backlog must be registered in commands()");
|
||||
}
|
||||
|
||||
@@ -171,7 +190,10 @@ mod tests {
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap_or_default();
|
||||
assert!(output.contains("backlog"), "backlog should appear in help output: {output}");
|
||||
assert!(
|
||||
output.contains("backlog"),
|
||||
"backlog should appear in help output: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -181,7 +203,10 @@ mod tests {
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy backlog",
|
||||
);
|
||||
assert!(result.is_some(), "backlog command should match and return Some");
|
||||
assert!(
|
||||
result.is_some(),
|
||||
"backlog command should match and return Some"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -192,7 +217,10 @@ mod tests {
|
||||
"@timmy backlog",
|
||||
);
|
||||
let output = result.unwrap_or_default();
|
||||
assert!(output.contains("Backlog"), "backlog output should contain Backlog header: {output}");
|
||||
assert!(
|
||||
output.contains("Backlog"),
|
||||
"backlog output should contain Backlog header: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
// -- empty backlog --------------------------------------------------------
|
||||
@@ -201,6 +229,9 @@ mod tests {
|
||||
fn backlog_shows_none_when_empty() {
|
||||
let items = vec![make_item("1_story_done", "Done", Stage::Coding)];
|
||||
let output = build_backlog_from_items(&items);
|
||||
assert!(output.contains("*(none)*"), "should show none when no backlog items: {output}");
|
||||
assert!(
|
||||
output.contains("*(none)*"),
|
||||
"should show none when no backlog items: {output}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::status::story_short_label;
|
||||
use super::CommandContext;
|
||||
use super::status::story_short_label;
|
||||
|
||||
/// Show token spend: 24h total, top 5 stories, agent-type breakdown, and
|
||||
/// all-time total.
|
||||
@@ -102,7 +102,10 @@ mod tests {
|
||||
use crate::agents::AgentPool;
|
||||
use std::sync::Arc;
|
||||
|
||||
fn write_token_records(root: &std::path::Path, records: &[crate::agents::token_usage::TokenUsageRecord]) {
|
||||
fn write_token_records(
|
||||
root: &std::path::Path,
|
||||
records: &[crate::agents::token_usage::TokenUsageRecord],
|
||||
) {
|
||||
for r in records {
|
||||
crate::agents::token_usage::append_record(root, r).unwrap();
|
||||
}
|
||||
@@ -118,7 +121,12 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
fn make_record(story_id: &str, agent_name: &str, cost: f64, hours_ago: i64) -> crate::agents::token_usage::TokenUsageRecord {
|
||||
fn make_record(
|
||||
story_id: &str,
|
||||
agent_name: &str,
|
||||
cost: f64,
|
||||
hours_ago: i64,
|
||||
) -> crate::agents::token_usage::TokenUsageRecord {
|
||||
let ts = (chrono::Utc::now() - chrono::Duration::hours(hours_ago)).to_rfc3339();
|
||||
crate::agents::token_usage::TokenUsageRecord {
|
||||
story_id: story_id.to_string(),
|
||||
@@ -157,55 +165,89 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cost_command_appears_in_help() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy help");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("cost"), "help should list cost command: {output}");
|
||||
assert!(
|
||||
output.contains("cost"),
|
||||
"help should list cost command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_no_records() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
assert!(output.contains("No usage records found"), "should show empty message: {output}");
|
||||
assert!(
|
||||
output.contains("No usage records found"),
|
||||
"should show empty message: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_shows_24h_total() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_token_records(tmp.path(), &[
|
||||
make_record("42_story_foo", "coder-1", 1.50, 2),
|
||||
make_record("42_story_foo", "coder-1", 0.50, 5),
|
||||
]);
|
||||
write_token_records(
|
||||
tmp.path(),
|
||||
&[
|
||||
make_record("42_story_foo", "coder-1", 1.50, 2),
|
||||
make_record("42_story_foo", "coder-1", 0.50, 5),
|
||||
],
|
||||
);
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
assert!(output.contains("**Last 24h:** $2.00"), "should show 24h total: {output}");
|
||||
assert!(
|
||||
output.contains("**Last 24h:** $2.00"),
|
||||
"should show 24h total: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_excludes_old_from_24h() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_token_records(tmp.path(), &[
|
||||
make_record("42_story_foo", "coder-1", 1.00, 2), // within 24h
|
||||
make_record("43_story_bar", "coder-1", 5.00, 48), // older
|
||||
]);
|
||||
write_token_records(
|
||||
tmp.path(),
|
||||
&[
|
||||
make_record("42_story_foo", "coder-1", 1.00, 2), // within 24h
|
||||
make_record("43_story_bar", "coder-1", 5.00, 48), // older
|
||||
],
|
||||
);
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
assert!(output.contains("**Last 24h:** $1.00"), "should only count recent: {output}");
|
||||
assert!(output.contains("**All-time:** $6.00"), "all-time should include everything: {output}");
|
||||
assert!(
|
||||
output.contains("**Last 24h:** $1.00"),
|
||||
"should only count recent: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains("**All-time:** $6.00"),
|
||||
"all-time should include everything: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_shows_top_stories() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_token_records(tmp.path(), &[
|
||||
make_record("42_story_foo", "coder-1", 3.00, 1),
|
||||
make_record("43_story_bar", "coder-1", 1.00, 1),
|
||||
make_record("42_story_foo", "qa-1", 2.00, 1),
|
||||
]);
|
||||
write_token_records(
|
||||
tmp.path(),
|
||||
&[
|
||||
make_record("42_story_foo", "coder-1", 3.00, 1),
|
||||
make_record("43_story_bar", "coder-1", 1.00, 1),
|
||||
make_record("42_story_foo", "qa-1", 2.00, 1),
|
||||
],
|
||||
);
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
assert!(output.contains("Top Stories"), "should have top stories section: {output}");
|
||||
assert!(
|
||||
output.contains("Top Stories"),
|
||||
"should have top stories section: {output}"
|
||||
);
|
||||
// Story 42 ($5.00) should appear before story 43 ($1.00)
|
||||
let pos_42 = output.find("42").unwrap();
|
||||
let pos_43 = output.find("43").unwrap();
|
||||
assert!(pos_42 < pos_43, "story 42 should appear before 43 (sorted by cost): {output}");
|
||||
assert!(
|
||||
pos_42 < pos_43,
|
||||
"story 42 should appear before 43 (sorted by cost): {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -213,45 +255,75 @@ mod tests {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let mut records = Vec::new();
|
||||
for i in 1..=7 {
|
||||
records.push(make_record(&format!("{i}_story_s{i}"), "coder-1", i as f64, 1));
|
||||
records.push(make_record(
|
||||
&format!("{i}_story_s{i}"),
|
||||
"coder-1",
|
||||
i as f64,
|
||||
1,
|
||||
));
|
||||
}
|
||||
write_token_records(tmp.path(), &records);
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
// The top 5 most expensive are stories 7,6,5,4,3. Stories 1 and 2 should be excluded.
|
||||
let top_section = output.split("**By Agent Type").next().unwrap();
|
||||
assert!(!top_section.contains("• 1 —"), "story 1 should not be in top 5: {output}");
|
||||
assert!(!top_section.contains("• 2 —"), "story 2 should not be in top 5: {output}");
|
||||
assert!(
|
||||
!top_section.contains("• 1 —"),
|
||||
"story 1 should not be in top 5: {output}"
|
||||
);
|
||||
assert!(
|
||||
!top_section.contains("• 2 —"),
|
||||
"story 2 should not be in top 5: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_shows_agent_type_breakdown() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_token_records(tmp.path(), &[
|
||||
make_record("42_story_foo", "coder-1", 2.00, 1),
|
||||
make_record("42_story_foo", "qa-1", 1.50, 1),
|
||||
make_record("42_story_foo", "mergemaster", 0.50, 1),
|
||||
]);
|
||||
write_token_records(
|
||||
tmp.path(),
|
||||
&[
|
||||
make_record("42_story_foo", "coder-1", 2.00, 1),
|
||||
make_record("42_story_foo", "qa-1", 1.50, 1),
|
||||
make_record("42_story_foo", "mergemaster", 0.50, 1),
|
||||
],
|
||||
);
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
assert!(output.contains("By Agent Type"), "should have agent type section: {output}");
|
||||
assert!(
|
||||
output.contains("By Agent Type"),
|
||||
"should have agent type section: {output}"
|
||||
);
|
||||
assert!(output.contains("coder"), "should show coder type: {output}");
|
||||
assert!(output.contains("qa"), "should show qa type: {output}");
|
||||
assert!(output.contains("mergemaster"), "should show mergemaster type: {output}");
|
||||
assert!(
|
||||
output.contains("mergemaster"),
|
||||
"should show mergemaster type: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_shows_all_time_total() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_token_records(tmp.path(), &[
|
||||
make_record("42_story_foo", "coder-1", 1.00, 2),
|
||||
make_record("43_story_bar", "coder-1", 9.00, 100),
|
||||
]);
|
||||
write_token_records(
|
||||
tmp.path(),
|
||||
&[
|
||||
make_record("42_story_foo", "coder-1", 1.00, 2),
|
||||
make_record("43_story_bar", "coder-1", 9.00, 100),
|
||||
],
|
||||
);
|
||||
let output = cost_cmd_with_root(tmp.path()).unwrap();
|
||||
assert!(output.contains("**All-time:** $10.00"), "should show all-time total: {output}");
|
||||
assert!(
|
||||
output.contains("**All-time:** $10.00"),
|
||||
"should show all-time total: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cost_command_case_insensitive() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy COST");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy COST",
|
||||
);
|
||||
assert!(result.is_some(), "COST should match case-insensitively");
|
||||
}
|
||||
|
||||
|
||||
@@ -59,12 +59,18 @@ fn read_cached_coverage(project_root: &std::path::Path) -> String {
|
||||
fn read_coverage_report(path: &std::path::Path) -> String {
|
||||
let content = match std::fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(e) => return format!("**Coverage (cached)**\n\nError reading `.coverage_report.json`: {e}"),
|
||||
Err(e) => {
|
||||
return format!("**Coverage (cached)**\n\nError reading `.coverage_report.json`: {e}");
|
||||
}
|
||||
};
|
||||
|
||||
let report: CoverageReport = match serde_json::from_str(&content) {
|
||||
Ok(r) => r,
|
||||
Err(e) => return format!("**Coverage (cached)**\n\nFailed to parse `.coverage_report.json`: {e}"),
|
||||
Err(e) => {
|
||||
return format!(
|
||||
"**Coverage (cached)**\n\nFailed to parse `.coverage_report.json`: {e}"
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
format_coverage_report(&report)
|
||||
@@ -81,13 +87,22 @@ fn format_coverage_report(report: &CoverageReport) -> String {
|
||||
// Top 5 lowest-covered files (already sorted ascending in the JSON, but sort
|
||||
// defensively here so the display is correct even if the file was hand-edited).
|
||||
let mut sorted: Vec<&FileCoverage> = report.files.iter().collect();
|
||||
sorted.sort_by(|a, b| a.coverage.partial_cmp(&b.coverage).unwrap_or(std::cmp::Ordering::Equal));
|
||||
sorted.sort_by(|a, b| {
|
||||
a.coverage
|
||||
.partial_cmp(&b.coverage)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
let targets: Vec<&FileCoverage> = sorted.into_iter().take(5).collect();
|
||||
if !targets.is_empty() {
|
||||
out.push_str("\n**Top 5 files needing coverage:**\n");
|
||||
for (i, file) in targets.iter().enumerate() {
|
||||
out.push_str(&format!("{}. {} — {:.1}%\n", i + 1, file.path, file.coverage));
|
||||
out.push_str(&format!(
|
||||
"{}. {} — {:.1}%\n",
|
||||
i + 1,
|
||||
file.path,
|
||||
file.coverage
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,8 +177,13 @@ fn run_coverage(project_root: &std::path::Path) -> String {
|
||||
// Replace the "cached" label with "fresh".
|
||||
result = result.replacen("Coverage (cached)", "Coverage (fresh)", 1);
|
||||
// Replace the cached hint with a pass/fail indicator.
|
||||
let pass_indicator = if out.status.success() { "PASS" } else { "FAIL: coverage below threshold" };
|
||||
result = result.replacen("*Run `coverage run` for fresh results.*", pass_indicator, 1);
|
||||
let pass_indicator = if out.status.success() {
|
||||
"PASS"
|
||||
} else {
|
||||
"FAIL: coverage below threshold"
|
||||
};
|
||||
result =
|
||||
result.replacen("*Run `coverage run` for fresh results.*", pass_indicator, 1);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -322,9 +342,18 @@ mod tests {
|
||||
let output = handle_coverage(&ctx).unwrap();
|
||||
|
||||
assert!(output.contains("72.5"), "should include overall: {output}");
|
||||
assert!(output.contains("60.0"), "should include threshold: {output}");
|
||||
assert!(output.contains("15.0"), "should include lowest-covered file pct: {output}");
|
||||
assert!(output.contains("server/src/low.rs"), "should include lowest-covered file path: {output}");
|
||||
assert!(
|
||||
output.contains("60.0"),
|
||||
"should include threshold: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains("15.0"),
|
||||
"should include lowest-covered file pct: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains("server/src/low.rs"),
|
||||
"should include lowest-covered file path: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -348,9 +377,18 @@ mod tests {
|
||||
let output = handle_coverage(&ctx).unwrap();
|
||||
|
||||
assert!(output.contains("a.rs"), "should show lowest file: {output}");
|
||||
assert!(output.contains("e.rs"), "should show 5th lowest file: {output}");
|
||||
assert!(!output.contains("f.rs"), "should not show 6th file: {output}");
|
||||
assert!(!output.contains("g.rs"), "should not show 7th file: {output}");
|
||||
assert!(
|
||||
output.contains("e.rs"),
|
||||
"should show 5th lowest file: {output}"
|
||||
);
|
||||
assert!(
|
||||
!output.contains("f.rs"),
|
||||
"should not show 6th file: {output}"
|
||||
);
|
||||
assert!(
|
||||
!output.contains("g.rs"),
|
||||
"should not show 7th file: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -466,15 +504,24 @@ mod tests {
|
||||
overall: 66.25,
|
||||
threshold: 60.0,
|
||||
files: vec![
|
||||
FileCoverage { path: "a.rs".to_string(), coverage: 10.0 },
|
||||
FileCoverage { path: "b.rs".to_string(), coverage: 80.0 },
|
||||
FileCoverage {
|
||||
path: "a.rs".to_string(),
|
||||
coverage: 10.0,
|
||||
},
|
||||
FileCoverage {
|
||||
path: "b.rs".to_string(),
|
||||
coverage: 80.0,
|
||||
},
|
||||
],
|
||||
};
|
||||
let result = format_coverage_report(&report);
|
||||
assert!(result.contains("66.2"), "should show overall: {result}");
|
||||
assert!(result.contains("60.0"), "should show threshold: {result}");
|
||||
assert!(result.contains("a.rs"), "should show lowest file: {result}");
|
||||
assert!(result.contains("10.0"), "should show lowest file pct: {result}");
|
||||
assert!(
|
||||
result.contains("10.0"),
|
||||
"should show lowest file pct: {result}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -490,9 +537,18 @@ Frontend line coverage: 70.0%\n\
|
||||
PASS: Coverage 66.25% meets threshold 60.00%\n\
|
||||
";
|
||||
let result = parse_coverage_output(sample, true);
|
||||
assert!(result.contains("62.5"), "should include Rust coverage: {result}");
|
||||
assert!(result.contains("70.0"), "should include Frontend coverage: {result}");
|
||||
assert!(result.contains("66.25"), "should include Overall coverage: {result}");
|
||||
assert!(
|
||||
result.contains("62.5"),
|
||||
"should include Rust coverage: {result}"
|
||||
);
|
||||
assert!(
|
||||
result.contains("70.0"),
|
||||
"should include Frontend coverage: {result}"
|
||||
);
|
||||
assert!(
|
||||
result.contains("66.25"),
|
||||
"should include Overall coverage: {result}"
|
||||
);
|
||||
assert!(result.contains("PASS"), "should indicate PASS: {result}");
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,9 @@
|
||||
//! Passing no dependency numbers clears the field entirely.
|
||||
|
||||
use super::CommandContext;
|
||||
use crate::io::story_metadata::{parse_front_matter, write_depends_on};
|
||||
use crate::io::story_metadata::{
|
||||
parse_front_matter, write_depends_on, write_depends_on_in_content,
|
||||
};
|
||||
|
||||
/// Handle the `depends` command.
|
||||
///
|
||||
@@ -51,7 +53,7 @@ pub(super) fn handle_depends(ctx: &CommandContext) -> Option<String> {
|
||||
}
|
||||
|
||||
// Find the story by numeric prefix: CRDT → content store → filesystem.
|
||||
let (story_id, _stage_dir, path, content) =
|
||||
let (story_id, stage_dir, path, content) =
|
||||
match crate::chat::lookup::find_story_by_number(ctx.project_root, num_str) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
@@ -62,23 +64,48 @@ pub(super) fn handle_depends(ctx: &CommandContext) -> Option<String> {
|
||||
};
|
||||
|
||||
let story_name = content
|
||||
.or_else(|| std::fs::read_to_string(&path).ok())
|
||||
.and_then(|c| parse_front_matter(&c).ok())
|
||||
.as_deref()
|
||||
.and_then(|c| parse_front_matter(c).ok())
|
||||
.and_then(|m| m.name)
|
||||
.unwrap_or_else(|| story_id.clone());
|
||||
|
||||
match write_depends_on(&path, &deps) {
|
||||
Ok(()) if deps.is_empty() => Some(format!(
|
||||
"Cleared all dependencies for **{story_name}** ({story_id})."
|
||||
)),
|
||||
Ok(()) => {
|
||||
// Prefer the CRDT content store; fall back to filesystem only when the
|
||||
// story has not been loaded into the DB (e.g. very early startup or tests
|
||||
// that haven't called write_item_with_content).
|
||||
if let Some(existing) = crate::db::read_content(&story_id) {
|
||||
let updated = write_depends_on_in_content(&existing, &deps);
|
||||
crate::db::write_content(&story_id, &updated);
|
||||
let stage = crate::pipeline_state::read_typed(&story_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|i| i.stage.dir_name().to_string())
|
||||
.unwrap_or_else(|| stage_dir.clone());
|
||||
crate::db::write_item_with_content(&story_id, &stage, &updated);
|
||||
if deps.is_empty() {
|
||||
Some(format!(
|
||||
"Cleared all dependencies for **{story_name}** ({story_id})."
|
||||
))
|
||||
} else {
|
||||
let nums: Vec<String> = deps.iter().map(|n| n.to_string()).collect();
|
||||
Some(format!(
|
||||
"Set depends_on: [{}] for **{story_name}** ({story_id}).",
|
||||
nums.join(", ")
|
||||
))
|
||||
}
|
||||
Err(e) => Some(format!("Failed to update dependencies for {story_id}: {e}")),
|
||||
} else {
|
||||
match write_depends_on(&path, &deps) {
|
||||
Ok(()) if deps.is_empty() => Some(format!(
|
||||
"Cleared all dependencies for **{story_name}** ({story_id})."
|
||||
)),
|
||||
Ok(()) => {
|
||||
let nums: Vec<String> = deps.iter().map(|n| n.to_string()).collect();
|
||||
Some(format!(
|
||||
"Set depends_on: [{}] for **{story_name}** ({story_id}).",
|
||||
nums.join(", ")
|
||||
))
|
||||
}
|
||||
Err(e) => Some(format!("Failed to update dependencies for {story_id}: {e}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -128,14 +155,20 @@ mod tests {
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("depends"), "help should list depends command: {output}");
|
||||
assert!(
|
||||
output.contains("depends"),
|
||||
"help should list depends command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn depends_no_args_returns_usage() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = depends_cmd_with_root(tmp.path(), "").unwrap();
|
||||
assert!(output.contains("Usage"), "no args should show usage: {output}");
|
||||
assert!(
|
||||
output.contains("Usage"),
|
||||
"no args should show usage: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -164,10 +197,10 @@ mod tests {
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"1_backlog",
|
||||
"42_story_foo.md",
|
||||
"9912_story_foo.md",
|
||||
"---\nname: Foo\n---\n",
|
||||
);
|
||||
let output = depends_cmd_with_root(tmp.path(), "42 abc").unwrap();
|
||||
let output = depends_cmd_with_root(tmp.path(), "9912 abc").unwrap();
|
||||
assert!(
|
||||
output.contains("Invalid dependency number"),
|
||||
"non-numeric dep should error: {output}"
|
||||
@@ -175,26 +208,24 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn depends_sets_deps_and_writes_to_file() {
|
||||
fn depends_sets_deps_and_writes_to_content_store() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"1_backlog",
|
||||
"42_story_foo.md",
|
||||
"9910_story_foo.md",
|
||||
"---\nname: Foo\n---\n",
|
||||
);
|
||||
let output = depends_cmd_with_root(tmp.path(), "42 477 478").unwrap();
|
||||
let output = depends_cmd_with_root(tmp.path(), "9910 477 478").unwrap();
|
||||
assert!(
|
||||
output.contains("477") && output.contains("478"),
|
||||
"response should mention dep numbers: {output}"
|
||||
);
|
||||
let contents = std::fs::read_to_string(
|
||||
tmp.path().join(".huskies/work/1_backlog/42_story_foo.md"),
|
||||
)
|
||||
.unwrap();
|
||||
let contents = crate::db::read_content("9910_story_foo")
|
||||
.expect("content store should have updated story");
|
||||
assert!(
|
||||
contents.contains("depends_on: [477, 478]"),
|
||||
"file should have depends_on set: {contents}"
|
||||
"content store should have depends_on set: {contents}"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -204,21 +235,19 @@ mod tests {
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"10_story_bar.md",
|
||||
"9911_story_bar.md",
|
||||
"---\nname: Bar\ndepends_on: [477]\n---\n",
|
||||
);
|
||||
let output = depends_cmd_with_root(tmp.path(), "10").unwrap();
|
||||
let output = depends_cmd_with_root(tmp.path(), "9911").unwrap();
|
||||
assert!(
|
||||
output.contains("Cleared"),
|
||||
"should confirm clearing deps: {output}"
|
||||
);
|
||||
let contents = std::fs::read_to_string(
|
||||
tmp.path().join(".huskies/work/2_current/10_story_bar.md"),
|
||||
)
|
||||
.unwrap();
|
||||
let contents = crate::db::read_content("9911_story_bar")
|
||||
.expect("content store should have updated story");
|
||||
assert!(
|
||||
!contents.contains("depends_on"),
|
||||
"file should have depends_on cleared: {contents}"
|
||||
"content store should have depends_on cleared: {contents}"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -228,12 +257,12 @@ mod tests {
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"3_qa",
|
||||
"55_story_inqa.md",
|
||||
"9913_story_inqa.md",
|
||||
"---\nname: In QA\n---\n",
|
||||
);
|
||||
let output = depends_cmd_with_root(tmp.path(), "55 100").unwrap();
|
||||
let output = depends_cmd_with_root(tmp.path(), "9913 100").unwrap();
|
||||
assert!(
|
||||
output.contains("In QA") || output.contains("55_story_inqa"),
|
||||
output.contains("In QA") || output.contains("9913_story_inqa"),
|
||||
"should find story in qa stage: {output}"
|
||||
);
|
||||
assert!(output.contains("100"), "should mention dep 100: {output}");
|
||||
|
||||
@@ -0,0 +1,259 @@
|
||||
//! Handler for the `diff` command.
|
||||
//!
|
||||
//! Shows the git diff from the configured main branch to the story's worktree
|
||||
//! HEAD, formatted for readability in chat.
|
||||
|
||||
use super::CommandContext;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
/// Display the git diff from the configured main branch to a story's worktree HEAD.
|
||||
///
|
||||
/// Usage: `diff <number>`
|
||||
pub(super) fn handle_diff(ctx: &CommandContext) -> Option<String> {
|
||||
let num_str = ctx.args.trim();
|
||||
if num_str.is_empty() {
|
||||
return Some(format!(
|
||||
"Usage: `{} diff <number>`\n\nShows the git diff from the main branch to the story's worktree HEAD.",
|
||||
ctx.bot_name
|
||||
));
|
||||
}
|
||||
if !num_str.chars().all(|c| c.is_ascii_digit()) {
|
||||
return Some(format!(
|
||||
"Invalid story number: `{num_str}`. Usage: `{} diff <number>`",
|
||||
ctx.bot_name
|
||||
));
|
||||
}
|
||||
|
||||
let story_id = match find_story_id(num_str) {
|
||||
Some(id) => id,
|
||||
None => {
|
||||
return Some(format!(
|
||||
"No story with number **{num_str}** found in the pipeline."
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let wt_path = crate::worktree::worktree_path(ctx.project_root, &story_id);
|
||||
if !wt_path.is_dir() {
|
||||
return Some(format!(
|
||||
"Story **{num_str}** has no worktree. The diff is only available once a coder has started working on it."
|
||||
));
|
||||
}
|
||||
|
||||
let base_branch = resolve_base_branch(ctx.project_root);
|
||||
let range = format!("{base_branch}...HEAD");
|
||||
|
||||
let stat = run_git(&wt_path, &["diff", "--stat", &range]);
|
||||
let diff = run_git(&wt_path, &["diff", &range]);
|
||||
|
||||
let mut out = format!("## Diff — story {num_str} vs `{base_branch}`\n\n");
|
||||
|
||||
if stat.is_empty() && diff.is_empty() {
|
||||
out.push_str("*(no changes relative to main branch)*\n");
|
||||
return Some(out);
|
||||
}
|
||||
|
||||
if !stat.is_empty() {
|
||||
out.push_str("**Changed files:**\n```\n");
|
||||
out.push_str(&stat);
|
||||
out.push_str("\n```\n\n");
|
||||
}
|
||||
|
||||
if !diff.is_empty() {
|
||||
const MAX_DIFF_BYTES: usize = 8_000;
|
||||
if diff.len() > MAX_DIFF_BYTES {
|
||||
let truncated = truncate_at_char_boundary(&diff, MAX_DIFF_BYTES);
|
||||
out.push_str("**Diff** *(truncated — showing first 8 KB)*:\n```diff\n");
|
||||
out.push_str(truncated);
|
||||
out.push_str("\n... (truncated)\n```\n");
|
||||
} else {
|
||||
out.push_str("**Diff:**\n```diff\n");
|
||||
out.push_str(&diff);
|
||||
out.push_str("\n```\n");
|
||||
}
|
||||
}
|
||||
|
||||
Some(out)
|
||||
}
|
||||
|
||||
/// Find the story_id in the pipeline whose numeric prefix matches `num_str`.
|
||||
fn find_story_id(num_str: &str) -> Option<String> {
|
||||
let items = crate::pipeline_state::read_all_typed();
|
||||
items.into_iter().find_map(|item| {
|
||||
let file_num = item
|
||||
.story_id
|
||||
.0
|
||||
.split('_')
|
||||
.next()
|
||||
.filter(|s| !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()))
|
||||
.unwrap_or("");
|
||||
if file_num == num_str {
|
||||
Some(item.story_id.0.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the configured base branch, or auto-detect it from the project root HEAD.
|
||||
fn resolve_base_branch(project_root: &Path) -> String {
|
||||
crate::config::ProjectConfig::load(project_root)
|
||||
.ok()
|
||||
.and_then(|c| c.base_branch)
|
||||
.unwrap_or_else(|| {
|
||||
Command::new("git")
|
||||
.args(["rev-parse", "--abbrev-ref", "HEAD"])
|
||||
.current_dir(project_root)
|
||||
.output()
|
||||
.ok()
|
||||
.filter(|o| o.status.success())
|
||||
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
|
||||
.unwrap_or_else(|| "master".to_string())
|
||||
})
|
||||
}
|
||||
|
||||
/// Run a git command in `dir`, returning trimmed stdout (empty string on failure).
|
||||
fn run_git(dir: &Path, args: &[&str]) -> String {
|
||||
Command::new("git")
|
||||
.args(args)
|
||||
.current_dir(dir)
|
||||
.output()
|
||||
.ok()
|
||||
.filter(|o| o.status.success())
|
||||
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Truncate `s` to at most `max_bytes` bytes without splitting a UTF-8 character.
|
||||
fn truncate_at_char_boundary(s: &str, max_bytes: usize) -> &str {
|
||||
if s.len() <= max_bytes {
|
||||
return s;
|
||||
}
|
||||
let mut boundary = max_bytes;
|
||||
while !s.is_char_boundary(boundary) {
|
||||
boundary -= 1;
|
||||
}
|
||||
&s[..boundary]
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::agents::AgentPool;
|
||||
use std::collections::HashSet;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use super::super::{CommandDispatch, try_handle_command};
|
||||
|
||||
fn diff_cmd(root: &std::path::Path, args: &str) -> Option<String> {
|
||||
let agents = Arc::new(AgentPool::new_test(3000));
|
||||
let ambient_rooms = Arc::new(Mutex::new(HashSet::new()));
|
||||
let room_id = "!test:example.com".to_string();
|
||||
let dispatch = CommandDispatch {
|
||||
bot_name: "Timmy",
|
||||
bot_user_id: "@timmy:homeserver.local",
|
||||
project_root: root,
|
||||
agents: &agents,
|
||||
ambient_rooms: &ambient_rooms,
|
||||
room_id: &room_id,
|
||||
};
|
||||
try_handle_command(&dispatch, &format!("@timmy diff {args}"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diff_command_is_registered() {
|
||||
let found = super::super::commands().iter().any(|c| c.name == "diff");
|
||||
assert!(found, "diff command must be in the registry");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diff_command_appears_in_help() {
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(
|
||||
output.contains("diff"),
|
||||
"help should list diff command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diff_command_no_args_returns_usage() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = diff_cmd(tmp.path(), "").unwrap();
|
||||
assert!(
|
||||
output.contains("Usage"),
|
||||
"no args should show usage: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diff_command_non_numeric_returns_error() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = diff_cmd(tmp.path(), "abc").unwrap();
|
||||
assert!(
|
||||
output.contains("Invalid"),
|
||||
"non-numeric arg should return error: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diff_command_story_not_found_returns_friendly_message() {
|
||||
crate::db::ensure_content_store();
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = diff_cmd(tmp.path(), "99993").unwrap();
|
||||
assert!(
|
||||
output.contains("99993"),
|
||||
"message should include story number: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains("found") || output.contains("pipeline"),
|
||||
"message should explain not found: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diff_command_no_worktree_returns_clear_error() {
|
||||
use crate::chat::test_helpers::write_story_file;
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"55551_story_no_worktree.md",
|
||||
"---\nname: No Worktree\n---\n",
|
||||
);
|
||||
let output = diff_cmd(tmp.path(), "55551").unwrap();
|
||||
assert!(
|
||||
output.contains("worktree")
|
||||
|| output.contains("no worktree")
|
||||
|| output.contains("Worktree"),
|
||||
"should report missing worktree: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn truncate_at_char_boundary_short_string() {
|
||||
let s = "hello";
|
||||
assert_eq!(truncate_at_char_boundary(s, 100), "hello");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn truncate_at_char_boundary_exact_limit() {
|
||||
let s = "hello";
|
||||
assert_eq!(truncate_at_char_boundary(s, 5), "hello");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn truncate_at_char_boundary_over_limit() {
|
||||
let s = "hello world";
|
||||
assert_eq!(truncate_at_char_boundary(s, 5), "hello");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,300 @@
|
||||
//! Handler for the `freeze` and `unfreeze` commands.
|
||||
//!
|
||||
//! `freeze <number>` sets `frozen: true` on the story, halting pipeline
|
||||
//! advancement and auto-assign until `unfreeze <number>` clears the flag.
|
||||
|
||||
use super::CommandContext;
|
||||
use crate::io::story_metadata::{
|
||||
clear_front_matter_field_in_content, parse_front_matter, set_front_matter_field,
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
/// Handle the `freeze` command.
|
||||
///
|
||||
/// Parses `<number>` from `ctx.args`, locates the work item, and sets
|
||||
/// `frozen: true` in its front matter.
|
||||
pub(super) fn handle_freeze(ctx: &CommandContext) -> Option<String> {
|
||||
let num_str = ctx.args.trim();
|
||||
if num_str.is_empty() || !num_str.chars().all(|c| c.is_ascii_digit()) {
|
||||
return Some(format!(
|
||||
"Usage: `{} freeze <number>` (e.g. `freeze 42`)",
|
||||
ctx.bot_name
|
||||
));
|
||||
}
|
||||
Some(freeze_by_number(ctx.project_root, num_str))
|
||||
}
|
||||
|
||||
/// Core freeze logic: find story by numeric prefix and set `frozen: true`.
|
||||
///
|
||||
/// Returns a Markdown-formatted response string suitable for all transports.
|
||||
pub(crate) fn freeze_by_number(project_root: &Path, story_number: &str) -> String {
|
||||
let (story_id, _, _, _) =
|
||||
match crate::chat::lookup::find_story_by_number(project_root, story_number) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
return format!("No story, bug, or spike with number **{story_number}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
freeze_by_story_id(&story_id)
|
||||
}
|
||||
|
||||
fn freeze_by_story_id(story_id: &str) -> String {
|
||||
let contents = match crate::db::read_content(story_id) {
|
||||
Some(c) => c,
|
||||
None => return format!("Failed to read story content for **{story_id}**"),
|
||||
};
|
||||
|
||||
let meta = match parse_front_matter(&contents) {
|
||||
Ok(m) => m,
|
||||
Err(e) => return format!("Failed to parse front matter for **{story_id}**: {e}"),
|
||||
};
|
||||
|
||||
let story_name = meta.name.as_deref().unwrap_or(story_id).to_string();
|
||||
|
||||
if meta.frozen == Some(true) {
|
||||
return format!("**{story_name}** ({story_id}) is already frozen.");
|
||||
}
|
||||
|
||||
let updated = set_front_matter_field(&contents, "frozen", "true");
|
||||
|
||||
crate::db::write_content(story_id, &updated);
|
||||
let stage = crate::pipeline_state::read_typed(story_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|i| i.stage.dir_name().to_string())
|
||||
.unwrap_or_else(|| "2_current".to_string());
|
||||
crate::db::write_item_with_content(story_id, &stage, &updated);
|
||||
|
||||
format!(
|
||||
"Frozen **{story_name}** ({story_id}). Pipeline advancement and auto-assign suppressed until unfrozen."
|
||||
)
|
||||
}
|
||||
|
||||
/// Handle the `unfreeze` command.
|
||||
///
|
||||
/// Parses `<number>` from `ctx.args`, locates the work item, and clears the
|
||||
/// `frozen` flag to resume normal pipeline behaviour.
|
||||
pub(super) fn handle_unfreeze(ctx: &CommandContext) -> Option<String> {
|
||||
let num_str = ctx.args.trim();
|
||||
if num_str.is_empty() || !num_str.chars().all(|c| c.is_ascii_digit()) {
|
||||
return Some(format!(
|
||||
"Usage: `{} unfreeze <number>` (e.g. `unfreeze 42`)",
|
||||
ctx.bot_name
|
||||
));
|
||||
}
|
||||
Some(unfreeze_by_number(ctx.project_root, num_str))
|
||||
}
|
||||
|
||||
/// Core unfreeze logic: find story by numeric prefix and clear `frozen` flag.
|
||||
pub(crate) fn unfreeze_by_number(project_root: &Path, story_number: &str) -> String {
|
||||
let (story_id, _, _, _) =
|
||||
match crate::chat::lookup::find_story_by_number(project_root, story_number) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
return format!("No story, bug, or spike with number **{story_number}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
unfreeze_by_story_id(&story_id)
|
||||
}
|
||||
|
||||
fn unfreeze_by_story_id(story_id: &str) -> String {
|
||||
let contents = match crate::db::read_content(story_id) {
|
||||
Some(c) => c,
|
||||
None => return format!("Failed to read story content for **{story_id}**"),
|
||||
};
|
||||
|
||||
let meta = match parse_front_matter(&contents) {
|
||||
Ok(m) => m,
|
||||
Err(e) => return format!("Failed to parse front matter for **{story_id}**: {e}"),
|
||||
};
|
||||
|
||||
let story_name = meta.name.as_deref().unwrap_or(story_id).to_string();
|
||||
|
||||
if meta.frozen != Some(true) {
|
||||
return format!("**{story_name}** ({story_id}) is not frozen. Nothing to unfreeze.");
|
||||
}
|
||||
|
||||
let updated = clear_front_matter_field_in_content(&contents, "frozen");
|
||||
|
||||
crate::db::write_content(story_id, &updated);
|
||||
let stage = crate::pipeline_state::read_typed(story_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|i| i.stage.dir_name().to_string())
|
||||
.unwrap_or_else(|| "2_current".to_string());
|
||||
crate::db::write_item_with_content(story_id, &stage, &updated);
|
||||
|
||||
format!("Unfrozen **{story_name}** ({story_id}). Normal pipeline behaviour resumed.")
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::agents::AgentPool;
|
||||
use crate::chat::test_helpers::write_story_file;
|
||||
use std::collections::HashSet;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use super::super::{CommandDispatch, try_handle_command};
|
||||
|
||||
fn freeze_cmd_with_root(root: &std::path::Path, args: &str) -> Option<String> {
|
||||
let agents = Arc::new(AgentPool::new_test(3000));
|
||||
let ambient_rooms = Arc::new(Mutex::new(HashSet::new()));
|
||||
let room_id = "!test:example.com".to_string();
|
||||
let dispatch = CommandDispatch {
|
||||
bot_name: "Timmy",
|
||||
bot_user_id: "@timmy:homeserver.local",
|
||||
project_root: root,
|
||||
agents: &agents,
|
||||
ambient_rooms: &ambient_rooms,
|
||||
room_id: &room_id,
|
||||
};
|
||||
try_handle_command(&dispatch, &format!("@timmy freeze {args}"))
|
||||
}
|
||||
|
||||
fn unfreeze_cmd_with_root(root: &std::path::Path, args: &str) -> Option<String> {
|
||||
let agents = Arc::new(AgentPool::new_test(3000));
|
||||
let ambient_rooms = Arc::new(Mutex::new(HashSet::new()));
|
||||
let room_id = "!test:example.com".to_string();
|
||||
let dispatch = CommandDispatch {
|
||||
bot_name: "Timmy",
|
||||
bot_user_id: "@timmy:homeserver.local",
|
||||
project_root: root,
|
||||
agents: &agents,
|
||||
ambient_rooms: &ambient_rooms,
|
||||
room_id: &room_id,
|
||||
};
|
||||
try_handle_command(&dispatch, &format!("@timmy unfreeze {args}"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn freeze_command_is_registered() {
|
||||
use super::super::commands;
|
||||
assert!(
|
||||
commands().iter().any(|c| c.name == "freeze"),
|
||||
"freeze command must be in the registry"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unfreeze_command_is_registered() {
|
||||
use super::super::commands;
|
||||
assert!(
|
||||
commands().iter().any(|c| c.name == "unfreeze"),
|
||||
"unfreeze command must be in the registry"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn freeze_command_no_args_returns_usage() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = freeze_cmd_with_root(tmp.path(), "").unwrap();
|
||||
assert!(
|
||||
output.contains("Usage"),
|
||||
"no args should show usage: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unfreeze_command_no_args_returns_usage() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = unfreeze_cmd_with_root(tmp.path(), "").unwrap();
|
||||
assert!(
|
||||
output.contains("Usage"),
|
||||
"no args should show usage: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn freeze_command_not_found_returns_error() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = freeze_cmd_with_root(tmp.path(), "9988").unwrap();
|
||||
assert!(
|
||||
output.contains("9988") && output.contains("found"),
|
||||
"not-found message should include number and 'found': {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn freeze_command_sets_frozen_flag() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"9940_story_freezeme.md",
|
||||
"---\nname: Freeze Me\n---\n# Story\n",
|
||||
);
|
||||
let output = freeze_cmd_with_root(tmp.path(), "9940").unwrap();
|
||||
assert!(
|
||||
output.contains("Frozen") && output.contains("Freeze Me"),
|
||||
"should confirm freeze with story name: {output}"
|
||||
);
|
||||
let contents = crate::db::read_content("9940_story_freezeme")
|
||||
.expect("story content should be readable after freeze");
|
||||
assert!(
|
||||
contents.contains("frozen: true"),
|
||||
"frozen flag should be set: {contents}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unfreeze_command_clears_frozen_flag() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
crate::db::ensure_content_store();
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"9941_story_frozen.md",
|
||||
"---\nname: Frozen Story\nfrozen: true\n---\n# Story\n",
|
||||
);
|
||||
let output = unfreeze_cmd_with_root(tmp.path(), "9941").unwrap();
|
||||
assert!(
|
||||
output.contains("Unfrozen") && output.contains("Frozen Story"),
|
||||
"should confirm unfreeze with story name: {output}"
|
||||
);
|
||||
let contents = crate::db::read_content("9941_story_frozen")
|
||||
.expect("story content should be readable after unfreeze");
|
||||
assert!(
|
||||
!contents.contains("frozen:"),
|
||||
"frozen flag should be removed: {contents}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unfreeze_command_not_frozen_returns_error() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"9942_story_notfrozen.md",
|
||||
"---\nname: Not Frozen\n---\n# Story\n",
|
||||
);
|
||||
let output = unfreeze_cmd_with_root(tmp.path(), "9942").unwrap();
|
||||
assert!(
|
||||
output.contains("not frozen"),
|
||||
"should return not-frozen error: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn freeze_command_already_frozen_returns_message() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
write_story_file(
|
||||
tmp.path(),
|
||||
"2_current",
|
||||
"9943_story_alreadyfrozen.md",
|
||||
"---\nname: Already Frozen\nfrozen: true\n---\n# Story\n",
|
||||
);
|
||||
let output = freeze_cmd_with_root(tmp.path(), "9943").unwrap();
|
||||
assert!(
|
||||
output.contains("already frozen"),
|
||||
"should say already frozen: {output}"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -100,9 +100,16 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn git_command_appears_in_help() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy help");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("git"), "help should list git command: {output}");
|
||||
assert!(
|
||||
output.contains("git"),
|
||||
"help should list git command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -197,7 +204,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn git_command_case_insensitive() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy GIT");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy GIT",
|
||||
);
|
||||
assert!(result.is_some(), "GIT should match case-insensitively");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Handler for the `help` command.
|
||||
|
||||
use super::{commands, CommandContext};
|
||||
use super::{CommandContext, commands};
|
||||
|
||||
pub(super) fn handle_help(ctx: &CommandContext) -> Option<String> {
|
||||
let mut output = format!("**{} Commands**\n\n", ctx.bot_name);
|
||||
@@ -14,7 +14,7 @@ pub(super) fn handle_help(ctx: &CommandContext) -> Option<String> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::super::tests::{try_cmd_addressed, commands};
|
||||
use super::super::tests::{commands, try_cmd_addressed};
|
||||
|
||||
#[test]
|
||||
fn help_command_matches() {
|
||||
@@ -74,7 +74,10 @@ mod tests {
|
||||
fn help_output_includes_status() {
|
||||
let result = try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy help");
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("status"), "help should list status command: {output}");
|
||||
assert!(
|
||||
output.contains("status"),
|
||||
"help should list status command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -86,7 +89,9 @@ mod tests {
|
||||
.iter()
|
||||
.map(|c| {
|
||||
let marker = format!("**{}**", c.name);
|
||||
let pos = output.find(&marker).expect("command must appear in help as **name**");
|
||||
let pos = output
|
||||
.find(&marker)
|
||||
.expect("command must appear in help as **name**");
|
||||
(pos, c.name)
|
||||
})
|
||||
.collect();
|
||||
@@ -94,20 +99,29 @@ mod tests {
|
||||
let names_in_order: Vec<&str> = positions.iter().map(|(_, n)| *n).collect();
|
||||
let mut sorted = names_in_order.clone();
|
||||
sorted.sort();
|
||||
assert_eq!(names_in_order, sorted, "commands must appear in alphabetical order");
|
||||
assert_eq!(
|
||||
names_in_order, sorted,
|
||||
"commands must appear in alphabetical order"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn help_output_includes_ambient() {
|
||||
let result = try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy help");
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("ambient"), "help should list ambient command: {output}");
|
||||
assert!(
|
||||
output.contains("ambient"),
|
||||
"help should list ambient command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn help_output_includes_htop() {
|
||||
let result = try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy help");
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("htop"), "help should list htop command: {output}");
|
||||
assert!(
|
||||
output.contains("htop"),
|
||||
"help should list htop command: {output}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -152,11 +152,53 @@ fn loc_top_n(project_root: &std::path::Path, top_n: usize) -> String {
|
||||
fn is_source_extension(ext: &str) -> bool {
|
||||
matches!(
|
||||
ext,
|
||||
"rs" | "ts" | "tsx" | "js" | "jsx" | "py" | "go" | "java" | "c" | "cpp" | "h"
|
||||
| "hpp" | "cs" | "rb" | "swift" | "kt" | "scala" | "hs" | "ml" | "ex" | "exs"
|
||||
| "clj" | "lua" | "sh" | "bash" | "zsh" | "fish" | "ps1" | "toml" | "yaml"
|
||||
| "yml" | "json" | "md" | "html" | "css" | "scss" | "less" | "sql" | "graphql"
|
||||
| "proto" | "tf" | "hcl" | "nix" | "r" | "jl" | "dart" | "vue" | "svelte"
|
||||
"rs" | "ts"
|
||||
| "tsx"
|
||||
| "js"
|
||||
| "jsx"
|
||||
| "py"
|
||||
| "go"
|
||||
| "java"
|
||||
| "c"
|
||||
| "cpp"
|
||||
| "h"
|
||||
| "hpp"
|
||||
| "cs"
|
||||
| "rb"
|
||||
| "swift"
|
||||
| "kt"
|
||||
| "scala"
|
||||
| "hs"
|
||||
| "ml"
|
||||
| "ex"
|
||||
| "exs"
|
||||
| "clj"
|
||||
| "lua"
|
||||
| "sh"
|
||||
| "bash"
|
||||
| "zsh"
|
||||
| "fish"
|
||||
| "ps1"
|
||||
| "toml"
|
||||
| "yaml"
|
||||
| "yml"
|
||||
| "json"
|
||||
| "md"
|
||||
| "html"
|
||||
| "css"
|
||||
| "scss"
|
||||
| "less"
|
||||
| "sql"
|
||||
| "graphql"
|
||||
| "proto"
|
||||
| "tf"
|
||||
| "hcl"
|
||||
| "nix"
|
||||
| "r"
|
||||
| "jl"
|
||||
| "dart"
|
||||
| "vue"
|
||||
| "svelte"
|
||||
)
|
||||
}
|
||||
|
||||
@@ -202,7 +244,10 @@ mod tests {
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("loc"), "help should list loc command: {output}");
|
||||
assert!(
|
||||
output.contains("loc"),
|
||||
"help should list loc command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -220,7 +265,10 @@ mod tests {
|
||||
);
|
||||
// At most 10 entries (numbered lines "1." through "10.")
|
||||
let count = output.lines().filter(|l| l.contains(". `")).count();
|
||||
assert!(count <= 10, "default should return at most 10 files, got {count}");
|
||||
assert!(
|
||||
count <= 10,
|
||||
"default should return at most 10 files, got {count}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -233,7 +281,10 @@ mod tests {
|
||||
let ctx = make_ctx(&agents, &ambient_rooms, repo_root, "5");
|
||||
let output = handle_loc(&ctx).unwrap();
|
||||
let count = output.lines().filter(|l| l.contains(". `")).count();
|
||||
assert!(count <= 5, "loc 5 should return at most 5 files, got {count}");
|
||||
assert!(
|
||||
count <= 5,
|
||||
"loc 5 should return at most 5 files, got {count}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -246,7 +297,10 @@ mod tests {
|
||||
let ctx = make_ctx(&agents, &ambient_rooms, repo_root, "20");
|
||||
let output = handle_loc(&ctx).unwrap();
|
||||
let count = output.lines().filter(|l| l.contains(". `")).count();
|
||||
assert!(count <= 20, "loc 20 should return at most 20 files, got {count}");
|
||||
assert!(
|
||||
count <= 20,
|
||||
"loc 20 should return at most 20 files, got {count}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -11,6 +11,8 @@ mod backlog;
|
||||
mod cost;
|
||||
mod coverage;
|
||||
mod depends;
|
||||
mod diff;
|
||||
mod freeze;
|
||||
mod git;
|
||||
mod help;
|
||||
pub(crate) mod loc;
|
||||
@@ -163,6 +165,11 @@ pub fn commands() -> &'static [BotCommand] {
|
||||
description: "Display the full text of a work item: `show <number>`",
|
||||
handler: show::handle_show,
|
||||
},
|
||||
BotCommand {
|
||||
name: "diff",
|
||||
description: "Show git diff from main branch to story worktree HEAD: `diff <number>`",
|
||||
handler: diff::handle_diff,
|
||||
},
|
||||
BotCommand {
|
||||
name: "overview",
|
||||
description: "Show implementation summary for a merged story: `overview <number>`",
|
||||
@@ -203,6 +210,16 @@ pub fn commands() -> &'static [BotCommand] {
|
||||
description: "Reset a blocked story: `unblock <number>` (clears blocked flag and resets retry count)",
|
||||
handler: unblock::handle_unblock,
|
||||
},
|
||||
BotCommand {
|
||||
name: "freeze",
|
||||
description: "Freeze a story at its current stage: `freeze <number>` (suppresses pipeline advancement and auto-assign)",
|
||||
handler: freeze::handle_freeze,
|
||||
},
|
||||
BotCommand {
|
||||
name: "unfreeze",
|
||||
description: "Unfreeze a story: `unfreeze <number>` (resumes normal pipeline behaviour)",
|
||||
handler: freeze::handle_unfreeze,
|
||||
},
|
||||
BotCommand {
|
||||
name: "unreleased",
|
||||
description: "Show stories merged to master since the last release tag",
|
||||
|
||||
@@ -105,51 +105,13 @@ fn find_story_merge_commit(root: &std::path::Path, num_str: &str) -> Option<Stri
|
||||
if hash.is_empty() { None } else { Some(hash) }
|
||||
}
|
||||
|
||||
/// Find the human-readable name of a story by searching content store then filesystem.
|
||||
/// Find the human-readable name of a story by searching CRDT then content store.
|
||||
fn find_story_name(root: &std::path::Path, num_str: &str) -> Option<String> {
|
||||
// Try content store first.
|
||||
for id in crate::db::all_content_ids() {
|
||||
let file_num = id.split('_').next().unwrap_or("");
|
||||
if file_num == num_str && let Some(c) = crate::db::read_content(&id) {
|
||||
return crate::io::story_metadata::parse_front_matter(&c)
|
||||
.ok()
|
||||
.and_then(|m| m.name);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: filesystem scan.
|
||||
let stages = [
|
||||
"1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived",
|
||||
];
|
||||
for stage in &stages {
|
||||
let dir = root.join(".huskies").join("work").join(stage);
|
||||
if !dir.exists() {
|
||||
continue;
|
||||
}
|
||||
if let Ok(entries) = std::fs::read_dir(&dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.extension().and_then(|e| e.to_str()) != Some("md") {
|
||||
continue;
|
||||
}
|
||||
if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
|
||||
let file_num = stem
|
||||
.split('_')
|
||||
.next()
|
||||
.filter(|s| !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()))
|
||||
.unwrap_or("");
|
||||
if file_num == num_str {
|
||||
return std::fs::read_to_string(&path).ok().and_then(|c| {
|
||||
crate::io::story_metadata::parse_front_matter(&c)
|
||||
.ok()
|
||||
.and_then(|m| m.name)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
let (_, _, _, content) = crate::chat::lookup::find_story_by_number(root, num_str)?;
|
||||
let content = content?;
|
||||
crate::io::story_metadata::parse_front_matter(&content)
|
||||
.ok()
|
||||
.and_then(|m| m.name)
|
||||
}
|
||||
|
||||
/// Return the `git show --stat` output for a commit.
|
||||
|
||||
@@ -86,9 +86,7 @@ pub(super) fn handle_test(ctx: &CommandContext) -> Option<String> {
|
||||
let mut result = format!("**Test: {status}**\n\n");
|
||||
|
||||
if tests_passed > 0 || tests_failed > 0 {
|
||||
result.push_str(&format!(
|
||||
"{tests_passed} passed, {tests_failed} failed\n\n"
|
||||
));
|
||||
result.push_str(&format!("{tests_passed} passed, {tests_failed} failed\n\n"));
|
||||
}
|
||||
|
||||
result.push_str(&format!("```\n{truncated}\n```"));
|
||||
@@ -128,7 +126,11 @@ fn parse_test_counts(output: &str) -> (u64, u64) {
|
||||
fn extract_count(line: &str, label: &str) -> Option<u64> {
|
||||
let pos = line.find(label)?;
|
||||
let before = line[..pos].trim_end();
|
||||
let num_str: String = before.chars().rev().take_while(|c| c.is_ascii_digit()).collect();
|
||||
let num_str: String = before
|
||||
.chars()
|
||||
.rev()
|
||||
.take_while(|c| c.is_ascii_digit())
|
||||
.collect();
|
||||
if num_str.is_empty() {
|
||||
return None;
|
||||
}
|
||||
@@ -250,10 +252,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_command_works_via_dispatch() {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
write_script(
|
||||
dir.path(),
|
||||
"#!/usr/bin/env bash\necho 'ok'\nexit 0\n",
|
||||
);
|
||||
write_script(dir.path(), "#!/usr/bin/env bash\necho 'ok'\nexit 0\n");
|
||||
let agents = test_agents();
|
||||
let ambient = test_ambient();
|
||||
let room_id = "!test:example.com".to_string();
|
||||
@@ -317,8 +316,14 @@ mod tests {
|
||||
let ambient = test_ambient();
|
||||
let ctx = make_ctx(&agents, &ambient, dir.path(), "");
|
||||
let output = handle_test(&ctx).unwrap();
|
||||
assert!(output.contains("PASS"), "no-arg should use project root: {output}");
|
||||
assert!(output.contains('7'), "should show count from project root script: {output}");
|
||||
assert!(
|
||||
output.contains("PASS"),
|
||||
"no-arg should use project root: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains('7'),
|
||||
"should show count from project root script: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -329,8 +334,14 @@ mod tests {
|
||||
let ambient = test_ambient();
|
||||
let ctx = make_ctx(&agents, &ambient, dir.path(), "541");
|
||||
let output = handle_test(&ctx).unwrap();
|
||||
assert!(output.contains("PASS"), "should run tests in worktree: {output}");
|
||||
assert!(output.contains('2'), "should show count from worktree script: {output}");
|
||||
assert!(
|
||||
output.contains("PASS"),
|
||||
"should run tests in worktree: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains('2'),
|
||||
"should show count from worktree script: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -382,6 +393,9 @@ mod tests {
|
||||
"run_tests with story number must respond via dispatch"
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("PASS"), "should PASS for valid worktree: {output}");
|
||||
assert!(
|
||||
output.contains("PASS"),
|
||||
"should PASS for valid worktree: {output}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ use super::CommandContext;
|
||||
use crate::http::mcp::wizard_tools::{
|
||||
generation_hint, is_script_step, step_output_path, write_if_missing,
|
||||
};
|
||||
use crate::io::wizard::{format_wizard_state, StepStatus, WizardState};
|
||||
use crate::io::wizard::{StepStatus, WizardState, format_wizard_state};
|
||||
|
||||
pub(super) fn handle_setup(ctx: &CommandContext) -> Option<String> {
|
||||
let sub = ctx.args.trim().to_ascii_lowercase();
|
||||
@@ -84,17 +84,16 @@ fn wizard_confirm_reply(ctx: &CommandContext) -> String {
|
||||
let content = state.steps[idx].content.clone();
|
||||
|
||||
// Write content to disk (only if a file path exists and the file is absent).
|
||||
let write_msg =
|
||||
if let (Some(c), Some(ref path)) = (&content, step_output_path(root, step)) {
|
||||
let executable = is_script_step(step);
|
||||
match write_if_missing(path, c, executable) {
|
||||
Ok(true) => format!(" File written: `{}`.", path.display()),
|
||||
Ok(false) => format!(" File `{}` already exists — skipped.", path.display()),
|
||||
Err(e) => return format!("Error: {e}"),
|
||||
}
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let write_msg = if let (Some(c), Some(ref path)) = (&content, step_output_path(root, step)) {
|
||||
let executable = is_script_step(step);
|
||||
match write_if_missing(path, c, executable) {
|
||||
Ok(true) => format!(" File written: `{}`.", path.display()),
|
||||
Ok(false) => format!(" File `{}` already exists — skipped.", path.display()),
|
||||
Err(e) => return format!("Error: {e}"),
|
||||
}
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
if let Err(e) = state.confirm_step(step) {
|
||||
return format!("Cannot confirm step: {e}");
|
||||
@@ -140,10 +139,7 @@ fn wizard_skip_reply(ctx: &CommandContext) -> String {
|
||||
}
|
||||
|
||||
if state.completed {
|
||||
format!(
|
||||
"Step '{}' skipped. Setup wizard complete!",
|
||||
step.label()
|
||||
)
|
||||
format!("Step '{}' skipped. Setup wizard complete!", step.label())
|
||||
} else {
|
||||
let next = &state.steps[state.current_step_index()];
|
||||
format!(
|
||||
|
||||
@@ -21,8 +21,8 @@ pub(super) fn handle_show(ctx: &CommandContext) -> Option<String> {
|
||||
));
|
||||
}
|
||||
|
||||
// Find the story by numeric prefix: CRDT → content store → filesystem.
|
||||
let (story_id, _stage_dir, path, content) =
|
||||
// Find the story by numeric prefix: CRDT → content store.
|
||||
let (story_id, _stage_dir, _path, content) =
|
||||
match crate::chat::lookup::find_story_by_number(ctx.project_root, num_str) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
@@ -32,16 +32,11 @@ pub(super) fn handle_show(ctx: &CommandContext) -> Option<String> {
|
||||
}
|
||||
};
|
||||
|
||||
// `content` is populated from the content store (CRDT/DB path) or read
|
||||
// from disk during the filesystem fallback. If it is None (story found in
|
||||
// CRDT but no content-store entry yet), attempt a direct disk read.
|
||||
Some(
|
||||
content
|
||||
.or_else(|| std::fs::read_to_string(&path).ok())
|
||||
.unwrap_or_else(|| {
|
||||
format!("Story {story_id} found in pipeline but its content is unavailable.")
|
||||
}),
|
||||
)
|
||||
// `content` comes from the CRDT / content store. If unavailable, report
|
||||
// it rather than silently reading a stale on-disk copy.
|
||||
Some(content.unwrap_or_else(|| {
|
||||
format!("Story {story_id} found in pipeline but its content is unavailable.")
|
||||
}))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -78,9 +73,16 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn show_command_appears_in_help() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy help");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy help",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("show"), "help should list show command: {output}");
|
||||
assert!(
|
||||
output.contains("show"),
|
||||
"help should list show command: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -167,7 +169,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn show_command_case_insensitive() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy SHOW 1");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy SHOW 1",
|
||||
);
|
||||
assert!(result.is_some(), "SHOW should match case-insensitively");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -119,14 +119,13 @@ fn build_status_from_items(
|
||||
.collect();
|
||||
|
||||
// Read token usage once for all stories to avoid repeated file I/O.
|
||||
let cost_by_story: HashMap<String, f64> =
|
||||
crate::agents::token_usage::read_all(project_root)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.fold(HashMap::new(), |mut map, r| {
|
||||
*map.entry(r.story_id).or_insert(0.0) += r.usage.total_cost_usd;
|
||||
map
|
||||
});
|
||||
let cost_by_story: HashMap<String, f64> = crate::agents::token_usage::read_all(project_root)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.fold(HashMap::new(), |mut map, r| {
|
||||
*map.entry(r.story_id).or_insert(0.0) += r.usage.total_cost_usd;
|
||||
map
|
||||
});
|
||||
|
||||
let config = ProjectConfig::load(project_root).ok();
|
||||
|
||||
@@ -165,10 +164,8 @@ fn build_status_from_items(
|
||||
}
|
||||
|
||||
// Blocked items: Archived { reason: Blocked } shown with 🔴 indicator.
|
||||
let mut blocked_items: Vec<&PipelineItem> = items
|
||||
.iter()
|
||||
.filter(|i| i.stage.is_blocked())
|
||||
.collect();
|
||||
let mut blocked_items: Vec<&PipelineItem> =
|
||||
items.iter().filter(|i| i.stage.is_blocked()).collect();
|
||||
blocked_items.sort_by(|a, b| a.story_id.0.cmp(&b.story_id.0));
|
||||
if !blocked_items.is_empty() {
|
||||
out.push_str(&format!("**Blocked** ({})\n", blocked_items.len()));
|
||||
@@ -231,7 +228,13 @@ fn render_item_line(
|
||||
} else {
|
||||
Some(item.name.as_str())
|
||||
};
|
||||
let display = story_short_label(story_id, name_opt);
|
||||
let frozen = crate::io::story_metadata::is_story_frozen_in_store(story_id);
|
||||
let base_label = story_short_label(story_id, name_opt);
|
||||
let display = if frozen {
|
||||
format!("\u{2744}\u{FE0F} {base_label}") // ❄️ prefix
|
||||
} else {
|
||||
base_label
|
||||
};
|
||||
let cost_suffix = cost_by_story
|
||||
.get(story_id)
|
||||
.filter(|&&c| c > 0.0)
|
||||
@@ -294,13 +297,21 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn status_command_matches() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy status");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy status",
|
||||
);
|
||||
assert!(result.is_some(), "status command should match");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn status_command_returns_pipeline_text() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy status");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy status",
|
||||
);
|
||||
let output = result.unwrap();
|
||||
assert!(
|
||||
output.contains("Pipeline Status"),
|
||||
@@ -310,7 +321,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn status_command_case_insensitive() {
|
||||
let result = super::super::tests::try_cmd_addressed("Timmy", "@timmy:homeserver.local", "@timmy STATUS");
|
||||
let result = super::super::tests::try_cmd_addressed(
|
||||
"Timmy",
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy STATUS",
|
||||
);
|
||||
assert!(result.is_some(), "STATUS should match case-insensitively");
|
||||
}
|
||||
|
||||
@@ -318,7 +333,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn short_label_extracts_number_and_name() {
|
||||
let label = story_short_label("293_story_register_all_bot_commands", Some("Register all bot commands"));
|
||||
let label = story_short_label(
|
||||
"293_story_register_all_bot_commands",
|
||||
Some("Register all bot commands"),
|
||||
);
|
||||
assert_eq!(label, "293 [story] — Register all bot commands");
|
||||
}
|
||||
|
||||
@@ -336,7 +354,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn short_label_does_not_include_underscore_slug() {
|
||||
let label = story_short_label("293_story_register_all_bot_commands_in_the_command_registry", Some("Register all bot commands"));
|
||||
let label = story_short_label(
|
||||
"293_story_register_all_bot_commands_in_the_command_registry",
|
||||
Some("Register all bot commands"),
|
||||
);
|
||||
assert!(
|
||||
!label.contains("story_register"),
|
||||
"label should not contain the slug portion: {label}"
|
||||
@@ -345,19 +366,28 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn short_label_shows_bug_type() {
|
||||
let label = story_short_label("375_bug_default_project_toml", Some("Default project.toml issue"));
|
||||
let label = story_short_label(
|
||||
"375_bug_default_project_toml",
|
||||
Some("Default project.toml issue"),
|
||||
);
|
||||
assert_eq!(label, "375 [bug] — Default project.toml issue");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn short_label_shows_spike_type() {
|
||||
let label = story_short_label("61_spike_filesystem_watcher_architecture", Some("Filesystem watcher architecture"));
|
||||
let label = story_short_label(
|
||||
"61_spike_filesystem_watcher_architecture",
|
||||
Some("Filesystem watcher architecture"),
|
||||
);
|
||||
assert_eq!(label, "61 [spike] — Filesystem watcher architecture");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn short_label_shows_refactor_type() {
|
||||
let label = story_short_label("260_refactor_upgrade_libsqlite3_sys", Some("Upgrade libsqlite3-sys"));
|
||||
let label = story_short_label(
|
||||
"260_refactor_upgrade_libsqlite3_sys",
|
||||
Some("Upgrade libsqlite3-sys"),
|
||||
);
|
||||
assert_eq!(label, "260 [refactor] — Upgrade libsqlite3-sys");
|
||||
}
|
||||
|
||||
@@ -506,7 +536,12 @@ mod tests {
|
||||
// Story 10 depends on story 999, which is NOT in all_items (treated as met)
|
||||
// OR present in backlog (unmet). Let's add dep 999 in Backlog stage (unmet).
|
||||
let items = vec![
|
||||
make_item_with_deps("10_story_waiting", "Waiting Story", Stage::Coding, vec![999]),
|
||||
make_item_with_deps(
|
||||
"10_story_waiting",
|
||||
"Waiting Story",
|
||||
Stage::Coding,
|
||||
vec![999],
|
||||
),
|
||||
make_item("999_story_dep", "Dep Story", Stage::Backlog),
|
||||
];
|
||||
|
||||
@@ -526,11 +561,20 @@ mod tests {
|
||||
|
||||
// Dep 999 is in Done stage — met.
|
||||
let items = vec![
|
||||
make_item_with_deps("10_story_unblocked", "Unblocked Story", Stage::Coding, vec![999]),
|
||||
make_item("999_story_dep", "Dep Story", Stage::Done {
|
||||
merged_at: Utc::now(),
|
||||
merge_commit: crate::pipeline_state::GitSha("abc123".to_string()),
|
||||
}),
|
||||
make_item_with_deps(
|
||||
"10_story_unblocked",
|
||||
"Unblocked Story",
|
||||
Stage::Coding,
|
||||
vec![999],
|
||||
),
|
||||
make_item(
|
||||
"999_story_dep",
|
||||
"Dep Story",
|
||||
Stage::Done {
|
||||
merged_at: Utc::now(),
|
||||
merge_commit: crate::pipeline_state::GitSha("abc123".to_string()),
|
||||
},
|
||||
),
|
||||
];
|
||||
|
||||
let agents = AgentPool::new_test(3000);
|
||||
@@ -678,8 +722,12 @@ mod tests {
|
||||
|
||||
// Must appear under Done, not Backlog.
|
||||
let done_pos = output.find("**Done**").expect("Done section must exist");
|
||||
let backlog_pos = output.find("**Backlog**").expect("Backlog section must exist");
|
||||
let story_pos = output.find("503 [story]").expect("story must appear in output");
|
||||
let backlog_pos = output
|
||||
.find("**Backlog**")
|
||||
.expect("Backlog section must exist");
|
||||
let story_pos = output
|
||||
.find("503 [story]")
|
||||
.expect("story must appear in output");
|
||||
|
||||
assert!(
|
||||
story_pos > done_pos,
|
||||
|
||||
@@ -33,17 +33,13 @@ pub(super) fn handle_triage(ctx: &CommandContext) -> Option<String> {
|
||||
|
||||
match find_story_by_number(num_str) {
|
||||
Some((story_id, item)) => Some(build_triage_dump(ctx, &story_id, &item, num_str)),
|
||||
None => Some(format!(
|
||||
"Story **{num_str}** not found in the pipeline."
|
||||
)),
|
||||
None => Some(format!("Story **{num_str}** not found in the pipeline.")),
|
||||
}
|
||||
}
|
||||
|
||||
/// Find a pipeline item whose numeric prefix matches `num_str` by querying the
|
||||
/// CRDT state. Returns `(story_id, PipelineItem)` for the first match.
|
||||
fn find_story_by_number(
|
||||
num_str: &str,
|
||||
) -> Option<(String, crate::pipeline_state::PipelineItem)> {
|
||||
fn find_story_by_number(num_str: &str) -> Option<(String, crate::pipeline_state::PipelineItem)> {
|
||||
let items = crate::pipeline_state::read_all_typed();
|
||||
for item in items {
|
||||
let file_num = item
|
||||
@@ -74,7 +70,10 @@ fn build_triage_dump(
|
||||
};
|
||||
|
||||
let meta = crate::io::story_metadata::parse_front_matter(&contents).ok();
|
||||
let name = meta.as_ref().and_then(|m| m.name.as_deref()).unwrap_or("(unnamed)");
|
||||
let name = meta
|
||||
.as_ref()
|
||||
.and_then(|m| m.name.as_deref())
|
||||
.unwrap_or("(unnamed)");
|
||||
|
||||
let mut out = String::new();
|
||||
|
||||
@@ -147,10 +146,7 @@ fn build_triage_dump(
|
||||
out.push_str(&format!("**Branch:** `{branch}`\n\n"));
|
||||
|
||||
// ---- git diff --stat ----
|
||||
let diff_stat = run_git(
|
||||
&wt_path,
|
||||
&["diff", "--stat", "master...HEAD"],
|
||||
);
|
||||
let diff_stat = run_git(&wt_path, &["diff", "--stat", "master...HEAD"]);
|
||||
if !diff_stat.is_empty() {
|
||||
out.push_str("**Diff stat (vs master):**\n```\n");
|
||||
out.push_str(&diff_stat);
|
||||
@@ -162,12 +158,7 @@ fn build_triage_dump(
|
||||
// ---- Last 5 commits on feature branch ----
|
||||
let log = run_git(
|
||||
&wt_path,
|
||||
&[
|
||||
"log",
|
||||
"master..HEAD",
|
||||
"--pretty=format:%h %s",
|
||||
"-5",
|
||||
],
|
||||
&["log", "master..HEAD", "--pretty=format:%h %s", "-5"],
|
||||
);
|
||||
if !log.is_empty() {
|
||||
out.push_str("**Recent commits (branch only):**\n```\n");
|
||||
@@ -192,10 +183,15 @@ fn parse_acceptance_criteria(contents: &str) -> Vec<(bool, String)> {
|
||||
.lines()
|
||||
.filter_map(|line| {
|
||||
let trimmed = line.trim();
|
||||
if let Some(text) = trimmed.strip_prefix("- [x] ").or_else(|| trimmed.strip_prefix("- [X] ")) {
|
||||
if let Some(text) = trimmed
|
||||
.strip_prefix("- [x] ")
|
||||
.or_else(|| trimmed.strip_prefix("- [X] "))
|
||||
{
|
||||
Some((true, text.to_string()))
|
||||
} else {
|
||||
trimmed.strip_prefix("- [ ] ").map(|text| (false, text.to_string()))
|
||||
trimmed
|
||||
.strip_prefix("- [ ] ")
|
||||
.map(|text| (false, text.to_string()))
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
@@ -248,7 +244,10 @@ mod tests {
|
||||
#[test]
|
||||
fn whatsup_command_is_not_registered() {
|
||||
let found = super::super::commands().iter().any(|c| c.name == "whatsup");
|
||||
assert!(!found, "whatsup command must not be in the registry (renamed to status)");
|
||||
assert!(
|
||||
!found,
|
||||
"whatsup command must not be in the registry (renamed to status)"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -340,7 +339,10 @@ mod tests {
|
||||
"---\nname: Backlog Item\n---\n",
|
||||
);
|
||||
let output = status_triage_cmd(tmp.path(), "9901").unwrap();
|
||||
assert!(output.contains("9901"), "should show story number: {output}");
|
||||
assert!(
|
||||
output.contains("9901"),
|
||||
"should show story number: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains("Backlog Item"),
|
||||
"should show story name: {output}"
|
||||
@@ -361,7 +363,10 @@ mod tests {
|
||||
"---\nname: QA Item\n---\n",
|
||||
);
|
||||
let output = status_triage_cmd(tmp.path(), "9902").unwrap();
|
||||
assert!(output.contains("9902"), "should show story number: {output}");
|
||||
assert!(
|
||||
output.contains("9902"),
|
||||
"should show story number: {output}"
|
||||
);
|
||||
assert!(
|
||||
output.contains("QA Item"),
|
||||
"should show story name: {output}"
|
||||
@@ -439,7 +444,10 @@ mod tests {
|
||||
output.contains("depends_on") || output.contains("#477"),
|
||||
"should show depends_on field: {output}"
|
||||
);
|
||||
assert!(output.contains("478"), "should list all dependency numbers: {output}");
|
||||
assert!(
|
||||
output.contains("478"),
|
||||
"should list all dependency numbers: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -459,7 +467,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
// -- parse_acceptance_criteria -----------------------------------------
|
||||
|
||||
#[test]
|
||||
@@ -479,5 +486,4 @@ mod tests {
|
||||
let result = parse_acceptance_criteria(input);
|
||||
assert!(result.is_empty());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,9 @@
|
||||
//! and returns a confirmation.
|
||||
|
||||
use super::CommandContext;
|
||||
use crate::io::story_metadata::{clear_front_matter_field, clear_front_matter_field_in_content, parse_front_matter, set_front_matter_field};
|
||||
use crate::io::story_metadata::{
|
||||
clear_front_matter_field_in_content, parse_front_matter, set_front_matter_field,
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
/// Handle the `unblock` command.
|
||||
@@ -31,27 +33,17 @@ pub(super) fn handle_unblock(ctx: &CommandContext) -> Option<String> {
|
||||
/// Returns a Markdown-formatted response string suitable for all transports.
|
||||
/// Also used by the MCP `unblock` tool.
|
||||
///
|
||||
/// Lookup priority: CRDT → content store → filesystem (Story 512).
|
||||
/// Lookup priority: CRDT → content store.
|
||||
pub(crate) fn unblock_by_number(project_root: &Path, story_number: &str) -> String {
|
||||
let (story_id, _stage_dir, path, _content) =
|
||||
let (story_id, _, _, _) =
|
||||
match crate::chat::lookup::find_story_by_number(project_root, story_number) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
return format!(
|
||||
"No story, bug, or spike with number **{story_number}** found."
|
||||
);
|
||||
return format!("No story, bug, or spike with number **{story_number}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
// Prefer DB-backed unblock when the story is in the content store.
|
||||
// Note: `content` may have come from the filesystem fallback in
|
||||
// `find_story_by_number`, so we must re-check the DB rather than
|
||||
// relying on `content.is_some()` alone.
|
||||
if crate::db::read_content(&story_id).is_some() {
|
||||
unblock_by_story_id(&story_id)
|
||||
} else {
|
||||
unblock_by_path(&path, &story_id)
|
||||
}
|
||||
unblock_by_story_id(&story_id)
|
||||
}
|
||||
|
||||
/// Unblock a story using the content store (DB-backed).
|
||||
@@ -71,9 +63,7 @@ fn unblock_by_story_id(story_id: &str) -> String {
|
||||
let has_merge_failure = meta.merge_failure.is_some();
|
||||
|
||||
if !has_blocked && !has_merge_failure {
|
||||
return format!(
|
||||
"**{story_name}** ({story_id}) is not blocked. Nothing to unblock."
|
||||
);
|
||||
return format!("**{story_name}** ({story_id}) is not blocked. Nothing to unblock.");
|
||||
}
|
||||
|
||||
let mut updated = contents;
|
||||
@@ -94,62 +84,16 @@ fn unblock_by_story_id(story_id: &str) -> String {
|
||||
crate::db::write_item_with_content(story_id, &stage, &updated);
|
||||
|
||||
let mut cleared = Vec::new();
|
||||
if has_blocked { cleared.push("blocked"); }
|
||||
if has_merge_failure { cleared.push("merge_failure"); }
|
||||
format!("Unblocked **{story_name}** ({story_id}). Cleared: {}. Retry count reset to 0.", cleared.join(", "))
|
||||
}
|
||||
|
||||
/// Core unblock logic: reset blocked state for a known story file path.
|
||||
///
|
||||
/// Reads front matter, verifies the story is blocked, clears the `blocked`
|
||||
/// flag, and resets `retry_count` to 0. Also used by the MCP `unblock` tool
|
||||
/// when the caller has already resolved the story path from a full `story_id`.
|
||||
pub(crate) fn unblock_by_path(path: &Path, story_id: &str) -> String {
|
||||
let contents = match std::fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(e) => return format!("Failed to read story file: {e}"),
|
||||
};
|
||||
|
||||
let meta = match parse_front_matter(&contents) {
|
||||
Ok(m) => m,
|
||||
Err(e) => return format!("Failed to parse front matter for **{story_id}**: {e}"),
|
||||
};
|
||||
|
||||
let story_name = meta.name.as_deref().unwrap_or(story_id).to_string();
|
||||
|
||||
let has_blocked = meta.blocked == Some(true);
|
||||
let has_merge_failure = meta.merge_failure.is_some();
|
||||
|
||||
if !has_blocked && !has_merge_failure {
|
||||
return format!(
|
||||
"**{story_name}** ({story_id}) is not blocked. Nothing to unblock."
|
||||
);
|
||||
if has_blocked {
|
||||
cleared.push("blocked");
|
||||
}
|
||||
|
||||
// Clear the blocked flag if present.
|
||||
if has_blocked && let Err(e) = clear_front_matter_field(path, "blocked") {
|
||||
return format!("Failed to clear blocked flag on **{story_id}**: {e}");
|
||||
if has_merge_failure {
|
||||
cleared.push("merge_failure");
|
||||
}
|
||||
|
||||
// Clear merge_failure if present.
|
||||
if has_merge_failure && let Err(e) = clear_front_matter_field(path, "merge_failure") {
|
||||
return format!("Failed to clear merge_failure on **{story_id}**: {e}");
|
||||
}
|
||||
|
||||
// Reset retry_count to 0 (re-read the updated file, modify, write).
|
||||
let updated_contents = match std::fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(e) => return format!("Failed to re-read story file after unblocking: {e}"),
|
||||
};
|
||||
let with_retry_reset = set_front_matter_field(&updated_contents, "retry_count", "0");
|
||||
if let Err(e) = std::fs::write(path, &with_retry_reset) {
|
||||
return format!("Failed to reset retry_count on **{story_id}**: {e}");
|
||||
}
|
||||
|
||||
let mut cleared = Vec::new();
|
||||
if has_blocked { cleared.push("blocked"); }
|
||||
if has_merge_failure { cleared.push("merge_failure"); }
|
||||
format!("Unblocked **{story_name}** ({story_id}). Cleared: {}. Retry count reset to 0.", cleared.join(", "))
|
||||
format!(
|
||||
"Unblocked **{story_name}** ({story_id}). Cleared: {}. Retry count reset to 0.",
|
||||
cleared.join(", ")
|
||||
)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -276,7 +220,8 @@ mod tests {
|
||||
let contents = crate::db::read_content("9903_story_stuck")
|
||||
.or_else(|| {
|
||||
std::fs::read_to_string(
|
||||
tmp.path().join(".huskies/work/2_current/9903_story_stuck.md"),
|
||||
tmp.path()
|
||||
.join(".huskies/work/2_current/9903_story_stuck.md"),
|
||||
)
|
||||
.ok()
|
||||
})
|
||||
|
||||
@@ -17,9 +17,7 @@ pub(super) fn handle_unreleased(ctx: &CommandContext) -> Option<String> {
|
||||
|
||||
if commits.is_empty() {
|
||||
let msg = match &tag {
|
||||
Some(t) => format!(
|
||||
"No unreleased stories since the last release tag **{t}**."
|
||||
),
|
||||
Some(t) => format!("No unreleased stories since the last release tag **{t}**."),
|
||||
None => "No release tags found and no story merge commits on master.".to_string(),
|
||||
};
|
||||
return Some(msg);
|
||||
@@ -36,9 +34,7 @@ pub(super) fn handle_unreleased(ctx: &CommandContext) -> Option<String> {
|
||||
|
||||
if stories.is_empty() {
|
||||
let msg = match &tag {
|
||||
Some(t) => format!(
|
||||
"No unreleased stories since the last release tag **{t}**."
|
||||
),
|
||||
Some(t) => format!("No unreleased stories since the last release tag **{t}**."),
|
||||
None => "No release tags found and no story merge commits on master.".to_string(),
|
||||
};
|
||||
return Some(msg);
|
||||
@@ -50,8 +46,7 @@ pub(super) fn handle_unreleased(ctx: &CommandContext) -> Option<String> {
|
||||
None => "**Unreleased stories (no prior release tag):**\n\n".to_string(),
|
||||
};
|
||||
for (num, slug) in &stories {
|
||||
let name = find_story_name(root, &num.to_string())
|
||||
.unwrap_or_else(|| slug_to_name(slug));
|
||||
let name = find_story_name(root, &num.to_string()).unwrap_or_else(|| slug_to_name(slug));
|
||||
out.push_str(&format!("- **{num}** — {name}\n"));
|
||||
}
|
||||
Some(out)
|
||||
@@ -79,10 +74,7 @@ fn find_last_release_tag(root: &std::path::Path) -> Option<String> {
|
||||
|
||||
/// Return the subjects of all `huskies: merge …` commits reachable from HEAD
|
||||
/// but not from `since_tag` (or all commits when `since_tag` is `None`).
|
||||
fn list_merge_commits_since(
|
||||
root: &std::path::Path,
|
||||
since_tag: Option<&str>,
|
||||
) -> Vec<String> {
|
||||
fn list_merge_commits_since(root: &std::path::Path, since_tag: Option<&str>) -> Vec<String> {
|
||||
use std::process::Command;
|
||||
|
||||
let range = match since_tag {
|
||||
@@ -153,7 +145,9 @@ fn find_story_name(root: &std::path::Path, num_str: &str) -> Option<String> {
|
||||
// Try content store first.
|
||||
for id in crate::db::all_content_ids() {
|
||||
let file_num = id.split('_').next().unwrap_or("");
|
||||
if file_num == num_str && let Some(c) = crate::db::read_content(&id) {
|
||||
if file_num == num_str
|
||||
&& let Some(c) = crate::db::read_content(&id)
|
||||
{
|
||||
return crate::io::story_metadata::parse_front_matter(&c)
|
||||
.ok()
|
||||
.and_then(|m| m.name);
|
||||
@@ -162,7 +156,12 @@ fn find_story_name(root: &std::path::Path, num_str: &str) -> Option<String> {
|
||||
|
||||
// Fallback: filesystem scan.
|
||||
const STAGES: &[&str] = &[
|
||||
"1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived",
|
||||
"1_backlog",
|
||||
"2_current",
|
||||
"3_qa",
|
||||
"4_merge",
|
||||
"5_done",
|
||||
"6_archived",
|
||||
];
|
||||
for stage in STAGES {
|
||||
let dir = root.join(".huskies").join("work").join(stage);
|
||||
@@ -225,7 +224,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn unreleased_command_is_registered() {
|
||||
let found = super::super::commands().iter().any(|c| c.name == "unreleased");
|
||||
let found = super::super::commands()
|
||||
.iter()
|
||||
.any(|c| c.name == "unreleased");
|
||||
assert!(found, "unreleased command must be in the registry");
|
||||
}
|
||||
|
||||
@@ -249,7 +250,10 @@ mod tests {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let output = unreleased_cmd_with_root(tmp.path()).unwrap();
|
||||
// Should return some message (not panic), either about no tags or no commits.
|
||||
assert!(!output.is_empty(), "should return a non-empty message: {output}");
|
||||
assert!(
|
||||
!output.is_empty(),
|
||||
"should return a non-empty message: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -261,7 +265,10 @@ mod tests {
|
||||
let output = unreleased_cmd_with_root(repo_root).unwrap();
|
||||
// The response should mention "unreleased" or "no unreleased" — just make
|
||||
// sure it's non-empty and doesn't panic.
|
||||
assert!(!output.is_empty(), "should return a non-empty message: {output}");
|
||||
assert!(
|
||||
!output.is_empty(),
|
||||
"should return a non-empty message: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -271,7 +278,10 @@ mod tests {
|
||||
"@timmy:homeserver.local",
|
||||
"@timmy UNRELEASED",
|
||||
);
|
||||
assert!(result.is_some(), "UNRELEASED should match case-insensitively");
|
||||
assert!(
|
||||
result.is_some(),
|
||||
"UNRELEASED should match case-insensitively"
|
||||
);
|
||||
}
|
||||
|
||||
// -- parse_story_from_subject ------------------------------------------
|
||||
|
||||
@@ -80,7 +80,10 @@ mod tests {
|
||||
fn not_found_returns_none() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let result = find_story_by_number(tmp.path(), "999");
|
||||
assert!(result.is_none(), "should return None when story is not found");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"should return None when story is not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -6,11 +6,11 @@
|
||||
|
||||
pub mod commands;
|
||||
pub(crate) mod lookup;
|
||||
#[cfg(test)]
|
||||
pub(crate) mod test_helpers;
|
||||
pub mod timer;
|
||||
pub mod transport;
|
||||
pub mod util;
|
||||
#[cfg(test)]
|
||||
pub(crate) mod test_helpers;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
@@ -96,8 +96,9 @@ mod tests {
|
||||
fn assert_transport<T: ChatTransport>() {}
|
||||
assert_transport::<crate::chat::transport::slack::SlackTransport>();
|
||||
|
||||
let _: Arc<dyn ChatTransport> =
|
||||
Arc::new(crate::chat::transport::slack::SlackTransport::new("xoxb-test".to_string()));
|
||||
let _: Arc<dyn ChatTransport> = Arc::new(
|
||||
crate::chat::transport::slack::SlackTransport::new("xoxb-test".to_string()),
|
||||
);
|
||||
}
|
||||
|
||||
/// Verify that TwilioWhatsAppTransport satisfies the ChatTransport trait
|
||||
@@ -107,11 +108,12 @@ mod tests {
|
||||
fn assert_transport<T: ChatTransport>() {}
|
||||
assert_transport::<crate::chat::transport::whatsapp::TwilioWhatsAppTransport>();
|
||||
|
||||
let _: Arc<dyn ChatTransport> =
|
||||
Arc::new(crate::chat::transport::whatsapp::TwilioWhatsAppTransport::new(
|
||||
let _: Arc<dyn ChatTransport> = Arc::new(
|
||||
crate::chat::transport::whatsapp::TwilioWhatsAppTransport::new(
|
||||
"ACtest".to_string(),
|
||||
"authtoken".to_string(),
|
||||
"+14155551234".to_string(),
|
||||
));
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+48
-67
@@ -161,10 +161,7 @@ pub(crate) async fn tick_once(
|
||||
}
|
||||
|
||||
let remaining = store.list().len();
|
||||
crate::slog!(
|
||||
"[timer] Tick: {} due, {remaining} remaining",
|
||||
due.len()
|
||||
);
|
||||
crate::slog!("[timer] Tick: {} due, {remaining} remaining", due.len());
|
||||
|
||||
for entry in due {
|
||||
crate::slog!("[timer] Timer fired for story {}", entry.story_id);
|
||||
@@ -287,9 +284,7 @@ pub fn spawn_rate_limit_auto_scheduler(
|
||||
}
|
||||
Ok(_) => {}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Lagged(n)) => {
|
||||
crate::slog!(
|
||||
"[timer] Rate-limit auto-scheduler lagged, skipped {n} events"
|
||||
);
|
||||
crate::slog!("[timer] Rate-limit auto-scheduler lagged, skipped {n} events");
|
||||
}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Closed) => {
|
||||
crate::slog!(
|
||||
@@ -398,44 +393,43 @@ pub async fn handle_timer_command(
|
||||
let story_id = match resolve_story_id(&story_number_or_id, project_root) {
|
||||
Some(id) => id,
|
||||
None => {
|
||||
return format!(
|
||||
"No story with number or ID **{story_number_or_id}** found."
|
||||
);
|
||||
return format!("No story with number or ID **{story_number_or_id}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
// The story must be in backlog or current. When the timer fires,
|
||||
// backlog stories are moved to current automatically.
|
||||
// Check CRDT state first, then fall back to filesystem.
|
||||
let in_valid_stage = if let Ok(Some(item)) = crate::pipeline_state::read_typed(&story_id) {
|
||||
use crate::pipeline_state::Stage;
|
||||
matches!(item.stage, Stage::Backlog | Stage::Coding)
|
||||
} else {
|
||||
let work_dir = project_root.join(".huskies").join("work");
|
||||
work_dir.join("1_backlog").join(format!("{story_id}.md")).exists()
|
||||
|| work_dir.join("2_current").join(format!("{story_id}.md")).exists()
|
||||
};
|
||||
let in_valid_stage =
|
||||
if let Ok(Some(item)) = crate::pipeline_state::read_typed(&story_id) {
|
||||
use crate::pipeline_state::Stage;
|
||||
matches!(item.stage, Stage::Backlog | Stage::Coding)
|
||||
} else {
|
||||
let work_dir = project_root.join(".huskies").join("work");
|
||||
work_dir
|
||||
.join("1_backlog")
|
||||
.join(format!("{story_id}.md"))
|
||||
.exists()
|
||||
|| work_dir
|
||||
.join("2_current")
|
||||
.join(format!("{story_id}.md"))
|
||||
.exists()
|
||||
};
|
||||
if !in_valid_stage {
|
||||
return format!(
|
||||
"Story **{story_id}** is not in backlog or current."
|
||||
);
|
||||
return format!("Story **{story_id}** is not in backlog or current.");
|
||||
}
|
||||
|
||||
let scheduled_at = match next_occurrence_of_hhmm(&hhmm, tz_str) {
|
||||
Some(t) => t,
|
||||
None => {
|
||||
return format!(
|
||||
"Invalid time **{hhmm}**. Use `HH:MM` format (e.g. `14:30`)."
|
||||
);
|
||||
return format!("Invalid time **{hhmm}**. Use `HH:MM` format (e.g. `14:30`).");
|
||||
}
|
||||
};
|
||||
|
||||
match store.add(story_id.clone(), scheduled_at) {
|
||||
Ok(()) => {
|
||||
let (display_time, tz_label) = format_in_timezone(scheduled_at, tz_str);
|
||||
format!(
|
||||
"Timer set for **{story_id}** at **{display_time}** ({tz_label})."
|
||||
)
|
||||
format!("Timer set for **{story_id}** at **{display_time}** ({tz_label}).")
|
||||
}
|
||||
Err(e) => format!("Failed to save timer: {e}"),
|
||||
}
|
||||
@@ -448,11 +442,7 @@ pub async fn handle_timer_command(
|
||||
let mut lines = vec!["**Pending timers:**".to_string()];
|
||||
for t in &timers {
|
||||
let (display_time, _) = format_in_timezone(t.scheduled_at, tz_str);
|
||||
lines.push(format!(
|
||||
"- **{}** → {}",
|
||||
t.story_id,
|
||||
display_time
|
||||
));
|
||||
lines.push(format!("- **{}** → {}", t.story_id, display_time));
|
||||
}
|
||||
lines.join("\n")
|
||||
}
|
||||
@@ -465,13 +455,11 @@ pub async fn handle_timer_command(
|
||||
format!("No timer found for **{story_id}**.")
|
||||
}
|
||||
}
|
||||
TimerCommand::BadArgs => {
|
||||
"Usage:\n\
|
||||
TimerCommand::BadArgs => "Usage:\n\
|
||||
- `timer <story_id> <HH:MM>` — schedule deferred start\n\
|
||||
- `timer list` — show pending timers\n\
|
||||
- `timer cancel <story_id>` — remove a timer"
|
||||
.to_string()
|
||||
}
|
||||
.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -529,10 +517,7 @@ fn format_in_timezone(dt: DateTime<Utc>, timezone: Option<&str>) -> (String, Str
|
||||
match timezone.and_then(|s| s.parse::<Tz>().ok()) {
|
||||
Some(tz) => {
|
||||
let tz_time = dt.with_timezone(&tz);
|
||||
(
|
||||
tz_time.format("%Y-%m-%d %H:%M").to_string(),
|
||||
tz.to_string(),
|
||||
)
|
||||
(tz_time.format("%Y-%m-%d %H:%M").to_string(), tz.to_string())
|
||||
}
|
||||
None => {
|
||||
let local_time = dt.with_timezone(&Local);
|
||||
@@ -571,7 +556,12 @@ fn resolve_story_id(number_or_id: &str, project_root: &Path) -> Option<String> {
|
||||
// --- DB-first lookup ---
|
||||
for id in crate::db::all_content_ids() {
|
||||
let file_num = id.split('_').next().unwrap_or("");
|
||||
if file_num == number_or_id && crate::pipeline_state::read_typed(&id).ok().flatten().is_some() {
|
||||
if file_num == number_or_id
|
||||
&& crate::pipeline_state::read_typed(&id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.is_some()
|
||||
{
|
||||
return Some(id);
|
||||
}
|
||||
}
|
||||
@@ -643,14 +633,20 @@ mod tests {
|
||||
#[test]
|
||||
fn next_occurrence_with_named_timezone_is_in_the_future() {
|
||||
let result = next_occurrence_of_hhmm("14:30", Some("Europe/London")).unwrap();
|
||||
assert!(result > Utc::now(), "next occurrence (Europe/London) must be in the future");
|
||||
assert!(
|
||||
result > Utc::now(),
|
||||
"next occurrence (Europe/London) must be in the future"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn next_occurrence_with_invalid_timezone_falls_back_to_local() {
|
||||
// An unrecognised timezone name falls back to chrono::Local (returns Some).
|
||||
let result = next_occurrence_of_hhmm("14:30", Some("Invalid/Zone"));
|
||||
assert!(result.is_some(), "invalid timezone should fall back to local and return Some");
|
||||
assert!(
|
||||
result.is_some(),
|
||||
"invalid timezone should fall back to local and return Some"
|
||||
);
|
||||
}
|
||||
|
||||
// ── extract_timer_command ───────────────────────────────────────────
|
||||
@@ -679,11 +675,7 @@ mod tests {
|
||||
#[test]
|
||||
fn timer_cancel_story_id() {
|
||||
assert_eq!(
|
||||
extract_timer_command(
|
||||
"Timmy timer cancel 421_story_foo",
|
||||
"Timmy",
|
||||
"@bot:home"
|
||||
),
|
||||
extract_timer_command("Timmy timer cancel 421_story_foo", "Timmy", "@bot:home"),
|
||||
Some(TimerCommand::Cancel {
|
||||
story_number_or_id: "421_story_foo".to_string()
|
||||
})
|
||||
@@ -701,11 +693,7 @@ mod tests {
|
||||
#[test]
|
||||
fn timer_schedule_with_story_id() {
|
||||
assert_eq!(
|
||||
extract_timer_command(
|
||||
"Timmy timer 421_story_foo 14:30",
|
||||
"Timmy",
|
||||
"@bot:home"
|
||||
),
|
||||
extract_timer_command("Timmy timer 421_story_foo 14:30", "Timmy", "@bot:home"),
|
||||
Some(TimerCommand::Schedule {
|
||||
story_number_or_id: "421_story_foo".to_string(),
|
||||
hhmm: "14:30".to_string(),
|
||||
@@ -727,11 +715,7 @@ mod tests {
|
||||
#[test]
|
||||
fn timer_schedule_missing_time_is_bad_args() {
|
||||
assert_eq!(
|
||||
extract_timer_command(
|
||||
"Timmy timer 421_story_foo",
|
||||
"Timmy",
|
||||
"@bot:home"
|
||||
),
|
||||
extract_timer_command("Timmy timer 421_story_foo", "Timmy", "@bot:home"),
|
||||
Some(TimerCommand::BadArgs)
|
||||
);
|
||||
}
|
||||
@@ -944,10 +928,7 @@ mod tests {
|
||||
dir.path(),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
result.contains("No timer found"),
|
||||
"unexpected: {result}"
|
||||
);
|
||||
assert!(result.contains("No timer found"), "unexpected: {result}");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -1014,10 +995,7 @@ mod tests {
|
||||
dir.path(),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
result.contains("Timer set for"),
|
||||
"unexpected: {result}"
|
||||
);
|
||||
assert!(result.contains("Timer set for"), "unexpected: {result}");
|
||||
assert_eq!(store.list().len(), 1);
|
||||
}
|
||||
|
||||
@@ -1111,7 +1089,10 @@ mod tests {
|
||||
"story should be in the content store after timer fires"
|
||||
);
|
||||
// Timer was consumed.
|
||||
assert!(store.list().is_empty(), "fired timer should be removed from store");
|
||||
assert!(
|
||||
store.list().is_empty(),
|
||||
"fired timer should be removed from store"
|
||||
);
|
||||
}
|
||||
|
||||
// ── AC4: tick_once integration test ─────────────────────────────────
|
||||
|
||||
@@ -6,9 +6,9 @@ use std::sync::{Arc, Mutex};
|
||||
use tokio::sync::{Mutex as TokioMutex, oneshot};
|
||||
|
||||
use crate::agents::AgentPool;
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
use crate::chat::util::is_permission_approval;
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::http::context::{PermissionDecision, PermissionForward};
|
||||
use crate::slog;
|
||||
|
||||
@@ -42,8 +42,7 @@ pub struct DiscordContext {
|
||||
/// Permission requests from the MCP `prompt_permission` tool arrive here.
|
||||
pub perm_rx: Arc<TokioMutex<tokio::sync::mpsc::UnboundedReceiver<PermissionForward>>>,
|
||||
/// Pending permission replies keyed by channel ID.
|
||||
pub pending_perm_replies:
|
||||
Arc<TokioMutex<HashMap<String, oneshot::Sender<PermissionDecision>>>>,
|
||||
pub pending_perm_replies: Arc<TokioMutex<HashMap<String, oneshot::Sender<PermissionDecision>>>>,
|
||||
/// Seconds before an unanswered permission prompt is auto-denied.
|
||||
pub permission_timeout_secs: u64,
|
||||
}
|
||||
@@ -135,16 +134,13 @@ pub(super) async fn handle_incoming_message(
|
||||
let total_ticks = (duration_secs as usize) / 2;
|
||||
for tick in 1..=total_ticks {
|
||||
tokio::time::sleep(interval).await;
|
||||
let updated =
|
||||
crate::chat::transport::matrix::htop::build_htop_message(
|
||||
&agents,
|
||||
(tick * 2) as u32,
|
||||
duration_secs,
|
||||
);
|
||||
let updated = crate::chat::transport::matrix::htop::build_htop_message(
|
||||
&agents,
|
||||
(tick * 2) as u32,
|
||||
duration_secs,
|
||||
);
|
||||
let updated = markdown_to_discord(&updated);
|
||||
if let Err(e) =
|
||||
transport.edit_message(&ch, &msg_id, &updated, "").await
|
||||
{
|
||||
if let Err(e) = transport.edit_message(&ch, &msg_id, &updated, "").await {
|
||||
slog!("[discord] Failed to edit htop message: {e}");
|
||||
break;
|
||||
}
|
||||
@@ -320,12 +316,7 @@ pub(super) async fn handle_incoming_message(
|
||||
}
|
||||
|
||||
/// Forward a message to Claude Code and send the response back via Discord.
|
||||
async fn handle_llm_message(
|
||||
ctx: &DiscordContext,
|
||||
channel: &str,
|
||||
user: &str,
|
||||
user_message: &str,
|
||||
) {
|
||||
async fn handle_llm_message(ctx: &DiscordContext, channel: &str, user: &str, user_message: &str) {
|
||||
use crate::chat::util::drain_complete_paragraphs;
|
||||
use crate::llm::providers::claude_code::{ClaudeCodeProvider, ClaudeCodeResult};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
@@ -334,9 +325,7 @@ async fn handle_llm_message(
|
||||
// Look up existing session ID for this channel.
|
||||
let resume_session_id: Option<String> = {
|
||||
let guard = ctx.history.lock().await;
|
||||
guard
|
||||
.get(channel)
|
||||
.and_then(|conv| conv.session_id.clone())
|
||||
guard.get(channel).and_then(|conv| conv.session_id.clone())
|
||||
};
|
||||
|
||||
let bot_name = &ctx.bot_name;
|
||||
@@ -446,9 +435,7 @@ async fn handle_llm_message(
|
||||
let last_text = messages
|
||||
.iter()
|
||||
.rev()
|
||||
.find(|m| {
|
||||
m.role == crate::llm::types::Role::Assistant && !m.content.is_empty()
|
||||
})
|
||||
.find(|m| m.role == crate::llm::types::Role::Assistant && !m.content.is_empty())
|
||||
.map(|m| m.content.clone())
|
||||
.unwrap_or_default();
|
||||
if !last_text.is_empty() {
|
||||
|
||||
@@ -150,8 +150,7 @@ async fn run_gateway(ctx: Arc<DiscordContext>) -> Result<(), String> {
|
||||
.ok_or("Gateway closed before Hello")?
|
||||
.map_err(|e| format!("Gateway read error: {e}"))?;
|
||||
|
||||
let hello_payload: GatewayPayload =
|
||||
parse_ws_message(&hello).ok_or("Failed to parse Hello")?;
|
||||
let hello_payload: GatewayPayload = parse_ws_message(&hello).ok_or("Failed to parse Hello")?;
|
||||
|
||||
if hello_payload.op != OP_HELLO {
|
||||
return Err(format!(
|
||||
@@ -164,8 +163,7 @@ async fn run_gateway(ctx: Arc<DiscordContext>) -> Result<(), String> {
|
||||
serde_json::from_value(hello_payload.d.ok_or("Hello missing data")?)
|
||||
.map_err(|e| format!("Failed to parse Hello data: {e}"))?;
|
||||
|
||||
let heartbeat_interval =
|
||||
std::time::Duration::from_millis(hello_data.heartbeat_interval);
|
||||
let heartbeat_interval = std::time::Duration::from_millis(hello_data.heartbeat_interval);
|
||||
slog!(
|
||||
"[discord] Heartbeat interval: {}ms",
|
||||
hello_data.heartbeat_interval
|
||||
@@ -258,19 +256,12 @@ async fn run_gateway(ctx: Arc<DiscordContext>) -> Result<(), String> {
|
||||
&& let Ok(ready) = serde_json::from_value::<ReadyData>(d)
|
||||
{
|
||||
bot_user_id = Some(ready.user.id.clone());
|
||||
slog!(
|
||||
"[discord] READY — bot user ID: {}",
|
||||
ready.user.id
|
||||
);
|
||||
slog!("[discord] READY — bot user ID: {}", ready.user.id);
|
||||
}
|
||||
}
|
||||
"MESSAGE_CREATE" => {
|
||||
if let Some(d) = payload.d {
|
||||
dispatch_message(
|
||||
Arc::clone(&ctx),
|
||||
d,
|
||||
bot_user_id.clone(),
|
||||
);
|
||||
dispatch_message(Arc::clone(&ctx), d, bot_user_id.clone());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -355,15 +346,11 @@ fn dispatch_message(
|
||||
|
||||
// Check if the bot was mentioned, or if we respond to all messages in
|
||||
// configured channels (ambient mode).
|
||||
let bot_mentioned = bot_user_id.as_ref().is_some_and(|bid| {
|
||||
msg.mentions.iter().any(|m| m.id == *bid)
|
||||
});
|
||||
let bot_mentioned = bot_user_id
|
||||
.as_ref()
|
||||
.is_some_and(|bid| msg.mentions.iter().any(|m| m.id == *bid));
|
||||
|
||||
let in_ambient = ctx
|
||||
.ambient_rooms
|
||||
.lock()
|
||||
.unwrap()
|
||||
.contains(&msg.channel_id);
|
||||
let in_ambient = ctx.ambient_rooms.lock().unwrap().contains(&msg.channel_id);
|
||||
|
||||
if !bot_mentioned && !in_ambient {
|
||||
return;
|
||||
@@ -392,8 +379,7 @@ fn dispatch_message(
|
||||
msg.channel_id
|
||||
);
|
||||
|
||||
commands::handle_incoming_message(&ctx, &msg.channel_id, &author.id, &content)
|
||||
.await;
|
||||
commands::handle_incoming_message(&ctx, &msg.channel_id, &author.id, &content).await;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -417,8 +403,7 @@ mod tests {
|
||||
let json = r#"{"op": 10, "d": {"heartbeat_interval": 41250}}"#;
|
||||
let payload: GatewayPayload = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(payload.op, OP_HELLO);
|
||||
let hello: HelloData =
|
||||
serde_json::from_value(payload.d.unwrap()).unwrap();
|
||||
let hello: HelloData = serde_json::from_value(payload.d.unwrap()).unwrap();
|
||||
assert_eq!(hello.heartbeat_interval, 41250);
|
||||
}
|
||||
|
||||
|
||||
@@ -181,8 +181,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport =
|
||||
DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
let transport = DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.send_message("123456", "hello", "<p>hello</p>")
|
||||
@@ -202,8 +201,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport =
|
||||
DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
let transport = DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
|
||||
let result = transport.send_message("bad", "hello", "").await;
|
||||
assert!(result.is_err());
|
||||
@@ -220,8 +218,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport =
|
||||
DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
let transport = DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.edit_message("123456", "999888777", "updated", "")
|
||||
@@ -240,12 +237,9 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport =
|
||||
DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
let transport = DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.edit_message("123456", "bad", "updated", "")
|
||||
.await;
|
||||
let result = transport.edit_message("123456", "bad", "updated", "").await;
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("404"));
|
||||
}
|
||||
@@ -259,8 +253,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport =
|
||||
DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
let transport = DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
|
||||
assert!(transport.send_typing("123456", true).await.is_ok());
|
||||
}
|
||||
@@ -281,8 +274,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport =
|
||||
DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
let transport = DiscordTransport::with_api_base("test-token".to_string(), server.url());
|
||||
|
||||
let result = transport.send_message("123456", "hello", "").await;
|
||||
assert!(result.is_err());
|
||||
@@ -296,7 +288,6 @@ mod tests {
|
||||
fn assert_transport<T: ChatTransport>() {}
|
||||
assert_transport::<DiscordTransport>();
|
||||
|
||||
let _: Arc<dyn ChatTransport> =
|
||||
Arc::new(DiscordTransport::new("test-token".to_string()));
|
||||
let _: Arc<dyn ChatTransport> = Arc::new(DiscordTransport::new("test-token".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,10 +17,7 @@ use std::path::Path;
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum AssignCommand {
|
||||
/// Assign the story with this number to the given model.
|
||||
Assign {
|
||||
story_number: String,
|
||||
model: String,
|
||||
},
|
||||
Assign { story_number: String, model: String },
|
||||
/// The user typed `assign` but without valid arguments.
|
||||
BadArgs,
|
||||
}
|
||||
@@ -96,9 +93,7 @@ pub async fn handle_assign(
|
||||
match crate::chat::lookup::find_story_by_number(project_root, story_number) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
return format!(
|
||||
"No story, bug, or spike with number **{story_number}** found."
|
||||
);
|
||||
return format!("No story, bug, or spike with number **{story_number}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
@@ -282,11 +277,8 @@ mod tests {
|
||||
fn extract_assign_command_multibyte_prefix_no_panic() {
|
||||
// "xxxx⏺ assign 42 opus" — ⏺ (U+23FA) is 3 bytes, starting at byte 4.
|
||||
// "@timmy" has len 6 so text[..6] lands inside ⏺ — panics without the fix.
|
||||
let cmd = extract_assign_command(
|
||||
"xxxx\u{23FA} assign 42 opus",
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
let cmd =
|
||||
extract_assign_command("xxxx\u{23FA} assign 42 opus", "Timmy", "@timmy:home.local");
|
||||
assert_eq!(cmd, None);
|
||||
}
|
||||
|
||||
@@ -453,7 +445,8 @@ mod tests {
|
||||
);
|
||||
// Should indicate a restart occurred (not just "will be used when starts")
|
||||
assert!(
|
||||
response.to_lowercase().contains("stop") || response.to_lowercase().contains("reassign"),
|
||||
response.to_lowercase().contains("stop")
|
||||
|| response.to_lowercase().contains("reassign"),
|
||||
"response should indicate stop/reassign: {response}"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
//! Matrix bot context — shared state for the Matrix bot (rooms, history, permissions).
|
||||
use crate::agents::AgentPool;
|
||||
use crate::chat::timer::TimerStore;
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::chat::timer::TimerStore;
|
||||
use crate::http::context::{PermissionDecision, PermissionForward};
|
||||
use matrix_sdk::ruma::{OwnedEventId, OwnedRoomId, OwnedUserId};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex as TokioMutex;
|
||||
use tokio::sync::{mpsc, oneshot};
|
||||
use tokio::sync::{RwLock, mpsc, oneshot};
|
||||
|
||||
use super::history::ConversationHistory;
|
||||
|
||||
@@ -59,6 +59,12 @@ pub struct BotContext {
|
||||
pub transport: Arc<dyn ChatTransport>,
|
||||
/// Persistent store for pending deferred-start timers.
|
||||
pub timer_store: Arc<TimerStore>,
|
||||
/// In gateway mode: the currently active project (shared with the gateway HTTP handler).
|
||||
/// `None` in standalone single-project mode.
|
||||
pub gateway_active_project: Option<Arc<RwLock<String>>>,
|
||||
/// In gateway mode: valid project names accepted by the `switch` command.
|
||||
/// Empty in standalone mode.
|
||||
pub gateway_projects: Vec<String>,
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -110,6 +116,8 @@ mod tests {
|
||||
timer_store: Arc::new(crate::chat::timer::TimerStore::load(
|
||||
std::path::PathBuf::from("/tmp/timers.json"),
|
||||
)),
|
||||
gateway_active_project: None,
|
||||
gateway_projects: vec![],
|
||||
};
|
||||
// Clone must work (required by Matrix SDK event handler injection).
|
||||
let _cloned = ctx.clone();
|
||||
|
||||
@@ -104,7 +104,10 @@ mod tests {
|
||||
#[test]
|
||||
fn startup_announcement_uses_configured_display_name_not_hardcoded() {
|
||||
assert_eq!(format_startup_announcement("HAL"), "HAL is online.");
|
||||
assert_eq!(format_startup_announcement("Assistant"), "Assistant is online.");
|
||||
assert_eq!(
|
||||
format_startup_announcement("Assistant"),
|
||||
"Assistant is online."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -71,11 +71,7 @@ pub fn load_history(project_root: &std::path::Path) -> HashMap<OwnedRoomId, Room
|
||||
persisted
|
||||
.rooms
|
||||
.into_iter()
|
||||
.filter_map(|(k, v)| {
|
||||
k.parse::<OwnedRoomId>()
|
||||
.ok()
|
||||
.map(|room_id| (room_id, v))
|
||||
})
|
||||
.filter_map(|(k, v)| k.parse::<OwnedRoomId>().ok().map(|room_id| (room_id, v)))
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
||||
@@ -97,9 +97,7 @@ pub fn is_addressed_to_other(body: &str, bot_user_id: &OwnedUserId, bot_name: &s
|
||||
// Handles both "@localpart" and "@localpart:homeserver" forms.
|
||||
if let Some(rest) = lower.strip_prefix('@') {
|
||||
// Extract everything up to the first whitespace character.
|
||||
let word_end = rest
|
||||
.find(|c: char| c.is_whitespace())
|
||||
.unwrap_or(rest.len());
|
||||
let word_end = rest.find(|c: char| c.is_whitespace()).unwrap_or(rest.len());
|
||||
let mention = &rest[..word_end]; // e.g. "sally" or "sally:example.com"
|
||||
|
||||
// Strip the homeserver part to get just the localpart.
|
||||
|
||||
@@ -82,9 +82,7 @@ pub(super) async fn on_room_message(
|
||||
// Always let "ambient on" through — it is the one command that must work
|
||||
// even when the bot is not mentioned and ambient mode is off, otherwise
|
||||
// there is no way to re-enable ambient mode without an @-mention.
|
||||
let is_ambient_on = body
|
||||
.to_ascii_lowercase()
|
||||
.contains("ambient on");
|
||||
let is_ambient_on = body.to_ascii_lowercase().contains("ambient on");
|
||||
|
||||
if !is_addressed && !is_ambient && !is_ambient_on {
|
||||
slog!(
|
||||
@@ -97,7 +95,9 @@ pub(super) async fn on_room_message(
|
||||
// In ambient mode, ignore messages that are explicitly addressed to a
|
||||
// different entity (e.g. "sally: do X" or "@sally do X" when we are stu).
|
||||
// We still let through messages addressed to us and the "ambient on" command.
|
||||
if is_ambient && !is_addressed && !is_ambient_on
|
||||
if is_ambient
|
||||
&& !is_addressed
|
||||
&& !is_ambient_on
|
||||
&& is_addressed_to_other(&body, &ctx.bot_user_id, &ctx.bot_name)
|
||||
{
|
||||
slog!(
|
||||
@@ -158,7 +158,10 @@ pub(super) async fn on_room_message(
|
||||
"Permission denied."
|
||||
};
|
||||
let html = markdown_to_html(confirmation);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, confirmation, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, confirmation, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -182,9 +185,14 @@ pub(super) async fn on_room_message(
|
||||
ambient_rooms: &ctx.ambient_rooms,
|
||||
room_id: &room_id_str,
|
||||
};
|
||||
if let Some((response, response_html)) = super::super::commands::try_handle_command_with_html(&dispatch, &user_message) {
|
||||
if let Some((response, response_html)) =
|
||||
super::super::commands::try_handle_command_with_html(&dispatch, &user_message)
|
||||
{
|
||||
slog!("[matrix-bot] Handled bot command from {sender}");
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &response_html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &response_html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -224,7 +232,10 @@ pub(super) async fn on_room_message(
|
||||
}
|
||||
};
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -272,9 +283,7 @@ pub(super) async fn on_room_message(
|
||||
) {
|
||||
let response = match del_cmd {
|
||||
super::super::delete::DeleteCommand::Delete { story_number } => {
|
||||
slog!(
|
||||
"[matrix-bot] Handling delete command from {sender}: story {story_number}"
|
||||
);
|
||||
slog!("[matrix-bot] Handling delete command from {sender}: story {story_number}");
|
||||
super::super::delete::handle_delete(
|
||||
&ctx.bot_name,
|
||||
&story_number,
|
||||
@@ -288,7 +297,10 @@ pub(super) async fn on_room_message(
|
||||
}
|
||||
};
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -305,9 +317,7 @@ pub(super) async fn on_room_message(
|
||||
) {
|
||||
let response = match rmtree_cmd {
|
||||
super::super::rmtree::RmtreeCommand::Rmtree { story_number } => {
|
||||
slog!(
|
||||
"[matrix-bot] Handling rmtree command from {sender}: story {story_number}"
|
||||
);
|
||||
slog!("[matrix-bot] Handling rmtree command from {sender}: story {story_number}");
|
||||
super::super::rmtree::handle_rmtree(
|
||||
&ctx.bot_name,
|
||||
&story_number,
|
||||
@@ -321,7 +331,10 @@ pub(super) async fn on_room_message(
|
||||
}
|
||||
};
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -361,7 +374,10 @@ pub(super) async fn on_room_message(
|
||||
}
|
||||
};
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -387,7 +403,10 @@ pub(super) async fn on_room_message(
|
||||
)
|
||||
.await;
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -408,19 +427,22 @@ pub(super) async fn on_room_message(
|
||||
// Acknowledge immediately — the rebuild may take a while or re-exec.
|
||||
let ack = "Rebuilding server… this may take a moment.";
|
||||
let ack_html = markdown_to_html(ack);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, ack, &ack_html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, ack, &ack_html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
}
|
||||
let response = super::super::rebuild::handle_rebuild(
|
||||
&ctx.bot_name,
|
||||
&ctx.project_root,
|
||||
&ctx.agents,
|
||||
)
|
||||
.await;
|
||||
let response =
|
||||
super::super::rebuild::handle_rebuild(&ctx.bot_name, &ctx.project_root, &ctx.agents)
|
||||
.await;
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -428,6 +450,47 @@ pub(super) async fn on_room_message(
|
||||
return;
|
||||
}
|
||||
|
||||
// In gateway mode, handle the "switch <project>" command to change the
|
||||
// active project without invoking the LLM.
|
||||
if let Some(ref active_project) = ctx.gateway_active_project {
|
||||
let stripped = crate::chat::util::strip_bot_mention(
|
||||
&user_message,
|
||||
&ctx.bot_name,
|
||||
ctx.bot_user_id.as_str(),
|
||||
)
|
||||
.trim()
|
||||
.trim_start_matches(|c: char| !c.is_alphanumeric())
|
||||
.to_string();
|
||||
|
||||
let (cmd, arg) = match stripped.split_once(char::is_whitespace) {
|
||||
Some((c, a)) => (c.to_string(), a.trim().to_string()),
|
||||
None => (stripped.clone(), String::new()),
|
||||
};
|
||||
|
||||
if cmd.eq_ignore_ascii_case("switch") {
|
||||
let response = if arg.is_empty() {
|
||||
let available = ctx.gateway_projects.join(", ");
|
||||
format!("Usage: `switch <project>`. Available projects: {available}")
|
||||
} else if ctx.gateway_projects.iter().any(|p| p == &arg) {
|
||||
*active_project.write().await = arg.clone();
|
||||
format!("Switched to project **{arg}**.")
|
||||
} else {
|
||||
let available = ctx.gateway_projects.join(", ");
|
||||
format!("Unknown project `{arg}`. Available: {available}")
|
||||
};
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for the timer command, which requires async file I/O and cannot
|
||||
// be handled by the sync command registry.
|
||||
if let Some(timer_cmd) = crate::chat::timer::extract_timer_command(
|
||||
@@ -443,7 +506,10 @@ pub(super) async fn on_room_message(
|
||||
)
|
||||
.await;
|
||||
let html = markdown_to_html(&response);
|
||||
if let Ok(msg_id) = ctx.transport.send_message(&room_id_str, &response, &html).await
|
||||
if let Ok(msg_id) = ctx
|
||||
.transport
|
||||
.send_message(&room_id_str, &response, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
ctx.bot_sent_event_ids.lock().await.insert(event_id);
|
||||
@@ -470,16 +536,20 @@ pub(super) async fn handle_message(
|
||||
// flattening history into a text prefix.
|
||||
let resume_session_id: Option<String> = {
|
||||
let guard = ctx.history.lock().await;
|
||||
guard
|
||||
.get(&room_id)
|
||||
.and_then(|conv| conv.session_id.clone())
|
||||
guard.get(&room_id).and_then(|conv| conv.session_id.clone())
|
||||
};
|
||||
|
||||
// The prompt is just the current message with sender attribution.
|
||||
// Prior conversation context is carried by the Claude Code session.
|
||||
let bot_name = &ctx.bot_name;
|
||||
let active_project_ctx = if let Some(ref ap) = ctx.gateway_active_project {
|
||||
let name = ap.read().await.clone();
|
||||
format!("[Active project: {name}]\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let prompt = format!(
|
||||
"[Your name is {bot_name}. Refer to yourself as {bot_name}, not Claude.]\n\n{}",
|
||||
"[Your name is {bot_name}. Refer to yourself as {bot_name}, not Claude.]\n{active_project_ctx}\n{}",
|
||||
format_user_prompt(&sender, &user_message)
|
||||
);
|
||||
|
||||
@@ -501,7 +571,9 @@ pub(super) async fn handle_message(
|
||||
let post_task = tokio::spawn(async move {
|
||||
while let Some(chunk) = msg_rx.recv().await {
|
||||
let html = markdown_to_html(&chunk);
|
||||
if let Ok(msg_id) = post_transport.send_message(&post_room_id, &chunk, &html).await
|
||||
if let Ok(msg_id) = post_transport
|
||||
.send_message(&post_room_id, &chunk, &html)
|
||||
.await
|
||||
&& let Ok(event_id) = msg_id.parse()
|
||||
{
|
||||
sent_ids_for_post.lock().await.insert(event_id);
|
||||
@@ -631,9 +703,7 @@ pub(super) async fn handle_message(
|
||||
Err(e) => {
|
||||
slog!("[matrix-bot] LLM error: {e}");
|
||||
let err_msg = if let Some(url) = crate::llm::oauth::extract_login_url_from_error(&e) {
|
||||
format!(
|
||||
"Authentication required. [Click here to log in to Claude]({url})"
|
||||
)
|
||||
format!("Authentication required. [Click here to log in to Claude]({url})")
|
||||
} else {
|
||||
format!("Error processing your request: {e}")
|
||||
};
|
||||
@@ -654,7 +724,11 @@ pub(super) async fn handle_message(
|
||||
let conv = guard.entry(room_id).or_default();
|
||||
|
||||
// Store the session ID so the next turn uses --resume.
|
||||
slog!("[matrix-bot] storing session_id: {:?} (was: {:?})", new_session_id, conv.session_id);
|
||||
slog!(
|
||||
"[matrix-bot] storing session_id: {:?} (was: {:?})",
|
||||
new_session_id,
|
||||
conv.session_id
|
||||
);
|
||||
if new_session_id.is_some() {
|
||||
conv.session_id = new_session_id;
|
||||
}
|
||||
@@ -713,7 +787,10 @@ mod tests {
|
||||
let err = "OAuth session expired or credentials missing. Please log in: http://localhost:3001/oauth/authorize";
|
||||
let url = crate::llm::oauth::extract_login_url_from_error(err);
|
||||
assert!(url.is_some(), "should extract URL from OAuth error");
|
||||
let msg = format!("Authentication required. [Click here to log in to Claude]({})", url.unwrap());
|
||||
let msg = format!(
|
||||
"Authentication required. [Click here to log in to Claude]({})",
|
||||
url.unwrap()
|
||||
);
|
||||
assert!(msg.contains("http://localhost:3001/oauth/authorize"));
|
||||
assert!(msg.contains("[Click here to log in to Claude]"));
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
//! Matrix bot run loop — connects to the homeserver and processes sync events.
|
||||
use crate::agents::AgentPool;
|
||||
use crate::slog;
|
||||
use matrix_sdk::{Client, LoopCtrl, config::SyncSettings};
|
||||
use matrix_sdk::ruma::OwnedRoomId;
|
||||
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
|
||||
use matrix_sdk::{Client, LoopCtrl, config::SyncSettings};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
|
||||
use tokio::sync::Mutex as TokioMutex;
|
||||
use tokio::sync::{mpsc, watch};
|
||||
use tokio::sync::{RwLock, mpsc, watch};
|
||||
|
||||
use super::context::BotContext;
|
||||
use super::format::{format_startup_announcement, markdown_to_html};
|
||||
@@ -19,6 +19,7 @@ use super::verification::{on_room_verification_request, on_to_device_verificatio
|
||||
/// Connect to the Matrix homeserver, join all configured rooms, and start
|
||||
/// listening for messages. Runs the full Matrix sync loop — call from a
|
||||
/// `tokio::spawn` task so it doesn't block the main thread.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn run_bot(
|
||||
config: super::super::config::BotConfig,
|
||||
project_root: PathBuf,
|
||||
@@ -27,6 +28,8 @@ pub async fn run_bot(
|
||||
perm_rx: Arc<TokioMutex<mpsc::UnboundedReceiver<crate::http::context::PermissionForward>>>,
|
||||
agents: Arc<AgentPool>,
|
||||
shutdown_rx: watch::Receiver<Option<crate::rebuild::ShutdownReason>>,
|
||||
gateway_active_project: Option<Arc<RwLock<String>>>,
|
||||
gateway_projects: Vec<String>,
|
||||
) -> Result<(), String> {
|
||||
let store_path = project_root.join(".huskies").join("matrix_store");
|
||||
let client = Client::builder()
|
||||
@@ -73,7 +76,10 @@ pub async fn run_bot(
|
||||
.ok_or_else(|| "No user ID after login".to_string())?
|
||||
.to_owned();
|
||||
|
||||
slog!("[matrix-bot] Logged in as {bot_user_id} (device: {})", login_response.device_id);
|
||||
slog!(
|
||||
"[matrix-bot] Logged in as {bot_user_id} (device: {})",
|
||||
login_response.device_id
|
||||
);
|
||||
|
||||
// Bootstrap cross-signing keys for E2EE verification support.
|
||||
// Pass the bot's password for UIA (User-Interactive Authentication) —
|
||||
@@ -81,9 +87,7 @@ pub async fn run_bot(
|
||||
{
|
||||
use matrix_sdk::ruma::api::client::uiaa;
|
||||
let password_auth = uiaa::AuthData::Password(uiaa::Password::new(
|
||||
uiaa::UserIdentifier::UserIdOrLocalpart(
|
||||
config.username.clone().unwrap_or_default(),
|
||||
),
|
||||
uiaa::UserIdentifier::UserIdOrLocalpart(config.username.clone().unwrap_or_default()),
|
||||
config.password.clone().unwrap_or_default(),
|
||||
));
|
||||
if let Err(e) = client
|
||||
@@ -171,11 +175,7 @@ pub async fn run_bot(
|
||||
);
|
||||
|
||||
// Restore persisted ambient rooms from config.
|
||||
let persisted_ambient: HashSet<String> = config
|
||||
.ambient_rooms
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect();
|
||||
let persisted_ambient: HashSet<String> = config.ambient_rooms.iter().cloned().collect();
|
||||
if !persisted_ambient.is_empty() {
|
||||
slog!(
|
||||
"[matrix-bot] Restored ambient mode for {} room(s): {:?}",
|
||||
@@ -189,11 +189,13 @@ pub async fn run_bot(
|
||||
"whatsapp" => {
|
||||
if config.whatsapp_provider == "twilio" {
|
||||
slog!("[matrix-bot] Using WhatsApp/Twilio transport");
|
||||
Arc::new(crate::chat::transport::whatsapp::TwilioWhatsAppTransport::new(
|
||||
config.twilio_account_sid.clone().unwrap_or_default(),
|
||||
config.twilio_auth_token.clone().unwrap_or_default(),
|
||||
config.twilio_whatsapp_number.clone().unwrap_or_default(),
|
||||
))
|
||||
Arc::new(
|
||||
crate::chat::transport::whatsapp::TwilioWhatsAppTransport::new(
|
||||
config.twilio_account_sid.clone().unwrap_or_default(),
|
||||
config.twilio_auth_token.clone().unwrap_or_default(),
|
||||
config.twilio_whatsapp_number.clone().unwrap_or_default(),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
slog!("[matrix-bot] Using WhatsApp/Meta transport");
|
||||
Arc::new(crate::chat::transport::whatsapp::WhatsAppTransport::new(
|
||||
@@ -208,7 +210,9 @@ pub async fn run_bot(
|
||||
}
|
||||
_ => {
|
||||
slog!("[matrix-bot] Using Matrix transport");
|
||||
Arc::new(super::super::transport_impl::MatrixTransport::new(client.clone()))
|
||||
Arc::new(super::super::transport_impl::MatrixTransport::new(
|
||||
client.clone(),
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
@@ -222,10 +226,7 @@ pub async fn run_bot(
|
||||
project_root.join(".huskies").join("timers.json"),
|
||||
));
|
||||
// Auto-schedule timers when an agent hits a hard rate limit.
|
||||
crate::chat::timer::spawn_rate_limit_auto_scheduler(
|
||||
Arc::clone(&timer_store),
|
||||
watcher_rx_auto,
|
||||
);
|
||||
crate::chat::timer::spawn_rate_limit_auto_scheduler(Arc::clone(&timer_store), watcher_rx_auto);
|
||||
|
||||
let ctx = BotContext {
|
||||
bot_user_id,
|
||||
@@ -244,9 +245,13 @@ pub async fn run_bot(
|
||||
htop_sessions: Arc::new(TokioMutex::new(HashMap::new())),
|
||||
transport: Arc::clone(&transport),
|
||||
timer_store,
|
||||
gateway_active_project,
|
||||
gateway_projects,
|
||||
};
|
||||
|
||||
slog!("[matrix-bot] Cryptographic identity verification is always ON — commands from unencrypted rooms or unverified devices are rejected");
|
||||
slog!(
|
||||
"[matrix-bot] Cryptographic identity verification is always ON — commands from unencrypted rooms or unverified devices are rejected"
|
||||
);
|
||||
|
||||
// Register event handlers and inject shared context.
|
||||
client.add_event_handler_context(ctx);
|
||||
@@ -256,8 +261,7 @@ pub async fn run_bot(
|
||||
|
||||
// Spawn the stage-transition notification listener before entering the
|
||||
// sync loop so it starts receiving watcher events immediately.
|
||||
let notif_room_id_strings: Vec<String> =
|
||||
notif_room_ids.iter().map(|r| r.to_string()).collect();
|
||||
let notif_room_id_strings: Vec<String> = notif_room_ids.iter().map(|r| r.to_string()).collect();
|
||||
super::super::notifications::spawn_notification_listener(
|
||||
Arc::clone(&transport),
|
||||
move || notif_room_id_strings.clone(),
|
||||
@@ -269,8 +273,7 @@ pub async fn run_bot(
|
||||
// configured rooms when the server is about to stop (SIGINT/SIGTERM or rebuild).
|
||||
{
|
||||
let shutdown_transport = Arc::clone(&transport);
|
||||
let shutdown_rooms: Vec<String> =
|
||||
announce_room_ids.iter().map(|r| r.to_string()).collect();
|
||||
let shutdown_rooms: Vec<String> = announce_room_ids.iter().map(|r| r.to_string()).collect();
|
||||
let shutdown_bot_name = announce_bot_name.clone();
|
||||
let mut rx = shutdown_rx;
|
||||
tokio::spawn(async move {
|
||||
@@ -400,8 +403,7 @@ mod tests {
|
||||
#[test]
|
||||
fn io_error_is_not_fatal() {
|
||||
let e: matrix_sdk::Error =
|
||||
std::io::Error::new(std::io::ErrorKind::ConnectionRefused, "connection refused")
|
||||
.into();
|
||||
std::io::Error::new(std::io::ErrorKind::ConnectionRefused, "connection refused").into();
|
||||
assert!(!is_fatal_sync_error(&e));
|
||||
}
|
||||
|
||||
@@ -423,7 +425,11 @@ mod tests {
|
||||
const MAX_BACKOFF_SECS: u64 = 300;
|
||||
let steps: Vec<u64> = std::iter::successors(Some(5u64), |&d| {
|
||||
let next = (d * 2).min(MAX_BACKOFF_SECS);
|
||||
if next < MAX_BACKOFF_SECS { Some(next) } else { None }
|
||||
if next < MAX_BACKOFF_SECS {
|
||||
Some(next)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
// First few steps: 5, 10, 20, 40, 80, 160
|
||||
@@ -433,4 +439,3 @@ mod tests {
|
||||
assert_eq!(steps[3], 40);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -84,8 +84,9 @@ pub(super) async fn on_to_device_verification_request(
|
||||
}
|
||||
break;
|
||||
}
|
||||
VerificationRequestState::Done
|
||||
| VerificationRequestState::Cancelled(_) => break,
|
||||
VerificationRequestState::Done | VerificationRequestState::Cancelled(_) => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -100,10 +101,7 @@ pub(super) async fn on_to_device_verification_request(
|
||||
/// Modern Element sends `m.key.verification.request` as an `m.room.message`
|
||||
/// event rather than a to-device event. We look for that message type and
|
||||
/// drive the same SAS flow as the to-device handler.
|
||||
pub(super) async fn on_room_verification_request(
|
||||
ev: OriginalSyncRoomMessageEvent,
|
||||
client: Client,
|
||||
) {
|
||||
pub(super) async fn on_room_verification_request(ev: OriginalSyncRoomMessageEvent, client: Client) {
|
||||
// Only act on in-room verification request messages.
|
||||
if !matches!(ev.content.msgtype, MessageType::VerificationRequest(_)) {
|
||||
return;
|
||||
@@ -152,8 +150,9 @@ pub(super) async fn on_room_verification_request(
|
||||
}
|
||||
break;
|
||||
}
|
||||
VerificationRequestState::Done
|
||||
| VerificationRequestState::Cancelled(_) => break,
|
||||
VerificationRequestState::Done | VerificationRequestState::Cancelled(_) => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -77,7 +77,6 @@ pub struct BotConfig {
|
||||
|
||||
// ── WhatsApp Business API fields ─────────────────────────────────
|
||||
// These are only required when `transport = "whatsapp"`.
|
||||
|
||||
/// WhatsApp Business phone number ID from the Meta dashboard.
|
||||
#[serde(default)]
|
||||
pub whatsapp_phone_number_id: Option<String>,
|
||||
@@ -105,7 +104,6 @@ pub struct BotConfig {
|
||||
|
||||
// ── Twilio WhatsApp fields ─────────────────────────────────────────
|
||||
// Only required when `transport = "whatsapp"` and `whatsapp_provider = "twilio"`.
|
||||
|
||||
/// Twilio Account SID (starts with `AC`).
|
||||
#[serde(default)]
|
||||
pub twilio_account_sid: Option<String>,
|
||||
@@ -126,7 +124,6 @@ pub struct BotConfig {
|
||||
|
||||
// ── Slack Bot API fields ─────────────────────────────────────────
|
||||
// These are only required when `transport = "slack"`.
|
||||
|
||||
/// Slack Bot User OAuth Token (starts with `xoxb-`).
|
||||
#[serde(default)]
|
||||
pub slack_bot_token: Option<String>,
|
||||
@@ -139,7 +136,6 @@ pub struct BotConfig {
|
||||
|
||||
// ── Discord Bot API fields ──────────────────────────────────────
|
||||
// These are only required when `transport = "discord"`.
|
||||
|
||||
/// Discord bot token from the Discord Developer Portal.
|
||||
#[serde(default)]
|
||||
pub discord_bot_token: Option<String>,
|
||||
@@ -189,21 +185,33 @@ impl BotConfig {
|
||||
if config.transport == "whatsapp" {
|
||||
if config.whatsapp_provider == "twilio" {
|
||||
// Validate Twilio-specific fields.
|
||||
if config.twilio_account_sid.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.twilio_account_sid
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: whatsapp_provider=\"twilio\" requires \
|
||||
twilio_account_sid"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
if config.twilio_auth_token.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.twilio_auth_token
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: whatsapp_provider=\"twilio\" requires \
|
||||
twilio_auth_token"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
if config.twilio_whatsapp_number.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.twilio_whatsapp_number
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: whatsapp_provider=\"twilio\" requires \
|
||||
twilio_whatsapp_number"
|
||||
@@ -212,21 +220,33 @@ impl BotConfig {
|
||||
}
|
||||
} else {
|
||||
// Validate Meta (default) WhatsApp fields.
|
||||
if config.whatsapp_phone_number_id.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.whatsapp_phone_number_id
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"whatsapp\" requires \
|
||||
whatsapp_phone_number_id"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
if config.whatsapp_access_token.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.whatsapp_access_token
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"whatsapp\" requires \
|
||||
whatsapp_access_token"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
if config.whatsapp_verify_token.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.whatsapp_verify_token
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"whatsapp\" requires \
|
||||
whatsapp_verify_token"
|
||||
@@ -243,7 +263,11 @@ impl BotConfig {
|
||||
);
|
||||
return None;
|
||||
}
|
||||
if config.slack_signing_secret.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.slack_signing_secret
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"slack\" requires \
|
||||
slack_signing_secret"
|
||||
@@ -259,7 +283,11 @@ impl BotConfig {
|
||||
}
|
||||
} else if config.transport == "discord" {
|
||||
// Validate Discord-specific fields.
|
||||
if config.discord_bot_token.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
if config
|
||||
.discord_bot_token
|
||||
.as_ref()
|
||||
.is_none_or(|s| s.is_empty())
|
||||
{
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"discord\" requires \
|
||||
discord_bot_token"
|
||||
@@ -276,21 +304,15 @@ impl BotConfig {
|
||||
} else {
|
||||
// Default transport is Matrix — validate Matrix-specific fields.
|
||||
if config.homeserver.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"matrix\" requires homeserver"
|
||||
);
|
||||
eprintln!("[bot] bot.toml: transport=\"matrix\" requires homeserver");
|
||||
return None;
|
||||
}
|
||||
if config.username.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"matrix\" requires username"
|
||||
);
|
||||
eprintln!("[bot] bot.toml: transport=\"matrix\" requires username");
|
||||
return None;
|
||||
}
|
||||
if config.password.as_ref().is_none_or(|s| s.is_empty()) {
|
||||
eprintln!(
|
||||
"[bot] bot.toml: transport=\"matrix\" requires password"
|
||||
);
|
||||
eprintln!("[bot] bot.toml: transport=\"matrix\" requires password");
|
||||
return None;
|
||||
}
|
||||
if config.room_ids.is_empty() {
|
||||
@@ -402,7 +424,10 @@ enabled = true
|
||||
let result = BotConfig::load(tmp.path());
|
||||
assert!(result.is_some());
|
||||
let config = result.unwrap();
|
||||
assert_eq!(config.homeserver.as_deref(), Some("https://matrix.example.com"));
|
||||
assert_eq!(
|
||||
config.homeserver.as_deref(),
|
||||
Some("https://matrix.example.com")
|
||||
);
|
||||
assert_eq!(config.username.as_deref(), Some("@bot:example.com"));
|
||||
assert_eq!(
|
||||
config.effective_room_ids(),
|
||||
@@ -761,18 +786,9 @@ whatsapp_verify_token = "my-verify"
|
||||
.unwrap();
|
||||
let config = BotConfig::load(tmp.path()).unwrap();
|
||||
assert_eq!(config.transport, "whatsapp");
|
||||
assert_eq!(
|
||||
config.whatsapp_phone_number_id.as_deref(),
|
||||
Some("123456")
|
||||
);
|
||||
assert_eq!(
|
||||
config.whatsapp_access_token.as_deref(),
|
||||
Some("EAAtoken")
|
||||
);
|
||||
assert_eq!(
|
||||
config.whatsapp_verify_token.as_deref(),
|
||||
Some("my-verify")
|
||||
);
|
||||
assert_eq!(config.whatsapp_phone_number_id.as_deref(), Some("123456"));
|
||||
assert_eq!(config.whatsapp_access_token.as_deref(), Some("EAAtoken"));
|
||||
assert_eq!(config.whatsapp_verify_token.as_deref(), Some("my-verify"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1106,14 +1122,8 @@ discord_channel_ids = ["123456789012345678"]
|
||||
.unwrap();
|
||||
let config = BotConfig::load(tmp.path()).unwrap();
|
||||
assert_eq!(config.transport, "discord");
|
||||
assert_eq!(
|
||||
config.discord_bot_token.as_deref(),
|
||||
Some("Bot.Token.Here")
|
||||
);
|
||||
assert_eq!(
|
||||
config.discord_channel_ids,
|
||||
vec!["123456789012345678"]
|
||||
);
|
||||
assert_eq!(config.discord_bot_token.as_deref(), Some("Bot.Token.Here"));
|
||||
assert_eq!(config.discord_channel_ids, vec!["123456789012345678"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1176,9 +1186,6 @@ discord_allowed_users = ["111222333", "444555666"]
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
config.discord_allowed_users,
|
||||
vec!["111222333", "444555666"]
|
||||
);
|
||||
assert_eq!(config.discord_allowed_users, vec!["111222333", "444555666"]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,9 +65,7 @@ pub async fn handle_delete(
|
||||
match crate::chat::lookup::find_story_by_number(project_root, story_number) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
return format!(
|
||||
"No story, bug, or spike with number **{story_number}** found."
|
||||
);
|
||||
return format!("No story, bug, or spike with number **{story_number}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -13,9 +13,9 @@ use std::time::Duration;
|
||||
use tokio::sync::{Mutex as TokioMutex, watch};
|
||||
|
||||
use crate::agents::{AgentPool, AgentStatus};
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::chat::util::strip_bot_mention;
|
||||
use crate::slog;
|
||||
use crate::chat::ChatTransport;
|
||||
|
||||
use super::bot::markdown_to_html;
|
||||
|
||||
@@ -51,7 +51,11 @@ pub type HtopSessions = Arc<TokioMutex<HashMap<String, HtopSession>>>;
|
||||
/// - `htop stop` → `Stop`
|
||||
/// - `htop 10m` → `Start { duration_secs: 600 }`
|
||||
/// - `htop 120` → `Start { duration_secs: 120 }` (bare seconds)
|
||||
pub fn extract_htop_command(message: &str, bot_name: &str, bot_user_id: &str) -> Option<HtopCommand> {
|
||||
pub fn extract_htop_command(
|
||||
message: &str,
|
||||
bot_name: &str,
|
||||
bot_user_id: &str,
|
||||
) -> Option<HtopCommand> {
|
||||
let stripped = strip_bot_mention(message, bot_name, bot_user_id);
|
||||
let trimmed = stripped.trim();
|
||||
|
||||
@@ -261,7 +265,10 @@ pub async fn run_htop_loop(
|
||||
let text = build_htop_message(&agents, tick as u32, duration_secs);
|
||||
let html = markdown_to_html(&text);
|
||||
|
||||
if let Err(e) = transport.edit_message(&room_id, &initial_message_id, &text, &html).await {
|
||||
if let Err(e) = transport
|
||||
.edit_message(&room_id, &initial_message_id, &text, &html)
|
||||
.await
|
||||
{
|
||||
slog!("[htop] Failed to update message: {e}");
|
||||
return;
|
||||
}
|
||||
@@ -274,7 +281,10 @@ pub async fn run_htop_loop(
|
||||
async fn send_stopped_message(transport: &dyn ChatTransport, room_id: &str, message_id: &str) {
|
||||
let text = "**htop** — monitoring stopped.";
|
||||
let html = markdown_to_html(text);
|
||||
if let Err(e) = transport.edit_message(room_id, message_id, text, &html).await {
|
||||
if let Err(e) = transport
|
||||
.edit_message(room_id, message_id, text, &html)
|
||||
.await
|
||||
{
|
||||
slog!("[htop] Failed to send stop message: {e}");
|
||||
}
|
||||
}
|
||||
@@ -302,7 +312,10 @@ pub async fn handle_htop_start(
|
||||
// Send the initial message.
|
||||
let initial_text = build_htop_message(&agents, 0, duration_secs);
|
||||
let initial_html = markdown_to_html(&initial_text);
|
||||
let message_id = match transport.send_message(room_id, &initial_text, &initial_html).await {
|
||||
let message_id = match transport
|
||||
.send_message(room_id, &initial_text, &initial_html)
|
||||
.await
|
||||
{
|
||||
Ok(id) => id,
|
||||
Err(e) => {
|
||||
slog!("[htop] Failed to send initial message: {e}");
|
||||
|
||||
@@ -21,11 +21,11 @@ pub mod commands;
|
||||
pub(crate) mod config;
|
||||
pub mod delete;
|
||||
pub mod htop;
|
||||
pub mod notifications;
|
||||
pub mod rebuild;
|
||||
pub mod reset;
|
||||
pub mod rmtree;
|
||||
pub mod start;
|
||||
pub mod notifications;
|
||||
pub mod transport_impl;
|
||||
|
||||
pub use bot::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
@@ -37,7 +37,7 @@ use crate::io::watcher::WatcherEvent;
|
||||
use crate::rebuild::ShutdownReason;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{Mutex as TokioMutex, broadcast, mpsc, watch};
|
||||
use tokio::sync::{Mutex as TokioMutex, RwLock, broadcast, mpsc, watch};
|
||||
|
||||
/// Attempt to start the Matrix bot.
|
||||
///
|
||||
@@ -58,18 +58,24 @@ use tokio::sync::{Mutex as TokioMutex, broadcast, mpsc, watch};
|
||||
/// announce the shutdown to all configured rooms before the process exits.
|
||||
///
|
||||
/// Must be called from within a Tokio runtime context (e.g., from `main`).
|
||||
///
|
||||
/// Returns an [`tokio::task::AbortHandle`] if the bot was actually spawned (Matrix/Discord
|
||||
/// transports), or `None` if the config is absent, disabled, or uses a webhook-based
|
||||
/// transport (Slack/WhatsApp) that does not require a persistent background task.
|
||||
pub fn spawn_bot(
|
||||
project_root: &Path,
|
||||
watcher_tx: broadcast::Sender<WatcherEvent>,
|
||||
perm_rx: Arc<TokioMutex<mpsc::UnboundedReceiver<PermissionForward>>>,
|
||||
agents: Arc<AgentPool>,
|
||||
shutdown_rx: watch::Receiver<Option<ShutdownReason>>,
|
||||
) {
|
||||
gateway_active_project: Option<Arc<RwLock<String>>>,
|
||||
gateway_projects: Vec<String>,
|
||||
) -> Option<tokio::task::AbortHandle> {
|
||||
let config = match BotConfig::load(project_root) {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
crate::slog!("[matrix-bot] bot.toml absent or disabled; Matrix integration skipped");
|
||||
return;
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -79,7 +85,7 @@ pub fn spawn_bot(
|
||||
"[bot] transport={} — skipping Matrix bot; webhooks handle this transport",
|
||||
config.transport
|
||||
);
|
||||
return;
|
||||
return None;
|
||||
}
|
||||
|
||||
crate::slog!(
|
||||
@@ -91,12 +97,22 @@ pub fn spawn_bot(
|
||||
let root = project_root.to_path_buf();
|
||||
let watcher_rx = watcher_tx.subscribe();
|
||||
let watcher_rx_auto = watcher_tx.subscribe();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) =
|
||||
bot::run_bot(config, root, watcher_rx, watcher_rx_auto, perm_rx, agents, shutdown_rx)
|
||||
.await
|
||||
let handle = tokio::spawn(async move {
|
||||
if let Err(e) = bot::run_bot(
|
||||
config,
|
||||
root,
|
||||
watcher_rx,
|
||||
watcher_rx_auto,
|
||||
perm_rx,
|
||||
agents,
|
||||
shutdown_rx,
|
||||
gateway_active_project,
|
||||
gateway_projects,
|
||||
)
|
||||
.await
|
||||
{
|
||||
crate::slog!("[matrix-bot] Fatal error: {e}");
|
||||
}
|
||||
});
|
||||
Some(handle.abort_handle())
|
||||
}
|
||||
|
||||
@@ -3,11 +3,11 @@
|
||||
//! Subscribes to [`WatcherEvent`] broadcasts and posts a notification to all
|
||||
//! configured Matrix rooms whenever a work item moves between pipeline stages.
|
||||
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::config::ProjectConfig;
|
||||
use crate::io::story_metadata::parse_front_matter;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use crate::slog;
|
||||
use crate::chat::ChatTransport;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
@@ -35,16 +35,11 @@ pub fn extract_story_number(item_id: &str) -> Option<&str> {
|
||||
.filter(|s| !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()))
|
||||
}
|
||||
|
||||
/// Read the story name from the work item file's YAML front matter.
|
||||
/// Read the story name from the CRDT content store's YAML front matter.
|
||||
///
|
||||
/// Returns `None` if the file doesn't exist or has no parseable name.
|
||||
pub fn read_story_name(project_root: &Path, stage: &str, item_id: &str) -> Option<String> {
|
||||
let path = project_root
|
||||
.join(".huskies")
|
||||
.join("work")
|
||||
.join(stage)
|
||||
.join(format!("{item_id}.md"));
|
||||
let contents = std::fs::read_to_string(&path).ok()?;
|
||||
/// Returns `None` if the item is not in the content store or has no parseable name.
|
||||
pub fn read_story_name(_project_root: &Path, _stage: &str, item_id: &str) -> Option<String> {
|
||||
let contents = crate::db::read_content(item_id)?;
|
||||
let meta = parse_front_matter(&contents).ok()?;
|
||||
meta.name
|
||||
}
|
||||
@@ -81,24 +76,15 @@ pub fn format_error_notification(
|
||||
let name = story_name.unwrap_or(item_id);
|
||||
|
||||
let plain = format!("\u{274c} #{number} {name} \u{2014} {reason}");
|
||||
let html = format!(
|
||||
"\u{274c} <strong>#{number}</strong> <em>{name}</em> \u{2014} {reason}"
|
||||
);
|
||||
let html = format!("\u{274c} <strong>#{number}</strong> <em>{name}</em> \u{2014} {reason}");
|
||||
(plain, html)
|
||||
}
|
||||
|
||||
/// Search all pipeline stages for a story name.
|
||||
/// Look up a story name from the CRDT content store.
|
||||
///
|
||||
/// Tries each known pipeline stage directory in order and returns the first
|
||||
/// name found. Used for events (like rate-limit warnings) that arrive without
|
||||
/// a known stage.
|
||||
/// Used for events (like rate-limit warnings) that arrive without a known stage.
|
||||
fn find_story_name_any_stage(project_root: &Path, item_id: &str) -> Option<String> {
|
||||
for stage in &["2_current", "3_qa", "4_merge", "1_backlog", "5_done"] {
|
||||
if let Some(name) = read_story_name(project_root, stage, item_id) {
|
||||
return Some(name);
|
||||
}
|
||||
}
|
||||
None
|
||||
read_story_name(project_root, "", item_id)
|
||||
}
|
||||
|
||||
/// Format a blocked-story notification message.
|
||||
@@ -113,9 +99,8 @@ pub fn format_blocked_notification(
|
||||
let name = story_name.unwrap_or(item_id);
|
||||
|
||||
let plain = format!("\u{1f6ab} #{number} {name} \u{2014} BLOCKED: {reason}");
|
||||
let html = format!(
|
||||
"\u{1f6ab} <strong>#{number}</strong> <em>{name}</em> \u{2014} BLOCKED: {reason}"
|
||||
);
|
||||
let html =
|
||||
format!("\u{1f6ab} <strong>#{number}</strong> <em>{name}</em> \u{2014} BLOCKED: {reason}");
|
||||
(plain, html)
|
||||
}
|
||||
|
||||
@@ -126,7 +111,6 @@ const RATE_LIMIT_DEBOUNCE: Duration = Duration::from_secs(60);
|
||||
/// into a single notification (only the final stage is announced).
|
||||
const STAGE_TRANSITION_DEBOUNCE: Duration = Duration::from_millis(200);
|
||||
|
||||
|
||||
/// Format a rate limit warning notification message.
|
||||
///
|
||||
/// Returns `(plain_text, html)` suitable for `ChatTransport::send_message`.
|
||||
@@ -138,9 +122,8 @@ pub fn format_rate_limit_notification(
|
||||
let number = extract_story_number(item_id).unwrap_or(item_id);
|
||||
let name = story_name.unwrap_or(item_id);
|
||||
|
||||
let plain = format!(
|
||||
"\u{26a0}\u{fe0f} #{number} {name} \u{2014} {agent_name} hit an API rate limit"
|
||||
);
|
||||
let plain =
|
||||
format!("\u{26a0}\u{fe0f} #{number} {name} \u{2014} {agent_name} hit an API rate limit");
|
||||
let html = format!(
|
||||
"\u{26a0}\u{fe0f} <strong>#{number}</strong> <em>{name}</em> \u{2014} \
|
||||
{agent_name} hit an API rate limit"
|
||||
@@ -223,9 +206,7 @@ pub fn spawn_notification_listener(
|
||||
// and must be skipped — the old inferred_from_stage fallback
|
||||
// produced wrong notifications for stories that skipped stages
|
||||
// (e.g. "QA → Merge" when QA was never entered).
|
||||
let from_display = from_stage
|
||||
.as_deref()
|
||||
.map(stage_display_name);
|
||||
let from_display = from_stage.as_deref().map(stage_display_name);
|
||||
let Some(from_display) = from_display else {
|
||||
continue; // creation or unknown transition — skip
|
||||
};
|
||||
@@ -246,33 +227,24 @@ pub fn spawn_notification_listener(
|
||||
e.2 = story_name.clone();
|
||||
}
|
||||
})
|
||||
.or_insert_with(|| {
|
||||
(from_display.to_string(), stage.clone(), story_name)
|
||||
});
|
||||
.or_insert_with(|| (from_display.to_string(), stage.clone(), story_name));
|
||||
|
||||
// Start or extend the debounce window.
|
||||
flush_deadline =
|
||||
Some(tokio::time::Instant::now() + STAGE_TRANSITION_DEBOUNCE);
|
||||
flush_deadline = Some(tokio::time::Instant::now() + STAGE_TRANSITION_DEBOUNCE);
|
||||
}
|
||||
Ok(WatcherEvent::MergeFailure {
|
||||
ref story_id,
|
||||
ref reason,
|
||||
}) => {
|
||||
let story_name =
|
||||
read_story_name(&project_root, "4_merge", story_id);
|
||||
let (plain, html) = format_error_notification(
|
||||
story_id,
|
||||
story_name.as_deref(),
|
||||
reason,
|
||||
);
|
||||
let story_name = read_story_name(&project_root, "4_merge", story_id);
|
||||
let (plain, html) =
|
||||
format_error_notification(story_id, story_name.as_deref(), reason);
|
||||
|
||||
slog!("[bot] Sending error notification: {plain}");
|
||||
|
||||
for room_id in &get_room_ids() {
|
||||
if let Err(e) = transport.send_message(room_id, &plain, &html).await {
|
||||
slog!(
|
||||
"[bot] Failed to send error notification to {room_id}: {e}"
|
||||
);
|
||||
slog!("[bot] Failed to send error notification to {room_id}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -303,11 +275,8 @@ pub fn spawn_notification_listener(
|
||||
rate_limit_last_notified.insert(debounce_key, now);
|
||||
|
||||
let story_name = find_story_name_any_stage(&project_root, story_id);
|
||||
let (plain, html) = format_rate_limit_notification(
|
||||
story_id,
|
||||
story_name.as_deref(),
|
||||
agent_name,
|
||||
);
|
||||
let (plain, html) =
|
||||
format_rate_limit_notification(story_id, story_name.as_deref(), agent_name);
|
||||
|
||||
slog!("[bot] Sending rate-limit notification: {plain}");
|
||||
|
||||
@@ -325,19 +294,14 @@ pub fn spawn_notification_listener(
|
||||
ref reason,
|
||||
}) => {
|
||||
let story_name = find_story_name_any_stage(&project_root, story_id);
|
||||
let (plain, html) = format_blocked_notification(
|
||||
story_id,
|
||||
story_name.as_deref(),
|
||||
reason,
|
||||
);
|
||||
let (plain, html) =
|
||||
format_blocked_notification(story_id, story_name.as_deref(), reason);
|
||||
|
||||
slog!("[bot] Sending blocked notification: {plain}");
|
||||
|
||||
for room_id in &get_room_ids() {
|
||||
if let Err(e) = transport.send_message(room_id, &plain, &html).await {
|
||||
slog!(
|
||||
"[bot] Failed to send blocked notification to {room_id}: {e}"
|
||||
);
|
||||
slog!("[bot] Failed to send blocked notification to {room_id}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -362,14 +326,10 @@ pub fn spawn_notification_listener(
|
||||
}
|
||||
Ok(_) => {} // Ignore other events
|
||||
Err(broadcast::error::RecvError::Lagged(n)) => {
|
||||
slog!(
|
||||
"[bot] Notification listener lagged, skipped {n} events"
|
||||
);
|
||||
slog!("[bot] Notification listener lagged, skipped {n} events");
|
||||
}
|
||||
Err(broadcast::error::RecvError::Closed) => {
|
||||
slog!(
|
||||
"[bot] Watcher channel closed, stopping notification listener"
|
||||
);
|
||||
slog!("[bot] Watcher channel closed, stopping notification listener");
|
||||
// Flush any coalesced transitions that haven't fired yet.
|
||||
for (item_id, (from_display, to_stage_key, story_name)) in
|
||||
pending_transitions.drain()
|
||||
@@ -383,12 +343,8 @@ pub fn spawn_notification_listener(
|
||||
);
|
||||
slog!("[bot] Sending stage notification: {plain}");
|
||||
for room_id in &get_room_ids() {
|
||||
if let Err(e) =
|
||||
transport.send_message(room_id, &plain, &html).await
|
||||
{
|
||||
slog!(
|
||||
"[bot] Failed to send notification to {room_id}: {e}"
|
||||
);
|
||||
if let Err(e) = transport.send_message(room_id, &plain, &html).await {
|
||||
slog!("[bot] Failed to send notification to {room_id}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -402,8 +358,8 @@ pub fn spawn_notification_listener(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use async_trait::async_trait;
|
||||
use crate::chat::MessageId;
|
||||
use async_trait::async_trait;
|
||||
|
||||
// ── MockTransport ───────────────────────────────────────────────────────
|
||||
|
||||
@@ -417,18 +373,38 @@ mod tests {
|
||||
impl MockTransport {
|
||||
fn new() -> (Arc<Self>, CallLog) {
|
||||
let calls: CallLog = Arc::new(std::sync::Mutex::new(Vec::new()));
|
||||
(Arc::new(Self { calls: Arc::clone(&calls) }), calls)
|
||||
(
|
||||
Arc::new(Self {
|
||||
calls: Arc::clone(&calls),
|
||||
}),
|
||||
calls,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl crate::chat::ChatTransport for MockTransport {
|
||||
async fn send_message(&self, room_id: &str, plain: &str, html: &str) -> Result<MessageId, String> {
|
||||
self.calls.lock().unwrap().push((room_id.to_string(), plain.to_string(), html.to_string()));
|
||||
async fn send_message(
|
||||
&self,
|
||||
room_id: &str,
|
||||
plain: &str,
|
||||
html: &str,
|
||||
) -> Result<MessageId, String> {
|
||||
self.calls.lock().unwrap().push((
|
||||
room_id.to_string(),
|
||||
plain.to_string(),
|
||||
html.to_string(),
|
||||
));
|
||||
Ok("mock-msg-id".to_string())
|
||||
}
|
||||
|
||||
async fn edit_message(&self, _room_id: &str, _id: &str, _plain: &str, _html: &str) -> Result<(), String> {
|
||||
async fn edit_message(
|
||||
&self,
|
||||
_room_id: &str,
|
||||
_id: &str,
|
||||
_plain: &str,
|
||||
_html: &str,
|
||||
) -> Result<(), String> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -444,13 +420,13 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn rate_limit_warning_sends_notification_with_agent_and_story() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let stage_dir = tmp.path().join(".huskies").join("work").join("2_current");
|
||||
std::fs::create_dir_all(&stage_dir).unwrap();
|
||||
std::fs::write(
|
||||
stage_dir.join("365_story_rate_limit.md"),
|
||||
// Seed story via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"365_story_rate_limit",
|
||||
"2_current",
|
||||
"---\nname: Rate Limit Test Story\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let (watcher_tx, watcher_rx) = broadcast::channel::<WatcherEvent>(16);
|
||||
let (transport, calls) = MockTransport::new();
|
||||
@@ -462,10 +438,12 @@ mod tests {
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "365_story_rate_limit".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "365_story_rate_limit".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
// Give the spawned task time to process the event.
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
@@ -475,9 +453,15 @@ mod tests {
|
||||
let (room_id, plain, _html) = &calls[0];
|
||||
assert_eq!(room_id, "!room123:example.org");
|
||||
assert!(plain.contains("365"), "plain should contain story number");
|
||||
assert!(plain.contains("Rate Limit Test Story"), "plain should contain story name");
|
||||
assert!(
|
||||
plain.contains("Rate Limit Test Story"),
|
||||
"plain should contain story name"
|
||||
);
|
||||
assert!(plain.contains("coder-1"), "plain should contain agent name");
|
||||
assert!(plain.contains("rate limit"), "plain should mention rate limit");
|
||||
assert!(
|
||||
plain.contains("rate limit"),
|
||||
"plain should mention rate limit"
|
||||
);
|
||||
}
|
||||
|
||||
/// AC4: a second RateLimitWarning for the same agent within the debounce
|
||||
@@ -498,16 +482,22 @@ mod tests {
|
||||
|
||||
// Send the same warning twice in rapid succession.
|
||||
for _ in 0..2 {
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_debounce".to_string(),
|
||||
agent_name: "coder-2".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_debounce".to_string(),
|
||||
agent_name: "coder-2".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
let calls = calls.lock().unwrap();
|
||||
assert_eq!(calls.len(), 1, "Debounce should suppress the second notification");
|
||||
assert_eq!(
|
||||
calls.len(),
|
||||
1,
|
||||
"Debounce should suppress the second notification"
|
||||
);
|
||||
}
|
||||
|
||||
/// AC4 (corollary): warnings for different agents are NOT debounced against
|
||||
@@ -526,19 +516,27 @@ mod tests {
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "coder-2".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_foo".to_string(),
|
||||
agent_name: "coder-2".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
let calls = calls.lock().unwrap();
|
||||
assert_eq!(calls.len(), 2, "Different agents should each trigger a notification");
|
||||
assert_eq!(
|
||||
calls.len(),
|
||||
2,
|
||||
"Different agents should each trigger a notification"
|
||||
);
|
||||
}
|
||||
|
||||
// ── dynamic room IDs (WhatsApp ambient_rooms pattern) ───────────────────
|
||||
@@ -550,13 +548,9 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn stage_notification_uses_dynamic_room_ids() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let stage_dir = tmp.path().join(".huskies").join("work").join("3_qa");
|
||||
std::fs::create_dir_all(&stage_dir).unwrap();
|
||||
std::fs::write(
|
||||
stage_dir.join("10_story_foo.md"),
|
||||
"---\nname: Foo Story\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
// Seed story via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content("10_story_foo", "3_qa", "---\nname: Foo Story\n---\n");
|
||||
|
||||
let (watcher_tx, watcher_rx) = broadcast::channel::<WatcherEvent>(16);
|
||||
let (transport, calls) = MockTransport::new();
|
||||
@@ -573,25 +567,40 @@ mod tests {
|
||||
);
|
||||
|
||||
// Add a room after the listener is spawned (simulates a user messaging first).
|
||||
rooms.lock().unwrap().insert("phone:+15551234567".to_string());
|
||||
rooms
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert("phone:+15551234567".to_string());
|
||||
|
||||
watcher_tx.send(WatcherEvent::WorkItem {
|
||||
stage: "3_qa".to_string(),
|
||||
item_id: "10_story_foo".to_string(),
|
||||
action: "qa".to_string(),
|
||||
commit_msg: "huskies: qa 10_story_foo".to_string(),
|
||||
from_stage: Some("2_current".to_string()),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::WorkItem {
|
||||
stage: "3_qa".to_string(),
|
||||
item_id: "10_story_foo".to_string(),
|
||||
action: "qa".to_string(),
|
||||
commit_msg: "huskies: qa 10_story_foo".to_string(),
|
||||
from_stage: Some("2_current".to_string()),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
// Wait longer than STAGE_TRANSITION_DEBOUNCE (200ms) so the coalesced
|
||||
// notification flushes.
|
||||
tokio::time::sleep(std::time::Duration::from_millis(350)).await;
|
||||
|
||||
let calls = calls.lock().unwrap();
|
||||
assert_eq!(calls.len(), 1, "Should deliver to the dynamically added room");
|
||||
assert_eq!(
|
||||
calls.len(),
|
||||
1,
|
||||
"Should deliver to the dynamically added room"
|
||||
);
|
||||
assert_eq!(calls[0].0, "phone:+15551234567");
|
||||
assert!(calls[0].1.contains("10"), "plain should contain story number");
|
||||
assert!(calls[0].1.contains("Foo Story"), "plain should contain story name");
|
||||
assert!(
|
||||
calls[0].1.contains("10"),
|
||||
"plain should contain story number"
|
||||
);
|
||||
assert!(
|
||||
calls[0].1.contains("Foo Story"),
|
||||
"plain should contain story name"
|
||||
);
|
||||
}
|
||||
|
||||
/// When no rooms are registered (e.g. no WhatsApp users have messaged yet),
|
||||
@@ -603,20 +612,17 @@ mod tests {
|
||||
let (watcher_tx, watcher_rx) = broadcast::channel::<WatcherEvent>(16);
|
||||
let (transport, calls) = MockTransport::new();
|
||||
|
||||
spawn_notification_listener(
|
||||
transport,
|
||||
Vec::new,
|
||||
watcher_rx,
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
spawn_notification_listener(transport, Vec::new, watcher_rx, tmp.path().to_path_buf());
|
||||
|
||||
watcher_tx.send(WatcherEvent::WorkItem {
|
||||
stage: "3_qa".to_string(),
|
||||
item_id: "10_story_foo".to_string(),
|
||||
action: "qa".to_string(),
|
||||
commit_msg: "huskies: qa 10_story_foo".to_string(),
|
||||
from_stage: Some("2_current".to_string()),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::WorkItem {
|
||||
stage: "3_qa".to_string(),
|
||||
item_id: "10_story_foo".to_string(),
|
||||
action: "qa".to_string(),
|
||||
commit_msg: "huskies: qa 10_story_foo".to_string(),
|
||||
from_stage: Some("2_current".to_string()),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
@@ -659,46 +665,37 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn read_story_name_reads_from_front_matter() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let stage_dir = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("work")
|
||||
.join("2_current");
|
||||
std::fs::create_dir_all(&stage_dir).unwrap();
|
||||
std::fs::write(
|
||||
stage_dir.join("42_story_my_feature.md"),
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"9942_story_my_feature",
|
||||
"2_current",
|
||||
"---\nname: My Cool Feature\n---\n# Story\n",
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let name = read_story_name(tmp.path(), "2_current", "42_story_my_feature");
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let name = read_story_name(tmp.path(), "2_current", "9942_story_my_feature");
|
||||
assert_eq!(name.as_deref(), Some("My Cool Feature"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_story_name_returns_none_for_missing_file() {
|
||||
crate::db::ensure_content_store();
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let name = read_story_name(tmp.path(), "2_current", "99_story_missing");
|
||||
let name = read_story_name(tmp.path(), "2_current", "99_story_missing_notif_test");
|
||||
assert_eq!(name, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_story_name_returns_none_for_missing_name_field() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let stage_dir = tmp
|
||||
.path()
|
||||
.join(".huskies")
|
||||
.join("work")
|
||||
.join("2_current");
|
||||
std::fs::create_dir_all(&stage_dir).unwrap();
|
||||
std::fs::write(
|
||||
stage_dir.join("42_story_no_name.md"),
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"9943_story_no_name",
|
||||
"2_current",
|
||||
"---\ncoverage_baseline: 50%\n---\n# Story\n",
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let name = read_story_name(tmp.path(), "2_current", "42_story_no_name");
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let name = read_story_name(tmp.path(), "2_current", "9943_story_no_name");
|
||||
assert_eq!(name, None);
|
||||
}
|
||||
|
||||
@@ -706,8 +703,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_error_notification_with_story_name() {
|
||||
let (plain, html) =
|
||||
format_error_notification("262_story_bot_errors", Some("Bot error notifications"), "merge conflict in src/main.rs");
|
||||
let (plain, html) = format_error_notification(
|
||||
"262_story_bot_errors",
|
||||
Some("Bot error notifications"),
|
||||
"merge conflict in src/main.rs",
|
||||
);
|
||||
assert_eq!(
|
||||
plain,
|
||||
"\u{274c} #262 Bot error notifications \u{2014} merge conflict in src/main.rs"
|
||||
@@ -720,12 +720,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_error_notification_without_story_name_falls_back_to_item_id() {
|
||||
let (plain, _html) =
|
||||
format_error_notification("42_bug_fix_thing", None, "tests failed");
|
||||
assert_eq!(
|
||||
plain,
|
||||
"\u{274c} #42 42_bug_fix_thing \u{2014} tests failed"
|
||||
);
|
||||
let (plain, _html) = format_error_notification("42_bug_fix_thing", None, "tests failed");
|
||||
assert_eq!(plain, "\u{274c} #42 42_bug_fix_thing \u{2014} tests failed");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -759,8 +755,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_blocked_notification_falls_back_to_item_id() {
|
||||
let (plain, _html) =
|
||||
format_blocked_notification("42_story_thing", None, "empty diff");
|
||||
let (plain, _html) = format_blocked_notification("42_story_thing", None, "empty diff");
|
||||
assert_eq!(
|
||||
plain,
|
||||
"\u{1f6ab} #42 42_story_thing \u{2014} BLOCKED: empty diff"
|
||||
@@ -774,13 +769,13 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn story_blocked_sends_notification_with_reason() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let stage_dir = tmp.path().join(".huskies").join("work").join("2_current");
|
||||
std::fs::create_dir_all(&stage_dir).unwrap();
|
||||
std::fs::write(
|
||||
stage_dir.join("425_story_blocking_test.md"),
|
||||
// Seed story via CRDT (the only source of truth).
|
||||
crate::db::ensure_content_store();
|
||||
crate::db::write_item_with_content(
|
||||
"425_story_blocking_test",
|
||||
"2_current",
|
||||
"---\nname: Blocking Test Story\n---\n",
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let (watcher_tx, watcher_rx) = broadcast::channel::<WatcherEvent>(16);
|
||||
let (transport, calls) = MockTransport::new();
|
||||
@@ -792,10 +787,12 @@ mod tests {
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
|
||||
watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: "425_story_blocking_test".to_string(),
|
||||
reason: "Retry limit exceeded (3/3) at coder stage".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::StoryBlocked {
|
||||
story_id: "425_story_blocking_test".to_string(),
|
||||
reason: "Retry limit exceeded (3/3) at coder stage".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
@@ -804,10 +801,22 @@ mod tests {
|
||||
let (room_id, plain, html) = &calls[0];
|
||||
assert_eq!(room_id, "!room123:example.org");
|
||||
assert!(plain.contains("425"), "plain should contain story number");
|
||||
assert!(plain.contains("Blocking Test Story"), "plain should contain story name");
|
||||
assert!(plain.contains("BLOCKED"), "plain should contain BLOCKED label");
|
||||
assert!(plain.contains("Retry limit exceeded"), "plain should contain the reason");
|
||||
assert!(html.contains("BLOCKED"), "html should contain BLOCKED label");
|
||||
assert!(
|
||||
plain.contains("Blocking Test Story"),
|
||||
"plain should contain story name"
|
||||
);
|
||||
assert!(
|
||||
plain.contains("BLOCKED"),
|
||||
"plain should contain BLOCKED label"
|
||||
);
|
||||
assert!(
|
||||
plain.contains("Retry limit exceeded"),
|
||||
"plain should contain the reason"
|
||||
);
|
||||
assert!(
|
||||
html.contains("BLOCKED"),
|
||||
"html should contain BLOCKED label"
|
||||
);
|
||||
}
|
||||
|
||||
/// StoryBlocked with no room registered should not panic.
|
||||
@@ -818,17 +827,14 @@ mod tests {
|
||||
let (watcher_tx, watcher_rx) = broadcast::channel::<WatcherEvent>(16);
|
||||
let (transport, calls) = MockTransport::new();
|
||||
|
||||
spawn_notification_listener(
|
||||
transport,
|
||||
Vec::new,
|
||||
watcher_rx,
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
spawn_notification_listener(transport, Vec::new, watcher_rx, tmp.path().to_path_buf());
|
||||
|
||||
watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: "42_story_no_rooms".to_string(),
|
||||
reason: "empty diff".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::StoryBlocked {
|
||||
story_id: "42_story_no_rooms".to_string(),
|
||||
reason: "empty diff".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
@@ -840,11 +846,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_rate_limit_notification_includes_agent_and_story() {
|
||||
let (plain, html) = format_rate_limit_notification(
|
||||
"365_story_my_feature",
|
||||
Some("My Feature"),
|
||||
"coder-2",
|
||||
);
|
||||
let (plain, html) =
|
||||
format_rate_limit_notification("365_story_my_feature", Some("My Feature"), "coder-2");
|
||||
assert_eq!(
|
||||
plain,
|
||||
"\u{26a0}\u{fe0f} #365 My Feature \u{2014} coder-2 hit an API rate limit"
|
||||
@@ -857,8 +860,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_rate_limit_notification_falls_back_to_item_id() {
|
||||
let (plain, _html) =
|
||||
format_rate_limit_notification("42_story_thing", None, "coder-1");
|
||||
let (plain, _html) = format_rate_limit_notification("42_story_thing", None, "coder-1");
|
||||
assert_eq!(
|
||||
plain,
|
||||
"\u{26a0}\u{fe0f} #42 42_story_thing \u{2014} coder-1 hit an API rate limit"
|
||||
@@ -869,12 +871,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_notification_done_stage_includes_party_emoji() {
|
||||
let (plain, html) = format_stage_notification(
|
||||
"353_story_done",
|
||||
Some("Done Story"),
|
||||
"Merge",
|
||||
"Done",
|
||||
);
|
||||
let (plain, html) =
|
||||
format_stage_notification("353_story_done", Some("Done Story"), "Merge", "Done");
|
||||
assert_eq!(
|
||||
plain,
|
||||
"\u{1f389} #353 Done Story \u{2014} Merge \u{2192} Done"
|
||||
@@ -887,12 +885,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_notification_non_done_stage_has_no_emoji() {
|
||||
let (plain, _html) = format_stage_notification(
|
||||
"42_story_thing",
|
||||
Some("Some Story"),
|
||||
"Backlog",
|
||||
"Current",
|
||||
);
|
||||
let (plain, _html) =
|
||||
format_stage_notification("42_story_thing", Some("Some Story"), "Backlog", "Current");
|
||||
assert!(!plain.contains("\u{1f389}"));
|
||||
}
|
||||
|
||||
@@ -916,26 +910,14 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_notification_without_story_name_falls_back_to_item_id() {
|
||||
let (plain, _html) = format_stage_notification(
|
||||
"42_bug_fix_thing",
|
||||
None,
|
||||
"Current",
|
||||
"QA",
|
||||
);
|
||||
assert_eq!(
|
||||
plain,
|
||||
"#42 42_bug_fix_thing \u{2014} Current \u{2192} QA"
|
||||
);
|
||||
let (plain, _html) = format_stage_notification("42_bug_fix_thing", None, "Current", "QA");
|
||||
assert_eq!(plain, "#42 42_bug_fix_thing \u{2014} Current \u{2192} QA");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_notification_non_numeric_id_uses_full_id() {
|
||||
let (plain, _html) = format_stage_notification(
|
||||
"abc_story_thing",
|
||||
Some("Some Story"),
|
||||
"QA",
|
||||
"Merge",
|
||||
);
|
||||
let (plain, _html) =
|
||||
format_stage_notification("abc_story_thing", Some("Some Story"), "QA", "Merge");
|
||||
assert_eq!(
|
||||
plain,
|
||||
"#abc_story_thing Some Story \u{2014} QA \u{2192} Merge"
|
||||
@@ -967,15 +949,21 @@ mod tests {
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_suppress".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_suppress".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
let calls = calls.lock().unwrap();
|
||||
assert_eq!(calls.len(), 0, "RateLimitWarning should be suppressed when rate_limit_notifications = false");
|
||||
assert_eq!(
|
||||
calls.len(),
|
||||
0,
|
||||
"RateLimitWarning should be suppressed when rate_limit_notifications = false"
|
||||
);
|
||||
}
|
||||
|
||||
/// RateLimitHardBlock is never posted to Matrix — it is logged server-side only.
|
||||
@@ -994,11 +982,13 @@ mod tests {
|
||||
);
|
||||
|
||||
let reset_at = chrono::Utc::now() + chrono::Duration::hours(1);
|
||||
watcher_tx.send(WatcherEvent::RateLimitHardBlock {
|
||||
story_id: "42_story_hard_block".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
reset_at,
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitHardBlock {
|
||||
story_id: "42_story_hard_block".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
reset_at,
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
@@ -1028,10 +1018,12 @@ mod tests {
|
||||
tmp.path().to_path_buf(),
|
||||
);
|
||||
|
||||
watcher_tx.send(WatcherEvent::StoryBlocked {
|
||||
story_id: "42_story_blocked".to_string(),
|
||||
reason: "retry limit exceeded".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::StoryBlocked {
|
||||
story_id: "42_story_blocked".to_string(),
|
||||
reason: "retry limit exceeded".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
@@ -1064,10 +1056,12 @@ mod tests {
|
||||
);
|
||||
|
||||
// First warning is sent.
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_reload".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_reload".to_string(),
|
||||
agent_name: "coder-1".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
// Disable notifications and trigger hot-reload.
|
||||
@@ -1080,14 +1074,20 @@ mod tests {
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
// Second warning (different agent to bypass debounce) should be suppressed.
|
||||
watcher_tx.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_reload".to_string(),
|
||||
agent_name: "coder-2".to_string(),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::RateLimitWarning {
|
||||
story_id: "42_story_reload".to_string(),
|
||||
agent_name: "coder-2".to_string(),
|
||||
})
|
||||
.unwrap();
|
||||
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
|
||||
|
||||
let calls = calls.lock().unwrap();
|
||||
assert_eq!(calls.len(), 1, "Only the first warning should be sent; second should be suppressed after hot-reload");
|
||||
assert_eq!(
|
||||
calls.len(),
|
||||
1,
|
||||
"Only the first warning should be sent; second should be suppressed after hot-reload"
|
||||
);
|
||||
}
|
||||
|
||||
// ── Bug 549: synthetic events with from_stage=None must not notify ──────
|
||||
@@ -1111,19 +1111,22 @@ mod tests {
|
||||
);
|
||||
|
||||
// Synthetic reassign event within 4_merge — no actual stage change.
|
||||
watcher_tx.send(WatcherEvent::WorkItem {
|
||||
stage: "4_merge".to_string(),
|
||||
item_id: "549_story_skip_qa".to_string(),
|
||||
action: "reassign".to_string(),
|
||||
commit_msg: String::new(),
|
||||
from_stage: None,
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::WorkItem {
|
||||
stage: "4_merge".to_string(),
|
||||
item_id: "549_story_skip_qa".to_string(),
|
||||
action: "reassign".to_string(),
|
||||
commit_msg: String::new(),
|
||||
from_stage: None,
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(350)).await;
|
||||
|
||||
let calls = calls.lock().unwrap();
|
||||
assert_eq!(
|
||||
calls.len(), 0,
|
||||
calls.len(),
|
||||
0,
|
||||
"Synthetic events with from_stage=None must not generate notifications"
|
||||
);
|
||||
}
|
||||
@@ -1152,13 +1155,15 @@ mod tests {
|
||||
);
|
||||
|
||||
// Story skips QA: from_stage is 2_current, not 3_qa.
|
||||
watcher_tx.send(WatcherEvent::WorkItem {
|
||||
stage: "4_merge".to_string(),
|
||||
item_id: "549_story_skip_qa".to_string(),
|
||||
action: "merge".to_string(),
|
||||
commit_msg: "huskies: merge 549_story_skip_qa".to_string(),
|
||||
from_stage: Some("2_current".to_string()),
|
||||
}).unwrap();
|
||||
watcher_tx
|
||||
.send(WatcherEvent::WorkItem {
|
||||
stage: "4_merge".to_string(),
|
||||
item_id: "549_story_skip_qa".to_string(),
|
||||
action: "merge".to_string(),
|
||||
commit_msg: "huskies: merge 549_story_skip_qa".to_string(),
|
||||
from_stage: Some("2_current".to_string()),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(350)).await;
|
||||
|
||||
|
||||
@@ -73,11 +73,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn extract_with_full_user_id() {
|
||||
let cmd = extract_rebuild_command(
|
||||
"@timmy:home.local rebuild",
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
let cmd =
|
||||
extract_rebuild_command("@timmy:home.local rebuild", "Timmy", "@timmy:home.local");
|
||||
assert_eq!(cmd, Some(RebuildCommand));
|
||||
}
|
||||
|
||||
|
||||
@@ -50,7 +50,9 @@ pub async fn handle_reset(
|
||||
) -> String {
|
||||
{
|
||||
let mut guard = history.lock().await;
|
||||
let conv = guard.entry(room_id.clone()).or_insert_with(RoomConversation::default);
|
||||
let conv = guard
|
||||
.entry(room_id.clone())
|
||||
.or_insert_with(RoomConversation::default);
|
||||
conv.session_id = None;
|
||||
conv.entries.clear();
|
||||
crate::chat::transport::matrix::bot::save_history(project_root, &guard);
|
||||
@@ -75,8 +77,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn extract_with_full_user_id() {
|
||||
let cmd =
|
||||
extract_reset_command("@timmy:home.local reset", "Timmy", "@timmy:home.local");
|
||||
let cmd = extract_reset_command("@timmy:home.local reset", "Timmy", "@timmy:home.local");
|
||||
assert_eq!(cmd, Some(ResetCommand));
|
||||
}
|
||||
|
||||
@@ -115,21 +116,27 @@ mod tests {
|
||||
let room_id: OwnedRoomId = "!test:example.com".parse().unwrap();
|
||||
let history: ConversationHistory = Arc::new(TokioMutex::new({
|
||||
let mut m = HashMap::new();
|
||||
m.insert(room_id.clone(), RoomConversation {
|
||||
session_id: Some("old-session-id".to_string()),
|
||||
entries: vec![ConversationEntry {
|
||||
role: ConversationRole::User,
|
||||
sender: "@alice:example.com".to_string(),
|
||||
content: "previous message".to_string(),
|
||||
}],
|
||||
});
|
||||
m.insert(
|
||||
room_id.clone(),
|
||||
RoomConversation {
|
||||
session_id: Some("old-session-id".to_string()),
|
||||
entries: vec![ConversationEntry {
|
||||
role: ConversationRole::User,
|
||||
sender: "@alice:example.com".to_string(),
|
||||
content: "previous message".to_string(),
|
||||
}],
|
||||
},
|
||||
);
|
||||
m
|
||||
}));
|
||||
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let response = handle_reset("Timmy", &room_id, &history, tmp.path()).await;
|
||||
|
||||
assert!(response.contains("reset"), "response should mention reset: {response}");
|
||||
assert!(
|
||||
response.contains("reset"),
|
||||
"response should mention reset: {response}"
|
||||
);
|
||||
|
||||
let guard = history.lock().await;
|
||||
let conv = guard.get(&room_id).unwrap();
|
||||
|
||||
@@ -107,9 +107,7 @@ pub async fn handle_rmtree(
|
||||
return format!("Failed to remove worktree for story {story_number}: {e}");
|
||||
}
|
||||
|
||||
crate::slog!(
|
||||
"[matrix-bot] rmtree command: removed worktree for {story_id} (bot={bot_name})"
|
||||
);
|
||||
crate::slog!("[matrix-bot] rmtree command: removed worktree for {story_id} (bot={bot_name})");
|
||||
|
||||
let mut response = format!("Removed worktree for **{story_id}**.");
|
||||
if !stopped_agents.is_empty() {
|
||||
@@ -131,11 +129,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn extract_with_full_user_id() {
|
||||
let cmd = extract_rmtree_command(
|
||||
"@timmy:home.local rmtree 42",
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
let cmd =
|
||||
extract_rmtree_command("@timmy:home.local rmtree 42", "Timmy", "@timmy:home.local");
|
||||
assert_eq!(
|
||||
cmd,
|
||||
Some(RmtreeCommand::Rmtree {
|
||||
|
||||
@@ -84,9 +84,7 @@ pub async fn handle_start(
|
||||
match crate::chat::lookup::find_story_by_number(project_root, story_number) {
|
||||
Some(found) => found,
|
||||
None => {
|
||||
return format!(
|
||||
"No story, bug, or spike with number **{story_number}** found."
|
||||
);
|
||||
return format!("No story, bug, or spike with number **{story_number}** found.");
|
||||
}
|
||||
};
|
||||
|
||||
@@ -115,7 +113,13 @@ pub async fn handle_start(
|
||||
);
|
||||
|
||||
match agents
|
||||
.start_agent(project_root, &story_id, resolved_agent.as_deref(), None, None)
|
||||
.start_agent(
|
||||
project_root,
|
||||
&story_id,
|
||||
resolved_agent.as_deref(),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(info) => {
|
||||
@@ -231,7 +235,14 @@ mod tests {
|
||||
async fn handle_start_returns_not_found_for_unknown_number() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let project_root = tmp.path();
|
||||
for stage in &["1_backlog", "2_current", "3_qa", "4_merge", "5_done", "6_archived"] {
|
||||
for stage in &[
|
||||
"1_backlog",
|
||||
"2_current",
|
||||
"3_qa",
|
||||
"4_merge",
|
||||
"5_done",
|
||||
"6_archived",
|
||||
] {
|
||||
std::fs::create_dir_all(project_root.join(".huskies").join("work").join(stage))
|
||||
.unwrap();
|
||||
}
|
||||
@@ -276,7 +287,8 @@ mod tests {
|
||||
"response must not say 'Failed' when coders are busy: {response}"
|
||||
);
|
||||
assert!(
|
||||
response.to_lowercase().contains("queue") || response.to_lowercase().contains("available"),
|
||||
response.to_lowercase().contains("queue")
|
||||
|| response.to_lowercase().contains("available"),
|
||||
"response must mention queued/available state: {response}"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,21 +1,21 @@
|
||||
//! Slack incoming message dispatch and slash command handling.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use tokio::sync::{Mutex as TokioMutex, oneshot};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::format::markdown_to_slack;
|
||||
use super::history::{SlackConversationHistory, save_slack_history};
|
||||
use super::meta::SlackTransport;
|
||||
use crate::agents::AgentPool;
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
use crate::chat::util::is_permission_approval;
|
||||
use crate::slog;
|
||||
use crate::chat::ChatTransport;
|
||||
use crate::http::context::{PermissionDecision, PermissionForward};
|
||||
use super::meta::SlackTransport;
|
||||
use super::history::{SlackConversationHistory, save_slack_history};
|
||||
use super::format::markdown_to_slack;
|
||||
use crate::slog;
|
||||
|
||||
// ── Slash command types ─────────────────────────────────────────────────
|
||||
|
||||
@@ -81,8 +81,7 @@ pub struct SlackWebhookContext {
|
||||
/// Permission requests from the MCP `prompt_permission` tool arrive here.
|
||||
pub perm_rx: Arc<TokioMutex<tokio::sync::mpsc::UnboundedReceiver<PermissionForward>>>,
|
||||
/// Pending permission replies keyed by channel ID.
|
||||
pub pending_perm_replies:
|
||||
Arc<TokioMutex<HashMap<String, oneshot::Sender<PermissionDecision>>>>,
|
||||
pub pending_perm_replies: Arc<TokioMutex<HashMap<String, oneshot::Sender<PermissionDecision>>>>,
|
||||
/// Seconds before an unanswered permission prompt is auto-denied.
|
||||
pub permission_timeout_secs: u64,
|
||||
}
|
||||
@@ -154,8 +153,11 @@ pub(super) async fn handle_incoming_message(
|
||||
}
|
||||
HtopCommand::Start { duration_secs } => {
|
||||
// On Slack, htop uses native message editing for live updates.
|
||||
let snapshot =
|
||||
crate::chat::transport::matrix::htop::build_htop_message(&ctx.agents, 0, duration_secs);
|
||||
let snapshot = crate::chat::transport::matrix::htop::build_htop_message(
|
||||
&ctx.agents,
|
||||
0,
|
||||
duration_secs,
|
||||
);
|
||||
let snapshot = markdown_to_slack(&snapshot);
|
||||
let msg_id = match ctx.transport.send_message(channel, &snapshot, "").await {
|
||||
Ok(id) => id,
|
||||
@@ -179,9 +181,7 @@ pub(super) async fn handle_incoming_message(
|
||||
duration_secs,
|
||||
);
|
||||
let updated = markdown_to_slack(&updated);
|
||||
if let Err(e) =
|
||||
transport.edit_message(&ch, &msg_id, &updated, "").await
|
||||
{
|
||||
if let Err(e) = transport.edit_message(&ch, &msg_id, &updated, "").await {
|
||||
slog!("[slack] Failed to edit htop message: {e}");
|
||||
break;
|
||||
}
|
||||
@@ -245,7 +245,9 @@ pub(super) async fn handle_incoming_message(
|
||||
) {
|
||||
let response = match rmtree_cmd {
|
||||
crate::chat::transport::matrix::rmtree::RmtreeCommand::Rmtree { story_number } => {
|
||||
slog!("[slack] Handling rmtree command from {user} in {channel}: story {story_number}");
|
||||
slog!(
|
||||
"[slack] Handling rmtree command from {user} in {channel}: story {story_number}"
|
||||
);
|
||||
crate::chat::transport::matrix::rmtree::handle_rmtree(
|
||||
&ctx.bot_name,
|
||||
&story_number,
|
||||
@@ -273,7 +275,9 @@ pub(super) async fn handle_incoming_message(
|
||||
slog!("[slack] Handling reset command from {user} in {channel}");
|
||||
{
|
||||
let mut guard = ctx.history.lock().await;
|
||||
let conv = guard.entry(channel.to_string()).or_insert_with(RoomConversation::default);
|
||||
let conv = guard
|
||||
.entry(channel.to_string())
|
||||
.or_insert_with(RoomConversation::default);
|
||||
conv.session_id = None;
|
||||
conv.entries.clear();
|
||||
save_slack_history(&ctx.project_root, &guard);
|
||||
@@ -295,7 +299,9 @@ pub(super) async fn handle_incoming_message(
|
||||
story_number,
|
||||
agent_hint,
|
||||
} => {
|
||||
slog!("[slack] Handling start command from {user} in {channel}: story {story_number}");
|
||||
slog!(
|
||||
"[slack] Handling start command from {user} in {channel}: story {story_number}"
|
||||
);
|
||||
crate::chat::transport::matrix::start::handle_start(
|
||||
&ctx.bot_name,
|
||||
&story_number,
|
||||
@@ -320,8 +326,13 @@ pub(super) async fn handle_incoming_message(
|
||||
&ctx.bot_user_id,
|
||||
) {
|
||||
let response = match assign_cmd {
|
||||
crate::chat::transport::matrix::assign::AssignCommand::Assign { story_number, model } => {
|
||||
slog!("[slack] Handling assign command from {user} in {channel}: story {story_number} model {model}");
|
||||
crate::chat::transport::matrix::assign::AssignCommand::Assign {
|
||||
story_number,
|
||||
model,
|
||||
} => {
|
||||
slog!(
|
||||
"[slack] Handling assign command from {user} in {channel}: story {story_number} model {model}"
|
||||
);
|
||||
crate::chat::transport::matrix::assign::handle_assign(
|
||||
&ctx.bot_name,
|
||||
&story_number,
|
||||
@@ -352,17 +363,15 @@ async fn handle_llm_message(
|
||||
user: &str,
|
||||
user_message: &str,
|
||||
) {
|
||||
use crate::llm::providers::claude_code::{ClaudeCodeProvider, ClaudeCodeResult};
|
||||
use crate::chat::util::drain_complete_paragraphs;
|
||||
use crate::llm::providers::claude_code::{ClaudeCodeProvider, ClaudeCodeResult};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use tokio::sync::watch;
|
||||
|
||||
// Look up existing session ID for this channel.
|
||||
let resume_session_id: Option<String> = {
|
||||
let guard = ctx.history.lock().await;
|
||||
guard
|
||||
.get(channel)
|
||||
.and_then(|conv| conv.session_id.clone())
|
||||
guard.get(channel).and_then(|conv| conv.session_id.clone())
|
||||
};
|
||||
|
||||
let bot_name = &ctx.bot_name;
|
||||
@@ -383,7 +392,9 @@ async fn handle_llm_message(
|
||||
let post_task = tokio::spawn(async move {
|
||||
while let Some(chunk) = msg_rx.recv().await {
|
||||
let formatted = markdown_to_slack(&chunk);
|
||||
let _ = post_transport.send_message(&post_channel, &formatted, "").await;
|
||||
let _ = post_transport
|
||||
.send_message(&post_channel, &formatted, "")
|
||||
.await;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -472,9 +483,7 @@ async fn handle_llm_message(
|
||||
let last_text = messages
|
||||
.iter()
|
||||
.rev()
|
||||
.find(|m| {
|
||||
m.role == crate::llm::types::Role::Assistant && !m.content.is_empty()
|
||||
})
|
||||
.find(|m| m.role == crate::llm::types::Role::Assistant && !m.content.is_empty())
|
||||
.map(|m| m.content.clone())
|
||||
.unwrap_or_default();
|
||||
if !last_text.is_empty() {
|
||||
@@ -559,7 +568,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn slash_command_maps_status() {
|
||||
assert_eq!(slash_command_to_bot_keyword("/huskies-status"), Some("status"));
|
||||
assert_eq!(
|
||||
slash_command_to_bot_keyword("/huskies-status"),
|
||||
Some("status")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -600,9 +612,8 @@ mod tests {
|
||||
response_type: "ephemeral",
|
||||
text: "hello".to_string(),
|
||||
};
|
||||
let json: serde_json::Value = serde_json::from_str(
|
||||
&serde_json::to_string(&resp).unwrap()
|
||||
).unwrap();
|
||||
let json: serde_json::Value =
|
||||
serde_json::from_str(&serde_json::to_string(&resp).unwrap()).unwrap();
|
||||
assert_eq!(json["response_type"], "ephemeral");
|
||||
assert_eq!(json["text"], "hello");
|
||||
}
|
||||
@@ -642,7 +653,10 @@ mod tests {
|
||||
};
|
||||
|
||||
let result = try_handle_command(&dispatch, &synthetic);
|
||||
assert!(result.is_some(), "status slash command should produce output via registry");
|
||||
assert!(
|
||||
result.is_some(),
|
||||
"status slash command should produce output via registry"
|
||||
);
|
||||
assert!(result.unwrap().contains("Pipeline Status"));
|
||||
}
|
||||
|
||||
@@ -671,7 +685,10 @@ mod tests {
|
||||
let result = try_handle_command(&dispatch, &synthetic);
|
||||
assert!(result.is_some(), "show slash command should produce output");
|
||||
let output = result.unwrap();
|
||||
assert!(output.contains("999"), "show output should reference the story number: {output}");
|
||||
assert!(
|
||||
output.contains("999"),
|
||||
"show output should reference the story number: {output}"
|
||||
);
|
||||
}
|
||||
|
||||
// ── rebuild command extraction ─────────────────────────────────────
|
||||
@@ -704,7 +721,10 @@ mod tests {
|
||||
"Huskies",
|
||||
"slack-bot",
|
||||
);
|
||||
assert!(result.is_none(), "'status' should not be recognised as rebuild");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"'status' should not be recognised as rebuild"
|
||||
);
|
||||
}
|
||||
|
||||
// ── reset command extraction ───────────────────────────────────────
|
||||
@@ -731,21 +751,26 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn reset_command_clears_slack_session() {
|
||||
use crate::chat::transport::matrix::{
|
||||
ConversationEntry, ConversationRole, RoomConversation,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex as TokioMutex;
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
|
||||
let channel = "C01ABCDEF";
|
||||
let history: SlackConversationHistory = Arc::new(TokioMutex::new({
|
||||
let mut m = HashMap::new();
|
||||
m.insert(channel.to_string(), RoomConversation {
|
||||
session_id: Some("old-session".to_string()),
|
||||
entries: vec![ConversationEntry {
|
||||
role: ConversationRole::User,
|
||||
sender: "U01GHIJKL".to_string(),
|
||||
content: "previous message".to_string(),
|
||||
}],
|
||||
});
|
||||
m.insert(
|
||||
channel.to_string(),
|
||||
RoomConversation {
|
||||
session_id: Some("old-session".to_string()),
|
||||
entries: vec![ConversationEntry {
|
||||
role: ConversationRole::User,
|
||||
sender: "U01GHIJKL".to_string(),
|
||||
content: "previous message".to_string(),
|
||||
}],
|
||||
},
|
||||
);
|
||||
m
|
||||
}));
|
||||
|
||||
@@ -755,7 +780,9 @@ mod tests {
|
||||
|
||||
{
|
||||
let mut guard = history.lock().await;
|
||||
let conv = guard.entry(channel.to_string()).or_insert_with(RoomConversation::default);
|
||||
let conv = guard
|
||||
.entry(channel.to_string())
|
||||
.or_insert_with(RoomConversation::default);
|
||||
conv.session_id = None;
|
||||
conv.entries.clear();
|
||||
save_slack_history(tmp.path(), &guard);
|
||||
@@ -862,6 +889,9 @@ mod tests {
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
assert!(result.is_none(), "'status' should not be recognised as assign on Slack");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"'status' should not be recognised as assign on Slack"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,10 +20,8 @@ pub fn markdown_to_slack(text: &str) -> String {
|
||||
LazyLock::new(|| Regex::new(r"(?m)^#{1,6}\s+(.+)$").unwrap());
|
||||
static RE_BOLD_ITALIC: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"\*\*\*(.+?)\*\*\*").unwrap());
|
||||
static RE_BOLD: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"\*\*(.+?)\*\*").unwrap());
|
||||
static RE_STRIKETHROUGH: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"~~(.+?)~~").unwrap());
|
||||
static RE_BOLD: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\*\*(.+?)\*\*").unwrap());
|
||||
static RE_STRIKETHROUGH: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"~~(.+?)~~").unwrap());
|
||||
static RE_LINK: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap());
|
||||
|
||||
@@ -105,8 +103,14 @@ mod tests {
|
||||
fn slack_fenced_code_block_preserved() {
|
||||
let input = "```rust\nlet x = 1;\n```";
|
||||
let output = markdown_to_slack(input);
|
||||
assert!(output.contains("let x = 1;"), "code block content must be preserved");
|
||||
assert!(output.contains("```"), "fenced code delimiters must be preserved");
|
||||
assert!(
|
||||
output.contains("let x = 1;"),
|
||||
"code block content must be preserved"
|
||||
);
|
||||
assert!(
|
||||
output.contains("```"),
|
||||
"fenced code delimiters must be preserved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -104,9 +104,8 @@ impl ChatTransport for SlackTransport {
|
||||
return Err(format!("Slack API returned {status}: {resp_text}"));
|
||||
}
|
||||
|
||||
let parsed: SlackApiResponse = serde_json::from_str(&resp_text).map_err(|e| {
|
||||
format!("Failed to parse Slack API response: {e} — body: {resp_text}")
|
||||
})?;
|
||||
let parsed: SlackApiResponse = serde_json::from_str(&resp_text)
|
||||
.map_err(|e| format!("Failed to parse Slack API response: {e} — body: {resp_text}"))?;
|
||||
|
||||
if !parsed.ok {
|
||||
return Err(format!(
|
||||
@@ -190,10 +189,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport = SlackTransport::with_api_base(
|
||||
"xoxb-test-token".to_string(),
|
||||
server.url(),
|
||||
);
|
||||
let transport = SlackTransport::with_api_base("xoxb-test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.send_message("C01ABCDEF", "hello", "<p>hello</p>")
|
||||
@@ -212,14 +208,9 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport = SlackTransport::with_api_base(
|
||||
"xoxb-test-token".to_string(),
|
||||
server.url(),
|
||||
);
|
||||
let transport = SlackTransport::with_api_base("xoxb-test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.send_message("C_INVALID", "hello", "")
|
||||
.await;
|
||||
let result = transport.send_message("C_INVALID", "hello", "").await;
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result.unwrap_err().contains("channel_not_found"),
|
||||
@@ -237,10 +228,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport = SlackTransport::with_api_base(
|
||||
"xoxb-test-token".to_string(),
|
||||
server.url(),
|
||||
);
|
||||
let transport = SlackTransport::with_api_base("xoxb-test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.edit_message("C01ABCDEF", "1234567890.123456", "updated", "")
|
||||
@@ -258,10 +246,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport = SlackTransport::with_api_base(
|
||||
"xoxb-test-token".to_string(),
|
||||
server.url(),
|
||||
);
|
||||
let transport = SlackTransport::with_api_base("xoxb-test-token".to_string(), server.url());
|
||||
|
||||
let result = transport
|
||||
.edit_message("C01ABCDEF", "bad-ts", "updated", "")
|
||||
@@ -287,10 +272,7 @@ mod tests {
|
||||
.create_async()
|
||||
.await;
|
||||
|
||||
let transport = SlackTransport::with_api_base(
|
||||
"xoxb-test-token".to_string(),
|
||||
server.url(),
|
||||
);
|
||||
let transport = SlackTransport::with_api_base("xoxb-test-token".to_string(), server.url());
|
||||
|
||||
let result = transport.send_message("C01ABCDEF", "hello", "").await;
|
||||
assert!(result.is_err());
|
||||
|
||||
@@ -12,15 +12,15 @@ pub mod history;
|
||||
pub mod meta;
|
||||
pub mod verify;
|
||||
|
||||
pub use commands::SlackWebhookContext;
|
||||
pub use format::markdown_to_slack;
|
||||
pub use history::load_slack_history;
|
||||
pub use meta::SlackTransport;
|
||||
pub use format::markdown_to_slack;
|
||||
pub use commands::SlackWebhookContext;
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
use poem::{Request, Response, handler, http::StatusCode};
|
||||
use crate::slog;
|
||||
use poem::{Request, Response, handler, http::StatusCode};
|
||||
|
||||
// ── Slack Events API types ──────────────────────────────────────────────
|
||||
|
||||
@@ -71,10 +71,7 @@ pub async fn webhook_receive(
|
||||
.header("X-Slack-Request-Timestamp")
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let signature = req
|
||||
.header("X-Slack-Signature")
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let signature = req.header("X-Slack-Signature").unwrap_or("").to_string();
|
||||
|
||||
let bytes = match body.into_bytes().await {
|
||||
Ok(b) => b,
|
||||
@@ -98,9 +95,7 @@ pub async fn webhook_receive(
|
||||
Ok(e) => e,
|
||||
Err(e) => {
|
||||
slog!("[slack] Failed to parse webhook payload: {e}");
|
||||
return Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.body("ok");
|
||||
return Response::builder().status(StatusCode::OK).body("ok");
|
||||
}
|
||||
};
|
||||
|
||||
@@ -124,8 +119,7 @@ pub async fn webhook_receive(
|
||||
&& event.r#type.as_deref() == Some("message")
|
||||
&& event.subtype.is_none()
|
||||
&& event.bot_id.is_none()
|
||||
&& let (Some(channel), Some(user), Some(text)) =
|
||||
(event.channel, event.user, event.text)
|
||||
&& let (Some(channel), Some(user), Some(text)) = (event.channel, event.user, event.text)
|
||||
&& ctx.channel_ids.contains(&channel)
|
||||
{
|
||||
let ctx = Arc::clone(*ctx);
|
||||
@@ -135,9 +129,7 @@ pub async fn webhook_receive(
|
||||
});
|
||||
}
|
||||
|
||||
Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.body("ok")
|
||||
Response::builder().status(StatusCode::OK).body("ok")
|
||||
}
|
||||
|
||||
/// POST /webhook/slack/command — receive incoming Slack slash commands.
|
||||
@@ -155,10 +147,7 @@ pub async fn slash_command_receive(
|
||||
.header("X-Slack-Request-Timestamp")
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let signature = req
|
||||
.header("X-Slack-Signature")
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let signature = req.header("X-Slack-Signature").unwrap_or("").to_string();
|
||||
|
||||
let bytes = match body.into_bytes().await {
|
||||
Ok(b) => b,
|
||||
@@ -178,16 +167,15 @@ pub async fn slash_command_receive(
|
||||
.body("Invalid signature");
|
||||
}
|
||||
|
||||
let payload: commands::SlackSlashCommandPayload =
|
||||
match serde_urlencoded::from_bytes(&bytes) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
slog!("[slack] Failed to parse slash command payload: {e}");
|
||||
return Response::builder()
|
||||
.status(StatusCode::BAD_REQUEST)
|
||||
.body("Bad request");
|
||||
}
|
||||
};
|
||||
let payload: commands::SlackSlashCommandPayload = match serde_urlencoded::from_bytes(&bytes) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
slog!("[slack] Failed to parse slash command payload: {e}");
|
||||
return Response::builder()
|
||||
.status(StatusCode::BAD_REQUEST)
|
||||
.body("Bad request");
|
||||
}
|
||||
};
|
||||
|
||||
slog!(
|
||||
"[slack] Slash command from {}: {} {}",
|
||||
|
||||
@@ -215,7 +215,12 @@ mod tests {
|
||||
let body = b"test body";
|
||||
|
||||
let sig = compute_test_signature("correct-secret", timestamp, body);
|
||||
assert!(!verify_slack_signature("wrong-secret", timestamp, body, &sig));
|
||||
assert!(!verify_slack_signature(
|
||||
"wrong-secret",
|
||||
timestamp,
|
||||
body,
|
||||
&sig
|
||||
));
|
||||
}
|
||||
|
||||
/// Helper to compute a test signature using our sha256 + HMAC implementation.
|
||||
|
||||
@@ -1,22 +1,24 @@
|
||||
//! WhatsApp command handling — processes incoming WhatsApp messages as bot commands.
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
use crate::chat::util::is_permission_approval;
|
||||
use crate::http::context::{PermissionDecision};
|
||||
use crate::slog;
|
||||
use super::WhatsAppWebhookContext;
|
||||
use super::format::{chunk_for_whatsapp, markdown_to_whatsapp};
|
||||
use super::history::save_whatsapp_history;
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
use crate::chat::util::is_permission_approval;
|
||||
use crate::http::context::PermissionDecision;
|
||||
use crate::slog;
|
||||
|
||||
/// Dispatch an incoming WhatsApp message to bot commands.
|
||||
pub(super) async fn handle_incoming_message(ctx: &WhatsAppWebhookContext, sender: &str, message: &str) {
|
||||
pub(super) async fn handle_incoming_message(
|
||||
ctx: &WhatsAppWebhookContext,
|
||||
sender: &str,
|
||||
message: &str,
|
||||
) {
|
||||
use crate::chat::commands::{CommandDispatch, try_handle_command};
|
||||
|
||||
// Allowlist check: when configured, silently ignore unauthorized senders.
|
||||
if !ctx.allowed_phones.is_empty()
|
||||
&& !ctx.allowed_phones.iter().any(|p| p == sender)
|
||||
{
|
||||
if !ctx.allowed_phones.is_empty() && !ctx.allowed_phones.iter().any(|p| p == sender) {
|
||||
slog!("[whatsapp] Ignoring message from unauthorized sender: {sender}");
|
||||
return;
|
||||
}
|
||||
@@ -173,7 +175,9 @@ pub(super) async fn handle_incoming_message(ctx: &WhatsAppWebhookContext, sender
|
||||
slog!("[whatsapp] Handling reset command from {sender}");
|
||||
{
|
||||
let mut guard = ctx.history.lock().await;
|
||||
let conv = guard.entry(sender.to_string()).or_insert_with(RoomConversation::default);
|
||||
let conv = guard
|
||||
.entry(sender.to_string())
|
||||
.or_insert_with(RoomConversation::default);
|
||||
conv.session_id = None;
|
||||
conv.entries.clear();
|
||||
save_whatsapp_history(&ctx.project_root, &guard);
|
||||
@@ -219,8 +223,13 @@ pub(super) async fn handle_incoming_message(ctx: &WhatsAppWebhookContext, sender
|
||||
&ctx.bot_user_id,
|
||||
) {
|
||||
let response = match assign_cmd {
|
||||
crate::chat::transport::matrix::assign::AssignCommand::Assign { story_number, model } => {
|
||||
slog!("[whatsapp] Handling assign command from {sender}: story {story_number} model {model}");
|
||||
crate::chat::transport::matrix::assign::AssignCommand::Assign {
|
||||
story_number,
|
||||
model,
|
||||
} => {
|
||||
slog!(
|
||||
"[whatsapp] Handling assign command from {sender}: story {story_number} model {model}"
|
||||
);
|
||||
crate::chat::transport::matrix::assign::handle_assign(
|
||||
&ctx.bot_name,
|
||||
&story_number,
|
||||
@@ -385,9 +394,7 @@ async fn handle_llm_message(ctx: &WhatsAppWebhookContext, sender: &str, user_mes
|
||||
Err(e) => {
|
||||
slog!("[whatsapp] LLM error: {e}");
|
||||
let err_msg = if let Some(url) = crate::llm::oauth::extract_login_url_from_error(&e) {
|
||||
format!(
|
||||
"Authentication required. Log in to Claude here: {url}"
|
||||
)
|
||||
format!("Authentication required. Log in to Claude here: {url}")
|
||||
} else {
|
||||
format!("Error processing your request: {e}")
|
||||
};
|
||||
@@ -434,20 +441,18 @@ async fn handle_llm_message(ctx: &WhatsAppWebhookContext, sender: &str, user_mes
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::agents::AgentPool;
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
use super::super::history::{MessagingWindowTracker, WhatsAppConversationHistory};
|
||||
use super::super::WhatsAppWebhookContext;
|
||||
use super::super::history::{MessagingWindowTracker, WhatsAppConversationHistory};
|
||||
use super::*;
|
||||
use crate::agents::AgentPool;
|
||||
use crate::chat::transport::matrix::{ConversationEntry, ConversationRole, RoomConversation};
|
||||
use crate::io::watcher::WatcherEvent;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex as TokioMutex;
|
||||
|
||||
/// Build a minimal WhatsAppWebhookContext for allowlist tests.
|
||||
fn make_ctx_with_allowlist(
|
||||
allowed_phones: Vec<String>,
|
||||
) -> Arc<WhatsAppWebhookContext> {
|
||||
fn make_ctx_with_allowlist(allowed_phones: Vec<String>) -> Arc<WhatsAppWebhookContext> {
|
||||
struct NullTransport;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
@@ -505,9 +510,15 @@ mod tests {
|
||||
let err = "OAuth session expired or credentials missing. Please log in: http://localhost:3001/oauth/authorize";
|
||||
let url = crate::llm::oauth::extract_login_url_from_error(err);
|
||||
assert!(url.is_some(), "should extract URL from OAuth error");
|
||||
let msg = format!("Authentication required. Log in to Claude here: {}", url.unwrap());
|
||||
let msg = format!(
|
||||
"Authentication required. Log in to Claude here: {}",
|
||||
url.unwrap()
|
||||
);
|
||||
assert!(msg.contains("http://localhost:3001/oauth/authorize"));
|
||||
assert!(!msg.contains('['), "WhatsApp message should not use Markdown link syntax");
|
||||
assert!(
|
||||
!msg.contains('['),
|
||||
"WhatsApp message should not use Markdown link syntax"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -594,7 +605,10 @@ mod tests {
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
assert!(result.is_none(), "'status' should not be recognised as rebuild");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"'status' should not be recognised as rebuild"
|
||||
);
|
||||
}
|
||||
|
||||
// ── reset command extraction ───────────────────────────────────────
|
||||
@@ -624,14 +638,17 @@ mod tests {
|
||||
let sender = "+15555550100";
|
||||
let history: WhatsAppConversationHistory = Arc::new(TokioMutex::new({
|
||||
let mut m = HashMap::new();
|
||||
m.insert(sender.to_string(), RoomConversation {
|
||||
session_id: Some("old-session".to_string()),
|
||||
entries: vec![ConversationEntry {
|
||||
role: ConversationRole::User,
|
||||
sender: sender.to_string(),
|
||||
content: "previous message".to_string(),
|
||||
}],
|
||||
});
|
||||
m.insert(
|
||||
sender.to_string(),
|
||||
RoomConversation {
|
||||
session_id: Some("old-session".to_string()),
|
||||
entries: vec![ConversationEntry {
|
||||
role: ConversationRole::User,
|
||||
sender: sender.to_string(),
|
||||
content: "previous message".to_string(),
|
||||
}],
|
||||
},
|
||||
);
|
||||
m
|
||||
}));
|
||||
|
||||
@@ -641,7 +658,9 @@ mod tests {
|
||||
|
||||
{
|
||||
let mut guard = history.lock().await;
|
||||
let conv = guard.entry(sender.to_string()).or_insert_with(RoomConversation::default);
|
||||
let conv = guard
|
||||
.entry(sender.to_string())
|
||||
.or_insert_with(RoomConversation::default);
|
||||
conv.session_id = None;
|
||||
conv.entries.clear();
|
||||
save_whatsapp_history(tmp.path(), &guard);
|
||||
@@ -748,7 +767,10 @@ mod tests {
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
assert!(result.is_none(), "'status' should not be recognised as rmtree");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"'status' should not be recognised as rmtree"
|
||||
);
|
||||
}
|
||||
|
||||
// ── assign command extraction ──────────────────────────────────────
|
||||
@@ -805,6 +827,9 @@ mod tests {
|
||||
"Timmy",
|
||||
"@timmy:home.local",
|
||||
);
|
||||
assert!(result.is_none(), "'status' should not be recognised as assign");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"'status' should not be recognised as assign"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user