Restore codebase deleted by bad auto-commit e4227cf
Commit e4227cf (a story creation auto-commit) erroneously deleted 175
files from master's tree, likely due to a race condition between
concurrent git operations. This commit re-adds all files from the
working directory.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
+1689
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,6 @@
|
||||
// Public module surface of this crate: filesystem helpers, project
// onboarding checks, content search, shell execution, story front-matter
// handling, and the file watcher.
pub mod fs;
pub mod onboarding;
pub mod search;
pub mod shell;
pub mod story_metadata;
pub mod watcher;
|
||||
@@ -0,0 +1,315 @@
|
||||
use std::path::Path;

/// Sentinel comment injected as the first line of scaffold templates.
/// Only untouched templates contain this marker — real project content
/// will never include it, so it avoids false positives when the project
/// itself is an "Agentic AI Code Assistant".
const TEMPLATE_SENTINEL: &str = "<!-- storkit:scaffold-template -->";

/// Marker found in the default `script/test` scaffold output.
/// A customised test script is expected to have replaced this
/// placeholder echo (see the unit tests below).
const TEMPLATE_MARKER_SCRIPT: &str = "No tests configured";
|
||||
|
||||
/// Summary of what parts of a project still need onboarding.
///
/// Produced by [`check_onboarding_status`]; each flag is `true` when the
/// corresponding artefact is missing, empty, or still scaffold content.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OnboardingStatus {
    /// True when the project context spec needs to be populated.
    pub needs_context: bool,
    /// True when the tech stack spec needs to be populated.
    pub needs_stack: bool,
    /// True when `script/test` still contains the scaffold placeholder.
    pub needs_test_script: bool,
    /// True when `.storkit/project.toml` is missing or has no
    /// `[[component]]` entries.
    pub needs_project_toml: bool,
}
|
||||
|
||||
impl OnboardingStatus {
    /// Returns `true` when an onboarding-gating step is still needed.
    ///
    /// Note: only the context and stack specs gate onboarding here; the
    /// `needs_test_script` / `needs_project_toml` flags are reported but
    /// deliberately do not force onboarding (the unit tests assert
    /// `needs_onboarding()` is false even when those two flags are set).
    pub fn needs_onboarding(&self) -> bool {
        self.needs_context || self.needs_stack
    }
}
|
||||
|
||||
/// Inspect the project at `project_root` and determine which onboarding
|
||||
/// steps are still required.
|
||||
pub fn check_onboarding_status(project_root: &Path) -> OnboardingStatus {
|
||||
let story_kit = project_root.join(".storkit");
|
||||
|
||||
OnboardingStatus {
|
||||
needs_context: is_template_or_missing(
|
||||
&story_kit.join("specs").join("00_CONTEXT.md"),
|
||||
TEMPLATE_SENTINEL,
|
||||
),
|
||||
needs_stack: is_template_or_missing(
|
||||
&story_kit.join("specs").join("tech").join("STACK.md"),
|
||||
TEMPLATE_SENTINEL,
|
||||
),
|
||||
needs_test_script: is_template_or_missing(
|
||||
&project_root.join("script").join("test"),
|
||||
TEMPLATE_MARKER_SCRIPT,
|
||||
),
|
||||
needs_project_toml: needs_project_toml(&story_kit),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` when the file is missing, empty, or contains the
/// given scaffold marker string.
///
/// Any read error (missing file, permissions, non-UTF-8 content) is
/// treated the same as "still a template".
fn is_template_or_missing(path: &Path, marker: &str) -> bool {
    let Ok(content) = std::fs::read_to_string(path) else {
        // Unreadable counts as not yet onboarded.
        return true;
    };
    content.trim().is_empty() || content.contains(marker)
}
|
||||
|
||||
/// Returns `true` when `project.toml` is missing or has no
/// `[[component]]` entries.
///
/// A read failure of any kind is treated as "missing".
fn needs_project_toml(story_kit: &Path) -> bool {
    std::fs::read_to_string(story_kit.join("project.toml"))
        .map(|content| !content.contains("[[component]]"))
        .unwrap_or(true)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for the onboarding detector. Note these tests pin the
    // deliberate behaviour that only the context/stack specs gate
    // `needs_onboarding()`.
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// Create the directory skeleton (`.storkit/specs/tech` and `script/`)
    /// that `check_onboarding_status` inspects, returning the project root.
    fn setup_project(dir: &TempDir) -> std::path::PathBuf {
        let root = dir.path().to_path_buf();
        let sk = root.join(".storkit");
        fs::create_dir_all(sk.join("specs").join("tech")).unwrap();
        fs::create_dir_all(root.join("script")).unwrap();
        root
    }

    // ── needs_onboarding ──────────────────────────────────────────

    #[test]
    fn needs_onboarding_true_when_no_files_exist() {
        let dir = TempDir::new().unwrap();
        let root = dir.path().to_path_buf();
        let status = check_onboarding_status(&root);
        assert!(status.needs_onboarding());
        assert!(status.needs_context);
        assert!(status.needs_stack);
        assert!(status.needs_test_script);
        assert!(status.needs_project_toml);
    }

    #[test]
    fn needs_onboarding_true_when_specs_contain_scaffold_sentinel() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        // Write content that includes the scaffold sentinel
        fs::write(
            root.join(".storkit/specs/00_CONTEXT.md"),
            "<!-- storkit:scaffold-template -->\n# Project Context\nPlaceholder...",
        )
        .unwrap();
        fs::write(
            root.join(".storkit/specs/tech/STACK.md"),
            "<!-- storkit:scaffold-template -->\n# Tech Stack\nPlaceholder...",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(status.needs_context);
        assert!(status.needs_stack);
        assert!(status.needs_onboarding());
    }

    #[test]
    fn needs_onboarding_false_when_content_mentions_agentic_but_no_sentinel() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        // Real project content that happens to mention "Agentic AI Code Assistant"
        // but does NOT contain the scaffold sentinel — should NOT trigger onboarding.
        fs::write(
            root.join(".storkit/specs/00_CONTEXT.md"),
            "# Project Context\nTo build a standalone Agentic AI Code Assistant application.",
        )
        .unwrap();
        fs::write(
            root.join(".storkit/specs/tech/STACK.md"),
            "# Tech Stack\nThis is an Agentic Code Assistant binary.",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(!status.needs_context);
        assert!(!status.needs_stack);
        assert!(!status.needs_onboarding());
    }

    #[test]
    fn needs_onboarding_false_when_specs_have_custom_content() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        fs::write(
            root.join(".storkit/specs/00_CONTEXT.md"),
            "# My Project\n\nThis is an e-commerce platform for selling widgets.",
        )
        .unwrap();
        fs::write(
            root.join(".storkit/specs/tech/STACK.md"),
            "# Tech Stack\n\n## Backend: Python + FastAPI\n## Frontend: React + TypeScript",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(!status.needs_context);
        assert!(!status.needs_stack);
        assert!(!status.needs_onboarding());
    }

    #[test]
    fn needs_onboarding_true_when_specs_are_empty() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        // Whitespace-only and fully empty files both count as templates.
        fs::write(root.join(".storkit/specs/00_CONTEXT.md"), " \n").unwrap();
        fs::write(root.join(".storkit/specs/tech/STACK.md"), "").unwrap();

        let status = check_onboarding_status(&root);
        assert!(status.needs_context);
        assert!(status.needs_stack);
    }

    // ── needs_test_script ─────────────────────────────────────────

    #[test]
    fn needs_test_script_true_when_placeholder() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        fs::write(
            root.join("script/test"),
            "#!/usr/bin/env bash\nset -euo pipefail\necho \"No tests configured\"\n",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(status.needs_test_script);
    }

    #[test]
    fn needs_test_script_false_when_customised() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        fs::write(
            root.join("script/test"),
            "#!/usr/bin/env bash\nset -euo pipefail\ncargo test\n",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(!status.needs_test_script);
    }

    // ── needs_project_toml ────────────────────────────────────────

    #[test]
    fn needs_project_toml_true_when_missing() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        let status = check_onboarding_status(&root);
        assert!(status.needs_project_toml);
    }

    #[test]
    fn needs_project_toml_true_when_no_components() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        fs::write(root.join(".storkit/project.toml"), "# empty config\n").unwrap();

        let status = check_onboarding_status(&root);
        assert!(status.needs_project_toml);
    }

    #[test]
    fn needs_project_toml_false_when_has_components() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        fs::write(
            root.join(".storkit/project.toml"),
            "[[component]]\nname = \"app\"\npath = \".\"\nsetup = [\"cargo check\"]\n",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(!status.needs_project_toml);
    }

    // ── CLAUDE.md is not an onboarding step ──────────────────────

    #[test]
    fn onboarding_status_does_not_check_claude_md() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        // Write real content for the required onboarding files
        fs::write(
            root.join(".storkit/specs/00_CONTEXT.md"),
            "# My Project\n\nReal project context.",
        )
        .unwrap();
        fs::write(
            root.join(".storkit/specs/tech/STACK.md"),
            "# My Stack\n\nReal stack content.",
        )
        .unwrap();

        // CLAUDE.md is absent — should NOT affect onboarding result
        assert!(!root.join("CLAUDE.md").exists());

        let status = check_onboarding_status(&root);
        assert!(
            !status.needs_context,
            "needs_context should be false with real content"
        );
        assert!(
            !status.needs_stack,
            "needs_stack should be false with real content"
        );
        assert!(
            !status.needs_onboarding(),
            "needs_onboarding() should be false regardless of CLAUDE.md presence"
        );
    }

    // ── partial onboarding ────────────────────────────────────────

    #[test]
    fn needs_onboarding_true_when_only_context_is_template() {
        let dir = TempDir::new().unwrap();
        let root = setup_project(&dir);

        // Context still has sentinel
        fs::write(
            root.join(".storkit/specs/00_CONTEXT.md"),
            "<!-- storkit:scaffold-template -->\n# Project Context\nPlaceholder...",
        )
        .unwrap();
        // Stack is customised (no sentinel)
        fs::write(
            root.join(".storkit/specs/tech/STACK.md"),
            "# My Stack\nRuby on Rails + PostgreSQL",
        )
        .unwrap();

        let status = check_onboarding_status(&root);
        assert!(status.needs_context);
        assert!(!status.needs_stack);
        assert!(status.needs_onboarding());
    }
}
|
||||
@@ -0,0 +1,218 @@
|
||||
use crate::slog;
|
||||
use crate::state::SessionState;
|
||||
use ignore::WalkBuilder;
|
||||
use serde::Serialize;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// A single file-level hit returned by the search API.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct SearchResult {
    /// Path of the matching file, relative to the project root.
    pub path: String,
    /// Match count reported for this file.
    pub matches: usize,
}
|
||||
|
||||
/// Resolve the session's configured project root.
/// Thin delegate to `SessionState::get_project_root`; errors when no
/// project root has been set on the session.
fn get_project_root(state: &SessionState) -> Result<PathBuf, String> {
    state.get_project_root()
}
|
||||
|
||||
pub async fn search_files(
|
||||
query: String,
|
||||
state: &SessionState,
|
||||
) -> Result<Vec<SearchResult>, String> {
|
||||
let root = get_project_root(state)?;
|
||||
search_files_impl(query, root).await
|
||||
}
|
||||
|
||||
pub async fn search_files_impl(query: String, root: PathBuf) -> Result<Vec<SearchResult>, String> {
|
||||
let root_clone = root.clone();
|
||||
|
||||
let results = tokio::task::spawn_blocking(move || {
|
||||
let mut matches = Vec::new();
|
||||
let walker = WalkBuilder::new(&root_clone).git_ignore(true).build();
|
||||
|
||||
for result in walker {
|
||||
match result {
|
||||
Ok(entry) => {
|
||||
if !entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let path = entry.path();
|
||||
if let Ok(content) = fs::read_to_string(path)
|
||||
&& content.contains(&query)
|
||||
{
|
||||
let relative = path
|
||||
.strip_prefix(&root_clone)
|
||||
.unwrap_or(path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
matches.push(SearchResult {
|
||||
path: relative,
|
||||
matches: 1,
|
||||
});
|
||||
}
|
||||
}
|
||||
Err(err) => slog!("Error walking dir: {}", err),
|
||||
}
|
||||
}
|
||||
|
||||
matches
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Search task failed: {e}"))?;
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for the search module; exercise the pure `_impl` entry
    // point directly and the session-backed wrapper where state matters.
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// Materialise `(relative_path, content)` pairs inside a fresh temp dir.
    fn setup_project(files: &[(&str, &str)]) -> TempDir {
        let dir = TempDir::new().unwrap();
        for (path, content) in files {
            let full = dir.path().join(path);
            if let Some(parent) = full.parent() {
                fs::create_dir_all(parent).unwrap();
            }
            fs::write(full, content).unwrap();
        }
        dir
    }

    #[tokio::test]
    async fn finds_files_matching_query() {
        let dir = setup_project(&[
            ("hello.txt", "hello world"),
            ("goodbye.txt", "goodbye world"),
        ]);

        let results = search_files_impl("hello".to_string(), dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(results.len(), 1);
        assert_eq!(results[0].path, "hello.txt");
    }

    #[tokio::test]
    async fn returns_empty_for_no_matches() {
        let dir = setup_project(&[("file.txt", "some content")]);

        let results = search_files_impl("nonexistent".to_string(), dir.path().to_path_buf())
            .await
            .unwrap();

        assert!(results.is_empty());
    }

    #[tokio::test]
    async fn searches_nested_directories() {
        let dir = setup_project(&[
            ("top.txt", "needle"),
            ("sub/deep.txt", "needle in haystack"),
            ("sub/other.txt", "no match here"),
        ]);

        let results = search_files_impl("needle".to_string(), dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(results.len(), 2);
        let paths: Vec<&str> = results.iter().map(|r| r.path.as_str()).collect();
        assert!(paths.contains(&"top.txt"));
        assert!(paths.contains(&"sub/deep.txt"));
    }

    #[tokio::test]
    async fn skips_directories_only_matches_files() {
        let dir = setup_project(&[("sub/file.txt", "content")]);

        let results = search_files_impl("content".to_string(), dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(results.len(), 1);
        assert_eq!(results[0].path, "sub/file.txt");
    }

    #[tokio::test]
    async fn respects_gitignore() {
        let dir = setup_project(&[
            (".gitignore", "ignored/\n"),
            ("kept.txt", "search term"),
            ("ignored/hidden.txt", "search term"),
        ]);

        // Initialize a git repo so .gitignore is respected
        std::process::Command::new("git")
            .args(["init"])
            .current_dir(dir.path())
            .output()
            .unwrap();

        let results = search_files_impl("search term".to_string(), dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(results.len(), 1);
        assert_eq!(results[0].path, "kept.txt");
    }

    #[tokio::test]
    async fn search_files_with_session_state() {
        let dir = setup_project(&[("found.txt", "target_text")]);
        let state = SessionState::default();
        *state.project_root.lock().unwrap() = Some(dir.path().to_path_buf());

        let results = search_files("target_text".to_string(), &state).await.unwrap();

        assert_eq!(results.len(), 1);
        assert_eq!(results[0].path, "found.txt");
    }

    #[tokio::test]
    async fn search_files_errors_without_project_root() {
        let state = SessionState::default();

        let result = search_files("query".to_string(), &state).await;

        assert!(result.is_err());
        assert!(result.unwrap_err().contains("No project"));
    }

    #[test]
    fn search_result_serializes_and_debugs() {
        let sr = SearchResult {
            path: "src/main.rs".to_string(),
            matches: 3,
        };
        let json = serde_json::to_string(&sr).unwrap();
        assert!(json.contains("src/main.rs"));
        assert!(json.contains("3"));

        let debug = format!("{sr:?}");
        assert!(debug.contains("SearchResult"));
        assert!(debug.contains("src/main.rs"));
    }

    #[tokio::test]
    async fn skips_binary_files() {
        let dir = TempDir::new().unwrap();
        // Write a file with invalid UTF-8 bytes
        let binary_path = dir.path().join("binary.bin");
        fs::write(&binary_path, [0xFF, 0xFE, 0x00, 0x01]).unwrap();
        // Write a valid text file with the search term
        fs::write(dir.path().join("text.txt"), "findme").unwrap();

        let results = search_files_impl("findme".to_string(), dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(results.len(), 1);
        assert_eq!(results[0].path, "text.txt");
    }
}
|
||||
@@ -0,0 +1,189 @@
|
||||
use crate::state::SessionState;
|
||||
use serde::Serialize;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
/// Helper to get the root path (cloned) without joining.
/// Delegates to `SessionState::get_project_root`; errors when no
/// project root has been configured on the session.
fn get_project_root(state: &SessionState) -> Result<PathBuf, String> {
    state.get_project_root()
}
|
||||
|
||||
/// Captured result of a completed shell command.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct CommandOutput {
    /// Captured standard output (lossily decoded as UTF-8).
    pub stdout: String,
    /// Captured standard error (lossily decoded as UTF-8).
    pub stderr: String,
    /// Process exit code; -1 when no code is available (e.g. killed by signal).
    pub exit_code: i32,
}
|
||||
|
||||
/// Execute shell command logic (pure function for testing)
|
||||
async fn exec_shell_impl(
|
||||
command: String,
|
||||
args: Vec<String>,
|
||||
root: PathBuf,
|
||||
) -> Result<CommandOutput, String> {
|
||||
// Security Allowlist
|
||||
let allowed_commands = [
|
||||
"git", "cargo", "npm", "yarn", "pnpm", "node", "bun", "ls", "find", "grep", "mkdir", "rm",
|
||||
"mv", "cp", "touch", "rustc", "rustfmt",
|
||||
];
|
||||
|
||||
if !allowed_commands.contains(&command.as_str()) {
|
||||
return Err(format!("Command '{}' is not in the allowlist.", command));
|
||||
}
|
||||
|
||||
let output = tokio::task::spawn_blocking(move || {
|
||||
Command::new(&command)
|
||||
.args(&args)
|
||||
.current_dir(root)
|
||||
.output()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Task join error: {}", e))?
|
||||
.map_err(|e| format!("Failed to execute command: {}", e))?;
|
||||
|
||||
Ok(CommandOutput {
|
||||
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
|
||||
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
|
||||
exit_code: output.status.code().unwrap_or(-1),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn exec_shell(
|
||||
command: String,
|
||||
args: Vec<String>,
|
||||
state: &SessionState,
|
||||
) -> Result<CommandOutput, String> {
|
||||
let root = get_project_root(state)?;
|
||||
exec_shell_impl(command, args, root).await
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for shell execution: allowlist enforcement, output
    // capture, exit codes, and session-root resolution.
    use super::*;
    use tempfile::tempdir;

    #[tokio::test]
    async fn exec_shell_impl_rejects_disallowed_command() {
        let dir = tempdir().unwrap();
        let result = exec_shell_impl(
            "curl".to_string(),
            vec!["https://example.com".to_string()],
            dir.path().to_path_buf(),
        )
        .await;

        assert!(result.is_err());
        assert!(result.unwrap_err().contains("not in the allowlist"));
    }

    #[tokio::test]
    async fn exec_shell_impl_runs_allowed_command() {
        let dir = tempdir().unwrap();
        let result = exec_shell_impl(
            "ls".to_string(),
            Vec::new(),
            dir.path().to_path_buf(),
        )
        .await;

        assert!(result.is_ok());
        let output = result.unwrap();
        assert_eq!(output.exit_code, 0);
    }

    #[tokio::test]
    async fn exec_shell_impl_captures_stdout() {
        let dir = tempdir().unwrap();
        std::fs::write(dir.path().join("hello.txt"), "").unwrap();

        let result = exec_shell_impl(
            "ls".to_string(),
            Vec::new(),
            dir.path().to_path_buf(),
        )
        .await
        .unwrap();

        assert!(result.stdout.contains("hello.txt"));
    }

    #[tokio::test]
    async fn exec_shell_impl_returns_nonzero_exit_code() {
        let dir = tempdir().unwrap();
        let result = exec_shell_impl(
            "ls".to_string(),
            vec!["nonexistent_file_xyz".to_string()],
            dir.path().to_path_buf(),
        )
        .await
        .unwrap();

        assert_ne!(result.exit_code, 0);
        assert!(!result.stderr.is_empty());
    }

    #[tokio::test]
    async fn exec_shell_delegates_to_impl_via_state() {
        let dir = tempdir().unwrap();
        std::fs::write(dir.path().join("marker.txt"), "hello").unwrap();

        let state = SessionState::default();
        *state.project_root.lock().unwrap() = Some(dir.path().to_path_buf());

        let result = exec_shell("ls".to_string(), Vec::new(), &state)
            .await
            .unwrap();

        assert_eq!(result.exit_code, 0);
        assert!(result.stdout.contains("marker.txt"));
    }

    #[tokio::test]
    async fn exec_shell_errors_when_no_project_root() {
        let state = SessionState::default();

        let result = exec_shell("ls".to_string(), Vec::new(), &state).await;

        assert!(result.is_err());
        assert!(result.unwrap_err().contains("No project"));
    }

    #[tokio::test]
    async fn exec_shell_impl_errors_on_nonexistent_cwd() {
        let result = exec_shell_impl(
            "ls".to_string(),
            Vec::new(),
            PathBuf::from("/nonexistent_dir_that_does_not_exist_xyz"),
        )
        .await;

        assert!(result.is_err());
        assert!(result.unwrap_err().contains("Failed to execute command"));
    }

    #[test]
    fn command_output_serializes_to_json() {
        let output = CommandOutput {
            stdout: "hello".to_string(),
            stderr: "".to_string(),
            exit_code: 0,
        };

        let json = serde_json::to_string(&output).unwrap();
        assert!(json.contains("\"stdout\":\"hello\""));
        assert!(json.contains("\"exit_code\":0"));
    }

    #[test]
    fn command_output_debug_format() {
        let output = CommandOutput {
            stdout: "out".to_string(),
            stderr: "err".to_string(),
            exit_code: 1,
        };

        let debug = format!("{:?}", output);
        assert!(debug.contains("CommandOutput"));
        assert!(debug.contains("out"));
    }
}
|
||||
@@ -0,0 +1,542 @@
|
||||
use serde::Deserialize;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// QA mode for a story: determines how the pipeline handles post-coder review.
///
/// - `Server` — skip the QA agent; rely on server gate checks (clippy + tests).
///   If gates pass, advance straight to merge.
/// - `Agent` — spin up a QA agent (Claude session) to review code and run gates.
/// - `Human` — hold in QA for human approval after server gates pass.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum QaMode {
    Server,
    Agent,
    Human,
}

impl QaMode {
    /// Parse a string into a `QaMode`. Returns `None` for unrecognised values.
    /// Matching is case-insensitive and ignores surrounding whitespace.
    pub fn from_str(s: &str) -> Option<Self> {
        let normalised = s.trim().to_lowercase();
        if normalised == "server" {
            Some(Self::Server)
        } else if normalised == "agent" {
            Some(Self::Agent)
        } else if normalised == "human" {
            Some(Self::Human)
        } else {
            None
        }
    }

    /// Canonical lowercase name for this mode (inverse of `from_str`).
    pub fn as_str(&self) -> &'static str {
        match self {
            QaMode::Server => "server",
            QaMode::Agent => "agent",
            QaMode::Human => "human",
        }
    }
}

impl std::fmt::Display for QaMode {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
|
||||
|
||||
/// Parsed YAML front matter of a story file, with all fields optional.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct StoryMetadata {
    /// Human-readable story name.
    pub name: Option<String>,
    /// Coverage percentage string (e.g. "85.0%") written by `write_coverage_baseline`.
    pub coverage_baseline: Option<String>,
    /// Last merge-failure reason written by `write_merge_failure`.
    pub merge_failure: Option<String>,
    /// Agent identifier assigned to this story, if any.
    pub agent: Option<String>,
    /// Set by `write_review_hold` when the story is waiting on human review.
    pub review_hold: Option<bool>,
    /// QA mode parsed from the `qa:` key; `None` when absent or unrecognised.
    pub qa: Option<QaMode>,
    /// Number of times this story has been retried at its current pipeline stage.
    pub retry_count: Option<u32>,
    /// When `true`, auto-assign will skip this story (retry limit exceeded).
    pub blocked: Option<bool>,
}
|
||||
|
||||
/// Errors produced when reading a story file's front matter.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StoryMetaError {
    /// The file does not start with a `---` front-matter block.
    MissingFrontMatter,
    /// Front matter was present but could not be parsed; carries the
    /// parser's error message.
    InvalidFrontMatter(String),
}

impl std::fmt::Display for StoryMetaError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            StoryMetaError::MissingFrontMatter => write!(f, "Missing front matter"),
            StoryMetaError::InvalidFrontMatter(msg) => write!(f, "Invalid front matter: {msg}"),
        }
    }
}

// Implementing `Error` lets callers box this type or propagate it with `?`
// into `Box<dyn Error>`-style results; Display + Debug already exist.
impl std::error::Error for StoryMetaError {}
|
||||
|
||||
/// Raw serde target for the YAML front matter; converted into the public
/// `StoryMetadata` by `build_metadata` (which also parses `qa`).
#[derive(Debug, Deserialize)]
struct FrontMatter {
    name: Option<String>,
    coverage_baseline: Option<String>,
    merge_failure: Option<String>,
    agent: Option<String>,
    review_hold: Option<bool>,
    /// Configurable QA mode field: "human", "server", or "agent".
    qa: Option<String>,
    /// Number of times this story has been retried at its current pipeline stage.
    retry_count: Option<u32>,
    /// When `true`, auto-assign will skip this story (retry limit exceeded).
    blocked: Option<bool>,
}
|
||||
|
||||
pub fn parse_front_matter(contents: &str) -> Result<StoryMetadata, StoryMetaError> {
|
||||
let mut lines = contents.lines();
|
||||
|
||||
let first = lines.next().unwrap_or_default().trim();
|
||||
if first != "---" {
|
||||
return Err(StoryMetaError::MissingFrontMatter);
|
||||
}
|
||||
|
||||
let mut front_lines = Vec::new();
|
||||
for line in &mut lines {
|
||||
let trimmed = line.trim();
|
||||
if trimmed == "---" {
|
||||
let raw = front_lines.join("\n");
|
||||
let front: FrontMatter = serde_yaml::from_str(&raw)
|
||||
.map_err(|e| StoryMetaError::InvalidFrontMatter(e.to_string()))?;
|
||||
return Ok(build_metadata(front));
|
||||
}
|
||||
front_lines.push(line);
|
||||
}
|
||||
|
||||
Err(StoryMetaError::InvalidFrontMatter(
|
||||
"Missing closing front matter delimiter".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
fn build_metadata(front: FrontMatter) -> StoryMetadata {
|
||||
let qa = front.qa.as_deref().and_then(QaMode::from_str);
|
||||
|
||||
StoryMetadata {
|
||||
name: front.name,
|
||||
coverage_baseline: front.coverage_baseline,
|
||||
merge_failure: front.merge_failure,
|
||||
agent: front.agent,
|
||||
review_hold: front.review_hold,
|
||||
qa,
|
||||
retry_count: front.retry_count,
|
||||
blocked: front.blocked,
|
||||
}
|
||||
}
|
||||
|
||||
/// Write or update a `coverage_baseline:` field in the YAML front matter of a story file.
|
||||
///
|
||||
/// If front matter is present, adds or replaces `coverage_baseline:` before the closing `---`.
|
||||
/// If no front matter is present, this is a no-op (returns Ok).
|
||||
pub fn write_coverage_baseline(path: &Path, coverage_pct: f64) -> Result<(), String> {
|
||||
let contents =
|
||||
fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
|
||||
|
||||
let updated = set_front_matter_field(&contents, "coverage_baseline", &format!("{coverage_pct:.1}%"));
|
||||
fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write or update a `merge_failure:` field in the YAML front matter of a story file.
|
||||
///
|
||||
/// The reason is stored as a quoted YAML string so that colons, hashes, and newlines
|
||||
/// in the failure message do not break front-matter parsing.
|
||||
/// If no front matter is present, this is a no-op (returns Ok).
|
||||
pub fn write_merge_failure(path: &Path, reason: &str) -> Result<(), String> {
|
||||
let contents =
|
||||
fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
|
||||
|
||||
// Produce a YAML-safe inline quoted string: collapse newlines, escape inner quotes.
|
||||
let escaped = reason.replace('"', "\\\"").replace('\n', " ").replace('\r', "");
|
||||
let yaml_value = format!("\"{escaped}\"");
|
||||
|
||||
let updated = set_front_matter_field(&contents, "merge_failure", &yaml_value);
|
||||
fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write `review_hold: true` to the YAML front matter of a story file.
|
||||
///
|
||||
/// Used to mark spikes that have passed QA and are waiting for human review.
|
||||
pub fn write_review_hold(path: &Path) -> Result<(), String> {
|
||||
let contents =
|
||||
fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
|
||||
let updated = set_front_matter_field(&contents, "review_hold", "true");
|
||||
fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove a key from the YAML front matter of a story file on disk.
|
||||
///
|
||||
/// If front matter is present and contains the key, the line is removed.
|
||||
/// If no front matter or key is not found, the file is left unchanged.
|
||||
pub fn clear_front_matter_field(path: &Path, key: &str) -> Result<(), String> {
|
||||
let contents =
|
||||
fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
|
||||
let updated = remove_front_matter_field(&contents, key);
|
||||
if updated != contents {
|
||||
fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove a key: value line from the YAML front matter of a markdown string.
///
/// If no front matter (opening `---`) is found or the key is absent, returns content unchanged.
/// A trailing newline in the input is preserved in the output.
fn remove_front_matter_field(contents: &str, key: &str) -> String {
    let lines: Vec<&str> = contents.lines().collect();

    // Front matter must open on the first line.
    if lines.first().map(|l| l.trim()) != Some("---") {
        return contents.to_string();
    }
    // Locate the closing delimiter, searching from the second line.
    let Some(close) = lines.iter().skip(1).position(|l| l.trim() == "---") else {
        return contents.to_string();
    };
    let close_idx = close + 1;

    let prefix = format!("{key}:");
    let Some(hit) = lines[1..close_idx]
        .iter()
        .position(|l| l.trim_start().starts_with(&prefix))
    else {
        return contents.to_string();
    };
    let target_idx = hit + 1;

    // Rebuild without the matched line.
    let mut kept: Vec<&str> = Vec::with_capacity(lines.len() - 1);
    kept.extend_from_slice(&lines[..target_idx]);
    kept.extend_from_slice(&lines[target_idx + 1..]);

    let mut result = kept.join("\n");
    if contents.ends_with('\n') {
        result.push('\n');
    }
    result
}
|
||||
|
||||
/// Insert or update a `key: value` pair in the YAML front matter of a markdown string.
///
/// Returns the content unchanged when there is no opening `---` on the first
/// line or the front matter block is never closed.
pub fn set_front_matter_field(contents: &str, key: &str, value: &str) -> String {
    let mut lines: Vec<String> = contents.lines().map(String::from).collect();

    // Front matter must open with `---` on the very first line.
    let has_open = lines.first().map(|l| l.trim() == "---").unwrap_or(false);
    if !has_open {
        return contents.to_string();
    }

    // Locate the closing delimiter; an unterminated block is left alone.
    let close_idx = match lines.iter().skip(1).position(|l| l.trim() == "---") {
        Some(offset) => offset + 1,
        None => return contents.to_string(),
    };

    let prefix = format!("{key}:");
    let replacement = format!("{key}: {value}");

    // Overwrite an existing `key:` line in place; otherwise append the new
    // pair just before the closing `---`.
    match (1..close_idx).find(|&i| lines[i].trim_start().starts_with(&prefix)) {
        Some(i) => lines[i] = replacement,
        None => lines.insert(close_idx, replacement),
    }

    // `str::lines` drops the final newline, so restore it when present.
    let mut rebuilt = lines.join("\n");
    if contents.ends_with('\n') {
        rebuilt.push('\n');
    }
    rebuilt
}
|
||||
|
||||
/// Increment the `retry_count` field in the story file's front matter.
|
||||
///
|
||||
/// Reads the current value (defaulting to 0), increments by 1, and writes back.
|
||||
/// Returns the new retry count.
|
||||
pub fn increment_retry_count(path: &Path) -> Result<u32, String> {
|
||||
let contents =
|
||||
fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
|
||||
|
||||
let current = parse_front_matter(&contents)
|
||||
.ok()
|
||||
.and_then(|m| m.retry_count)
|
||||
.unwrap_or(0);
|
||||
let new_count = current + 1;
|
||||
|
||||
let updated = set_front_matter_field(&contents, "retry_count", &new_count.to_string());
|
||||
fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))?;
|
||||
Ok(new_count)
|
||||
}
|
||||
|
||||
/// Write `blocked: true` to the YAML front matter of a story file.
|
||||
///
|
||||
/// Used to mark stories that have exceeded the retry limit and should not
|
||||
/// be auto-assigned again.
|
||||
pub fn write_blocked(path: &Path) -> Result<(), String> {
|
||||
let contents =
|
||||
fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;
|
||||
let updated = set_front_matter_field(&contents, "blocked", "true");
|
||||
fs::write(path, &updated).map_err(|e| format!("Failed to write story file: {e}"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Append rejection notes to a story file body.
///
/// Adds a `## QA Rejection Notes` section at the end of the file so the
/// coder agent can see what needs fixing.
pub fn write_rejection_notes(path: &Path, notes: &str) -> Result<(), String> {
    let mut body =
        fs::read_to_string(path).map_err(|e| format!("Failed to read story file: {e}"))?;

    // Append in place rather than building a second full-file string.
    body.push_str(&format!("\n\n## QA Rejection Notes\n\n{notes}\n"));
    fs::write(path, body).map_err(|e| format!("Failed to write story file: {e}"))
}
|
||||
|
||||
/// Resolve the effective QA mode for a story file.
|
||||
///
|
||||
/// Reads the `qa` front matter field. If absent, falls back to `default`.
|
||||
/// Spikes are **not** handled here — the caller is responsible for overriding
|
||||
/// to `Human` for spikes.
|
||||
pub fn resolve_qa_mode(path: &Path, default: QaMode) -> QaMode {
|
||||
let contents = match fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(_) => return default,
|
||||
};
|
||||
match parse_front_matter(&contents) {
|
||||
Ok(meta) => meta.qa.unwrap_or(default),
|
||||
Err(_) => default,
|
||||
}
|
||||
}
|
||||
|
||||
/// Collect the text of every unchecked markdown todo (`- [ ] …`) in `contents`.
///
/// Lines are trimmed first, so indented checkboxes count; checked items
/// (`- [x]`) and ordinary bullets are skipped.
pub fn parse_unchecked_todos(contents: &str) -> Vec<String> {
    let mut todos = Vec::new();
    for line in contents.lines() {
        if let Some(text) = line.trim().strip_prefix("- [ ] ") {
            todos.push(text.to_string());
        }
    }
    todos
}
|
||||
|
||||
// Unit tests for the front matter parsing and mutation helpers above.
// Each test's string fixture doubles as documentation of the expected
// story-file format.
#[cfg(test)]
mod tests {
    use super::*;

    // Happy path: a well-formed front matter block yields the metadata fields.
    #[test]
    fn parses_front_matter_metadata() {
        let input = r#"---
name: Establish the TDD Workflow and Gates
workflow: tdd
---
# Story 26
"#;

        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.name.as_deref(), Some("Establish the TDD Workflow and Gates"));
        // Fields not present in the input stay `None`.
        assert_eq!(meta.coverage_baseline, None);
    }

    // `coverage_baseline` is read verbatim, percent sign included.
    #[test]
    fn parses_coverage_baseline_from_front_matter() {
        let input = "---\nname: Test Story\ncoverage_baseline: 78.5%\n---\n# Story\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.coverage_baseline.as_deref(), Some("78.5%"));
    }

    // A missing key is inserted into the front matter block.
    #[test]
    fn set_front_matter_field_inserts_new_key() {
        let input = "---\nname: My Story\n---\n# Body\n";
        let output = set_front_matter_field(input, "coverage_baseline", "55.0%");
        assert!(output.contains("coverage_baseline: 55.0%"));
        // Existing fields are preserved.
        assert!(output.contains("name: My Story"));
        // The input's trailing newline is preserved.
        assert!(output.ends_with('\n'));
    }

    // An existing key is overwritten in place rather than duplicated.
    #[test]
    fn set_front_matter_field_updates_existing_key() {
        let input = "---\nname: My Story\ncoverage_baseline: 40.0%\n---\n# Body\n";
        let output = set_front_matter_field(input, "coverage_baseline", "55.0%");
        assert!(output.contains("coverage_baseline: 55.0%"));
        assert!(!output.contains("40.0%"));
    }

    // Without an opening `---` the content is returned untouched.
    #[test]
    fn set_front_matter_field_no_op_without_front_matter() {
        let input = "# No front matter\n";
        let output = set_front_matter_field(input, "coverage_baseline", "55.0%");
        assert_eq!(output, input);
    }

    // write_coverage_baseline persists the float with a trailing `%`.
    #[test]
    fn write_coverage_baseline_updates_file() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("story.md");
        std::fs::write(&path, "---\nname: Test\n---\n# Story\n").unwrap();
        write_coverage_baseline(&path, 82.3).unwrap();
        let contents = std::fs::read_to_string(&path).unwrap();
        assert!(contents.contains("coverage_baseline: 82.3%"));
    }

    // Parsing requires front matter; plain markdown is a hard error.
    #[test]
    fn rejects_missing_front_matter() {
        let input = "# Story 26\n";
        assert_eq!(
            parse_front_matter(input),
            Err(StoryMetaError::MissingFrontMatter)
        );
    }

    // An opening `---` with no closing delimiter is invalid, not empty.
    #[test]
    fn rejects_unclosed_front_matter() {
        let input = "---\nname: Test\n";
        assert!(matches!(
            parse_front_matter(input),
            Err(StoryMetaError::InvalidFrontMatter(_))
        ));
    }

    // Removing a present key drops only that line.
    #[test]
    fn remove_front_matter_field_removes_key() {
        let input = "---\nname: My Story\nmerge_failure: \"something broke\"\n---\n# Body\n";
        let output = remove_front_matter_field(input, "merge_failure");
        assert!(!output.contains("merge_failure"));
        assert!(output.contains("name: My Story"));
        assert!(output.ends_with('\n'));
    }

    // Removing an absent key leaves the content byte-identical.
    #[test]
    fn remove_front_matter_field_no_op_when_absent() {
        let input = "---\nname: My Story\n---\n# Body\n";
        let output = remove_front_matter_field(input, "merge_failure");
        assert_eq!(output, input);
    }

    // No front matter at all: also a no-op.
    #[test]
    fn remove_front_matter_field_no_op_without_front_matter() {
        let input = "# No front matter\n";
        let output = remove_front_matter_field(input, "merge_failure");
        assert_eq!(output, input);
    }

    // clear_front_matter_field rewrites the on-disk file without the key.
    #[test]
    fn clear_front_matter_field_updates_file() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("story.md");
        std::fs::write(&path, "---\nname: Test\nmerge_failure: \"bad\"\n---\n# Story\n").unwrap();
        clear_front_matter_field(&path, "merge_failure").unwrap();
        let contents = std::fs::read_to_string(&path).unwrap();
        assert!(!contents.contains("merge_failure"));
        assert!(contents.contains("name: Test"));
    }

    // Only unchecked boxes are collected; `- [x]` items are skipped.
    #[test]
    fn parse_unchecked_todos_mixed() {
        let input = "## AC\n- [ ] First thing\n- [x] Done thing\n- [ ] Second thing\n";
        assert_eq!(
            parse_unchecked_todos(input),
            vec!["First thing", "Second thing"]
        );
    }

    // A fully-checked list yields nothing.
    #[test]
    fn parse_unchecked_todos_all_checked() {
        let input = "- [x] Done\n- [x] Also done\n";
        assert!(parse_unchecked_todos(input).is_empty());
    }

    // Ordinary bullets and prose are not todos.
    #[test]
    fn parse_unchecked_todos_no_checkboxes() {
        let input = "# Story\nSome text\n- A bullet\n";
        assert!(parse_unchecked_todos(input).is_empty());
    }

    // Indented checkboxes still count (lines are trimmed before matching).
    #[test]
    fn parse_unchecked_todos_leading_whitespace() {
        let input = " - [ ] Indented item\n";
        assert_eq!(parse_unchecked_todos(input), vec!["Indented item"]);
    }

    // `review_hold: true` round-trips through the parser.
    #[test]
    fn parses_review_hold_from_front_matter() {
        let input = "---\nname: Spike\nreview_hold: true\n---\n# Spike\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.review_hold, Some(true));
    }

    // Absent `review_hold` stays `None` rather than defaulting to false.
    #[test]
    fn review_hold_defaults_to_none() {
        let input = "---\nname: Story\n---\n# Story\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.review_hold, None);
    }

    // write_review_hold sets the flag without clobbering other fields.
    #[test]
    fn write_review_hold_sets_field() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("spike.md");
        std::fs::write(&path, "---\nname: My Spike\n---\n# Spike\n").unwrap();
        write_review_hold(&path).unwrap();
        let contents = std::fs::read_to_string(&path).unwrap();
        assert!(contents.contains("review_hold: true"));
        assert!(contents.contains("name: My Spike"));
    }

    // All three QA-mode spellings parse to their enum variants.
    #[test]
    fn parses_qa_mode_from_front_matter() {
        let input = "---\nname: Story\nqa: server\n---\n# Story\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.qa, Some(QaMode::Server));

        let input = "---\nname: Story\nqa: agent\n---\n# Story\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.qa, Some(QaMode::Agent));

        let input = "---\nname: Story\nqa: human\n---\n# Story\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.qa, Some(QaMode::Human));
    }

    // Absent `qa` field stays `None`; the default lives with the caller.
    #[test]
    fn qa_mode_defaults_to_none() {
        let input = "---\nname: Story\n---\n# Story\n";
        let meta = parse_front_matter(input).expect("front matter");
        assert_eq!(meta.qa, None);
    }

    // A `qa` field in the file wins over the supplied default.
    #[test]
    fn resolve_qa_mode_uses_file_value() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("story.md");
        std::fs::write(&path, "---\nname: Test\nqa: human\n---\n# Story\n").unwrap();
        assert_eq!(resolve_qa_mode(&path, QaMode::Server), QaMode::Human);
    }

    // No `qa` field: whatever default the caller passes is returned.
    #[test]
    fn resolve_qa_mode_falls_back_to_default() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("story.md");
        std::fs::write(&path, "---\nname: Test\n---\n# Story\n").unwrap();
        assert_eq!(resolve_qa_mode(&path, QaMode::Server), QaMode::Server);
        assert_eq!(resolve_qa_mode(&path, QaMode::Agent), QaMode::Agent);
    }

    // An unreadable path must not error — the default is used.
    #[test]
    fn resolve_qa_mode_missing_file_uses_default() {
        let path = std::path::Path::new("/nonexistent/story.md");
        assert_eq!(resolve_qa_mode(path, QaMode::Server), QaMode::Server);
    }

    // Rejection notes are appended as a discoverable markdown section.
    #[test]
    fn write_rejection_notes_appends_section() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("story.md");
        std::fs::write(&path, "---\nname: Test\n---\n# Story\n").unwrap();
        write_rejection_notes(&path, "Button color is wrong").unwrap();
        let contents = std::fs::read_to_string(&path).unwrap();
        assert!(contents.contains("## QA Rejection Notes"));
        assert!(contents.contains("Button color is wrong"));
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user