huskies: merge 819

This commit is contained in:
dave
2026-04-28 20:22:22 +00:00
parent b060d8fc88
commit f5ab75ecaa
8 changed files with 1056 additions and 1 deletions
Generated
+9
View File
@@ -2323,6 +2323,7 @@ dependencies = [
"serde_urlencoded",
"serde_yaml",
"sha2 0.11.0",
"source-map-gen",
"sqlx",
"strip-ansi-escapes",
"tempfile",
@@ -5423,6 +5424,14 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "source-map-gen"
version = "0.1.0"
dependencies = [
"serde_json",
"tempfile",
]
[[package]]
name = "spin"
version = "0.9.8"
+1 -1
View File
@@ -1,5 +1,5 @@
[workspace]
members = ["server", "crates/bft-json-crdt"]
members = ["server", "crates/bft-json-crdt", "crates/source-map-gen"]
resolver = "3"
[workspace.dependencies]
+13
View File
@@ -0,0 +1,13 @@
[package]
name = "source-map-gen"
version = "0.1.0"
edition = "2024"
[lib]
crate-type = ["lib"]
[dependencies]
serde_json = { workspace = true }
[dev-dependencies]
tempfile = { workspace = true }
+442
View File
@@ -0,0 +1,442 @@
//! LLM-friendly source map generation and documentation coverage checking.
//!
//! Provides a [`LanguageAdapter`] trait that language-specific adapters implement,
//! plus top-level dispatcher functions that route to the right adapter based on file
//! extension (`.rs` → [`RustAdapter`], `.ts`/`.tsx` → [`TypeScriptAdapter`]).
//!
//! The entry point for agent spawn integration is [`update_for_worktree`], which
//! runs `git diff --name-only` to find changed files and updates the source map for
//! those that pass the documentation coverage check.
mod rust_adapter;
mod ts_adapter;
pub use rust_adapter::RustAdapter;
pub use ts_adapter::TypeScriptAdapter;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::process::Command;
/// A missing documentation failure for a single public item.
// Derives `PartialEq` so whole results can be compared in tests, and `Clone`
// so failures from several adapters can be merged into one list.
#[derive(Debug, Clone, PartialEq)]
pub struct CheckFailure {
    /// Path to the file containing the undocumented item.
    pub file_path: PathBuf,
    /// 1-based line number of the item declaration.
    pub line: usize,
    /// Kind of item (e.g. `"fn"`, `"struct"`, `"module"`).
    pub item_kind: String,
    /// Name of the item.
    pub item_name: String,
}
impl CheckFailure {
    /// Render this failure as a human-readable direction a coding agent can
    /// act on directly: `path:line: add a doc comment to <kind> \`<name>\``.
    pub fn to_direction(&self) -> String {
        let location = format!("{}:{}", self.file_path.display(), self.line);
        format!(
            "{location}: add a doc comment to {} `{}`",
            self.item_kind, self.item_name
        )
    }
}
/// Result of a documentation coverage check.
// A dedicated enum (rather than "empty Vec means ok") keeps the passing case
// explicit at call sites, e.g. `matches!(check_files(..), CheckResult::Ok)`.
#[derive(Debug, Clone, PartialEq)]
pub enum CheckResult {
    /// All checked items are documented.
    Ok,
    /// One or more items are missing documentation.
    Failures(Vec<CheckFailure>),
}
/// Language-specific adapter for doc-coverage checking and source map generation.
// Implemented by `RustAdapter` and `TypeScriptAdapter`; callers normally go
// through the free functions `check_files` / `update_source_map`, which pick
// the adapter by file extension.
pub trait LanguageAdapter {
    /// Check documentation coverage for `files`.
    ///
    /// Returns [`CheckResult::Ok`] when every public item in every file has a doc
    /// comment, or [`CheckResult::Failures`] listing each undocumented item as a
    /// direction the coding agent can act on.
    fn check(&self, files: &[&Path]) -> CheckResult;
    /// Update the source map at `source_map_path` with entries for `passing_files`.
    ///
    /// Reads the existing map, updates only the entries for the provided files, and
    /// writes back. Entries for files not in `passing_files` are preserved unchanged.
    /// Running twice with the same input produces identical file content (idempotent).
    fn update_source_map(
        &self,
        passing_files: &[&Path],
        source_map_path: &Path,
    ) -> Result<(), String>;
}
/// Returns the adapter for the given file extension, or `None` if unsupported.
fn adapter_for_ext(ext: &str) -> Option<Box<dyn LanguageAdapter>> {
    let adapter: Box<dyn LanguageAdapter> = match ext {
        "rs" => Box::new(RustAdapter),
        "ts" | "tsx" => Box::new(TypeScriptAdapter),
        _ => return None,
    };
    Some(adapter)
}
/// Check documentation coverage for a mixed list of files.
///
/// Dispatches each file to the appropriate [`LanguageAdapter`] based on its
/// extension. Files with unsupported extensions are silently skipped.
pub fn check_files(files: &[&Path]) -> CheckResult {
    // Group inputs by extension so each adapter is invoked at most once.
    let mut grouped: HashMap<String, Vec<&Path>> = HashMap::new();
    for &file in files {
        let Some(ext) = file.extension().and_then(|e| e.to_str()) else {
            continue;
        };
        grouped.entry(ext.to_string()).or_default().push(file);
    }
    let mut failures = Vec::new();
    for (ext, group) in &grouped {
        let Some(adapter) = adapter_for_ext(ext) else {
            continue;
        };
        if let CheckResult::Failures(mut found) = adapter.check(group) {
            failures.append(&mut found);
        }
    }
    if failures.is_empty() {
        CheckResult::Ok
    } else {
        CheckResult::Failures(failures)
    }
}
/// Update the source map at `source_map_path` with entries for `passing_files`.
///
/// Dispatches each file to the appropriate [`LanguageAdapter`] based on extension.
/// Files with unsupported extensions are silently skipped.
pub fn update_source_map(passing_files: &[&Path], source_map_path: &Path) -> Result<(), String> {
    // Group by extension so each adapter performs one read-modify-write pass.
    let mut grouped: HashMap<String, Vec<&Path>> = HashMap::new();
    for &file in passing_files {
        let Some(ext) = file.extension().and_then(|e| e.to_str()) else {
            continue;
        };
        grouped.entry(ext.to_string()).or_default().push(file);
    }
    for (ext, group) in &grouped {
        if let Some(adapter) = adapter_for_ext(ext) {
            adapter.update_source_map(group, source_map_path)?;
        }
    }
    Ok(())
}
/// Update the source map for files that changed since `base_branch` in `worktree_path`.
///
/// 1. Runs `git diff --name-only {base_branch}...HEAD` in the worktree.
/// 2. Checks doc coverage for each changed file (per-file).
/// 3. Calls [`update_source_map`] with the files whose coverage check passes.
///
/// Errors are returned as `Err(String)`; callers in the spawn flow treat them as
/// non-blocking warnings.
pub fn update_for_worktree(
    worktree_path: &Path,
    base_branch: &str,
    source_map_path: &Path,
) -> Result<(), String> {
    let range = format!("{base_branch}...HEAD");
    let output = Command::new("git")
        .args(["diff", "--name-only", &range])
        .current_dir(worktree_path)
        .output()
        .map_err(|e| format!("git diff: {e}"))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!("git diff failed: {}", stderr.trim()));
    }
    // Diff output is one repo-relative path per line; resolve against the
    // worktree root and drop paths that no longer exist (deleted files).
    let stdout = String::from_utf8_lossy(&output.stdout);
    let changed: Vec<PathBuf> = stdout
        .lines()
        .filter(|line| !line.is_empty())
        .map(|line| worktree_path.join(line))
        .filter(|path| path.exists())
        .collect();
    if changed.is_empty() {
        return Ok(());
    }
    // Keep only files that individually pass the doc-coverage check.
    let passing: Vec<&Path> = changed
        .iter()
        .map(PathBuf::as_path)
        .filter(|&path| matches!(check_files(&[path]), CheckResult::Ok))
        .collect();
    if passing.is_empty() {
        return Ok(());
    }
    if let Some(parent) = source_map_path.parent() {
        std::fs::create_dir_all(parent).map_err(|e| format!("create_dir_all: {e}"))?;
    }
    update_source_map(&passing, source_map_path)
}
/// Read the existing source map from `path` as a JSON object.
///
/// Returns an empty map if the file does not exist.
///
/// # Errors
///
/// Returns `Err` when the file exists but cannot be read, or when its content
/// is not a valid JSON object.
pub(crate) fn read_map(path: &Path) -> Result<serde_json::Map<String, serde_json::Value>, String> {
    // Read first and inspect the error kind rather than probing with
    // `path.exists()`: avoids the check-then-read race (file removed between
    // the two calls) and saves one stat syscall.
    let content = match std::fs::read_to_string(path) {
        Ok(content) => content,
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            return Ok(serde_json::Map::new());
        }
        Err(e) => return Err(format!("read {}: {e}", path.display())),
    };
    serde_json::from_str(&content).map_err(|e| format!("parse source map: {e}"))
}
/// Write `map` to `path` as pretty-printed JSON.
///
/// The output ends with a trailing newline so the file is POSIX-friendly and
/// diffs cleanly. Serialization is deterministic for the same map, preserving
/// the idempotency guarantee of [`LanguageAdapter::update_source_map`].
pub(crate) fn write_map(
    path: &Path,
    map: serde_json::Map<String, serde_json::Value>,
) -> Result<(), String> {
    let mut content = serde_json::to_string_pretty(&serde_json::Value::Object(map))
        .map_err(|e| format!("serialize: {e}"))?;
    // `to_string_pretty` does not terminate the last line; add the newline.
    content.push('\n');
    std::fs::write(path, content).map_err(|e| format!("write {}: {e}", path.display()))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::process::Command;
    use tempfile::TempDir;

    /// Write `content` to `dir/name` and return the full path.
    // Single helper replaces the former identical `write_rs`/`write_ts` pair.
    fn write_file(dir: &std::path::Path, name: &str, content: &str) -> PathBuf {
        let path = dir.join(name);
        std::fs::write(&path, content).unwrap();
        path
    }

    // --- Rust happy path ---
    #[test]
    fn rust_check_happy_path_ok() {
        let tmp = TempDir::new().unwrap();
        let path = write_file(
            tmp.path(),
            "foo.rs",
            "//! Module doc.\n\n/// A function.\npub fn hello() {}\n",
        );
        assert_eq!(check_files(&[&path]), CheckResult::Ok);
    }

    // --- Rust failure path ---
    #[test]
    fn rust_check_missing_module_doc_yields_failure() {
        let tmp = TempDir::new().unwrap();
        let path = write_file(tmp.path(), "foo.rs", "/// A function.\npub fn hello() {}\n");
        let result = check_files(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "module")),
            "expected module failure, got {result:?}"
        );
    }

    #[test]
    fn rust_check_missing_fn_doc_yields_failure_with_correct_fields() {
        let tmp = TempDir::new().unwrap();
        let path = write_file(
            tmp.path(),
            "bar.rs",
            "//! Module doc.\n\npub fn undocumented() {}\n",
        );
        let result = check_files(&[&path]);
        if let CheckResult::Failures(failures) = result {
            let f = failures.iter().find(|f| f.item_kind == "fn").unwrap();
            assert_eq!(f.item_name, "undocumented");
            assert_eq!(f.file_path, path);
            assert_eq!(f.line, 3);
        } else {
            panic!("expected failures");
        }
    }

    // --- TypeScript happy path ---
    #[test]
    fn ts_check_happy_path_ok() {
        let tmp = TempDir::new().unwrap();
        let path = write_file(
            tmp.path(),
            "app.ts",
            "/**\n * File doc.\n */\n\n/**\n * Does something.\n */\nexport function hello(): void {}\n",
        );
        assert_eq!(check_files(&[&path]), CheckResult::Ok);
    }

    // --- TypeScript failure path ---
    #[test]
    fn ts_check_missing_file_doc_yields_failure() {
        let tmp = TempDir::new().unwrap();
        // First non-empty line is plain code, so the file-level JSDoc check fails.
        let path = write_file(tmp.path(), "app.ts", "export function hello(): void {}\n");
        let result = check_files(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "file")),
            "expected file failure, got {result:?}"
        );
        // Conversely, a file that opens with `/**` must NOT produce a
        // file-level failure (exports may still be flagged individually).
        let documented = write_file(
            tmp.path(),
            "app2.ts",
            "/** A function. */\nexport function hello(): void {}\n",
        );
        let result2 = check_files(&[&documented]);
        assert!(
            !matches!(&result2, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "file")),
            "file starting with /** should not have a file-level failure, got {result2:?}"
        );
    }

    #[test]
    fn ts_check_missing_export_doc_yields_failure() {
        let tmp = TempDir::new().unwrap();
        let path = write_file(
            tmp.path(),
            "app.ts",
            "/**\n * File doc.\n */\n\nexport function undocumented(): void {}\n",
        );
        let result = check_files(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "function" && f.item_name == "undocumented")),
            "expected function failure, got {result:?}"
        );
    }

    // --- Update idempotency ---
    #[test]
    fn update_idempotent_same_input_twice() {
        let tmp = TempDir::new().unwrap();
        let rs_path = write_file(
            tmp.path(),
            "lib.rs",
            "//! Module doc.\n\n/// A function.\npub fn foo() {}\n",
        );
        let map_path = tmp.path().join("source-map.json");
        let files: &[&Path] = &[&rs_path];
        update_source_map(files, &map_path).unwrap();
        let first = std::fs::read_to_string(&map_path).unwrap();
        update_source_map(files, &map_path).unwrap();
        let second = std::fs::read_to_string(&map_path).unwrap();
        assert_eq!(first, second, "update_source_map must be idempotent");
    }

    // --- update_source_map preserves other entries ---
    #[test]
    fn update_source_map_preserves_unrelated_entries() {
        let tmp = TempDir::new().unwrap();
        let map_path = tmp.path().join("source-map.json");
        // Seed the map with an entry for a file that will NOT be updated.
        std::fs::write(&map_path, r#"{"unrelated/file.rs": ["fn old"]}"#).unwrap();
        let rs_path = write_file(
            tmp.path(),
            "new.rs",
            "//! Module doc.\n\n/// A function.\npub fn bar() {}\n",
        );
        update_source_map(&[&rs_path], &map_path).unwrap();
        let content = std::fs::read_to_string(&map_path).unwrap();
        assert!(
            content.contains("unrelated/file.rs"),
            "old entry should be preserved"
        );
        assert!(content.contains("new.rs"), "new entry should be added");
    }

    // --- Spawn integration: update_for_worktree writes map at expected path ---
    /// Initialise a git repo in `dir` with a test identity and one empty commit.
    fn init_git_repo(dir: &Path) {
        Command::new("git")
            .args(["init"])
            .current_dir(dir)
            .output()
            .expect("git init");
        Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(dir)
            .output()
            .expect("git config email");
        Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(dir)
            .output()
            .expect("git config name");
        Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(dir)
            .output()
            .expect("initial commit");
    }

    #[test]
    fn spawn_integration_map_written_at_expected_path() {
        let tmp = TempDir::new().unwrap();
        init_git_repo(tmp.path());
        // Add a well-documented Rust file and commit it.
        write_file(
            tmp.path(),
            "lib.rs",
            "//! Module doc.\n\n/// A function.\npub fn greet() {}\n",
        );
        Command::new("git")
            .args(["add", "lib.rs"])
            .current_dir(tmp.path())
            .output()
            .expect("git add");
        Command::new("git")
            .args(["commit", "-m", "add lib.rs"])
            .current_dir(tmp.path())
            .output()
            .expect("git commit");
        let huskies_dir = tmp.path().join(".huskies");
        std::fs::create_dir_all(&huskies_dir).unwrap();
        let map_path = huskies_dir.join("source-map.json");
        // Simulate what spawn does: update_for_worktree with base = initial commit.
        let result = update_for_worktree(tmp.path(), "HEAD~1", &map_path);
        assert!(
            result.is_ok(),
            "update_for_worktree failed: {:?}",
            result.err()
        );
        // The map file must exist at the expected path.
        assert!(
            map_path.exists(),
            "source map must be written at .huskies/source-map.json"
        );
        let content = std::fs::read_to_string(&map_path).unwrap();
        assert!(
            content.contains("lib.rs"),
            "map must contain the documented file"
        );
        assert!(
            content.contains("fn greet"),
            "map must list the documented function"
        );
    }
}
+272
View File
@@ -0,0 +1,272 @@
//! Rust documentation coverage adapter.
//!
//! Checks for:
//! - A `//!` module-level doc comment somewhere in every `.rs` file.
//! - A `///` doc comment immediately before every `pub` item (`fn`, `struct`,
//! `enum`, `trait`, `type`, `const`, `static`, `mod`).
use std::fs;
use std::path::Path;
use crate::{CheckFailure, CheckResult, LanguageAdapter};
/// Rust documentation coverage adapter.
pub struct RustAdapter;

impl RustAdapter {
    /// Collect every doc-coverage failure in the file at `path`.
    ///
    /// Unreadable files yield no failures (best-effort: I/O problems don't block).
    fn check_file(&self, path: &Path) -> Vec<CheckFailure> {
        let Ok(content) = fs::read_to_string(path) else {
            return Vec::new();
        };
        let lines: Vec<&str> = content.lines().collect();
        let mut failures = Vec::new();
        // A `//!` inner doc comment anywhere in the file satisfies the
        // module-level requirement.
        let has_module_doc = lines.iter().any(|l| l.trim_start().starts_with("//!"));
        if !has_module_doc {
            failures.push(CheckFailure {
                file_path: path.to_path_buf(),
                line: 1,
                item_kind: "module".to_string(),
                item_name: module_name(path),
            });
        }
        // Flag each public item with no `///` doc comment directly above it.
        for (idx, &line) in lines.iter().enumerate() {
            let Some((kind, name)) = parse_pub_item(line) else {
                continue;
            };
            if has_doc_before(&lines, idx) {
                continue;
            }
            failures.push(CheckFailure {
                file_path: path.to_path_buf(),
                line: idx + 1,
                item_kind: kind,
                item_name: name,
            });
        }
        failures
    }

    /// Extract public item signatures from a Rust file as `"kind name"` strings.
    pub(crate) fn extract_items(path: &Path) -> Vec<String> {
        let Ok(content) = fs::read_to_string(path) else {
            return Vec::new();
        };
        content
            .lines()
            .filter_map(|line| {
                parse_pub_item(line).map(|(kind, name)| format!("{kind} {name}"))
            })
            .collect()
    }
}
impl LanguageAdapter for RustAdapter {
    fn check(&self, files: &[&Path]) -> CheckResult {
        let mut failures = Vec::new();
        for &file in files {
            failures.extend(self.check_file(file));
        }
        if !failures.is_empty() {
            return CheckResult::Failures(failures);
        }
        CheckResult::Ok
    }

    fn update_source_map(
        &self,
        passing_files: &[&Path],
        source_map_path: &Path,
    ) -> Result<(), String> {
        // Read-modify-write: only entries for `passing_files` are replaced;
        // everything else in the existing map is preserved.
        let mut map = crate::read_map(source_map_path)?;
        for &file in passing_files {
            let items = Self::extract_items(file)
                .into_iter()
                .map(serde_json::Value::String)
                .collect::<Vec<_>>();
            map.insert(
                file.to_string_lossy().into_owned(),
                serde_json::Value::Array(items),
            );
        }
        crate::write_map(source_map_path, map)
    }
}
/// Best-effort module name for a Rust file: its file stem, or `"unknown"`
/// when the path has no UTF-8 stem.
fn module_name(path: &Path) -> String {
    match path.file_stem().and_then(|s| s.to_str()) {
        Some(stem) => stem.to_string(),
        None => "unknown".to_string(),
    }
}
/// Parse a line as a public Rust item declaration.
///
/// Returns `(kind, name)` if the line declares a public item, `None` otherwise.
/// Handles visibility qualifiers (`pub`, `pub(…)`) and function qualifiers in
/// declaration order (`const`, `async`, `unsafe`), plus `static mut` items.
fn parse_pub_item(line: &str) -> Option<(String, String)> {
    let trimmed = line.trim();
    // Strip visibility: "pub(…)" or "pub ".
    let rest = if let Some(r) = trimmed.strip_prefix("pub(") {
        let end = r.find(')')?;
        r[end + 1..].trim_start()
    } else if let Some(r) = trimmed.strip_prefix("pub ") {
        r.trim_start()
    } else {
        return None;
    };
    // `const ` is only a fn qualifier when followed by more qualifiers or `fn`;
    // a plain `pub const NAME: …` must still parse as a const item. Without
    // this check, `pub const fn foo()` would parse as a const named "fn".
    let mut rest = rest;
    if let Some(r) = rest.strip_prefix("const ") {
        let r = r.trim_start();
        if r.starts_with("fn ") || r.starts_with("async ") || r.starts_with("unsafe ") {
            rest = r;
        }
    }
    // Strip remaining fn qualifiers in declaration order: `async`, `unsafe`.
    for qualifier in ["async ", "unsafe "] {
        if let Some(r) = rest.strip_prefix(qualifier) {
            rest = r.trim_start();
        }
    }
    // Match item keyword and extract the name part.
    let (kind, name_part) = if let Some(r) = rest.strip_prefix("fn ") {
        ("fn", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("struct ") {
        ("struct", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("enum ") {
        ("enum", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("union ") {
        ("union", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("trait ") {
        ("trait", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("type ") {
        ("type", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("const ") {
        ("const", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("static ") {
        // `pub static mut NAME` — skip the `mut` qualifier before the name;
        // otherwise the item name would be reported as "mut".
        let r = r.trim_start();
        let r = r.strip_prefix("mut ").map_or(r, str::trim_start);
        ("static", r)
    } else if let Some(r) = rest.strip_prefix("mod ") {
        ("mod", r.trim_start())
    } else {
        return None;
    };
    let name: String = name_part
        .chars()
        .take_while(|&c| c.is_alphanumeric() || c == '_')
        .collect();
    if name.is_empty() {
        return None;
    }
    Some((kind.to_string(), name))
}
/// Return `true` if a `///` doc comment appears before the item at `item_idx`.
///
/// Scans backward from `item_idx`, skipping blank lines and `#[…]` / `#![…]`
/// attribute lines. Returns `true` if the first substantive line is a `///`
/// comment.
fn has_doc_before(lines: &[&str], item_idx: usize) -> bool {
    for raw in lines[..item_idx].iter().rev() {
        let line = raw.trim();
        // Attributes and blank lines may sit between the doc and the item.
        if line.is_empty() || line.starts_with("#[") || line.starts_with("#![") {
            continue;
        }
        return line.starts_with("///");
    }
    false
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Write `content` to `dir/name` and return the full path.
    fn write_rs(dir: &Path, name: &str, content: &str) -> std::path::PathBuf {
        let path = dir.join(name);
        std::fs::write(&path, content).unwrap();
        path
    }

    // Fully documented file: `//!` module doc plus `///` on every pub item.
    #[test]
    fn check_fully_documented_file_returns_ok() {
        let tmp = TempDir::new().unwrap();
        let path = write_rs(
            tmp.path(),
            "lib.rs",
            "//! Module doc.\n\n/// A function.\npub fn hello() {}\n\n/// A struct.\npub struct Foo;\n",
        );
        let adapter = RustAdapter;
        assert_eq!(adapter.check(&[&path]), CheckResult::Ok);
    }

    // No `//!` anywhere in the file → one "module" failure.
    #[test]
    fn check_detects_missing_module_doc() {
        let tmp = TempDir::new().unwrap();
        let path = write_rs(tmp.path(), "lib.rs", "/// A function.\npub fn hello() {}\n");
        let adapter = RustAdapter;
        let result = adapter.check(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "module")),
            "expected module failure, got {result:?}"
        );
    }

    // An undocumented pub fn reports kind, name, 1-based line, and file path.
    #[test]
    fn check_detects_missing_fn_doc_with_correct_fields() {
        let tmp = TempDir::new().unwrap();
        let path = write_rs(tmp.path(), "bar.rs", "//! Module.\n\npub fn no_doc() {}\n");
        let adapter = RustAdapter;
        let result = adapter.check(&[&path]);
        if let CheckResult::Failures(failures) = result {
            let f = failures.iter().find(|f| f.item_kind == "fn").unwrap();
            assert_eq!(f.item_name, "no_doc");
            assert_eq!(f.line, 3);
            assert_eq!(f.file_path, path);
        } else {
            panic!("expected failures");
        }
    }

    #[test]
    fn check_passes_item_with_attribute_before_doc() {
        let tmp = TempDir::new().unwrap();
        // Attribute between doc and item is fine; doc between attribute and item is fine too
        let path = write_rs(
            tmp.path(),
            "lib.rs",
            "//! Module.\n\n/// Doc.\n#[derive(Debug)]\npub struct Foo;\n",
        );
        let adapter = RustAdapter;
        assert_eq!(adapter.check(&[&path]), CheckResult::Ok);
    }

    // Line-level parser coverage: visibility forms, qualifiers, and negatives.
    #[test]
    fn parse_pub_item_recognises_various_kinds() {
        assert_eq!(
            parse_pub_item("pub fn foo()"),
            Some(("fn".into(), "foo".into()))
        );
        assert_eq!(
            parse_pub_item("  pub async fn bar()"),
            Some(("fn".into(), "bar".into()))
        );
        assert_eq!(
            parse_pub_item("pub struct Baz"),
            Some(("struct".into(), "Baz".into()))
        );
        assert_eq!(
            parse_pub_item("pub enum Qux"),
            Some(("enum".into(), "Qux".into()))
        );
        assert_eq!(
            parse_pub_item("pub trait MyTrait"),
            Some(("trait".into(), "MyTrait".into()))
        );
        assert_eq!(
            parse_pub_item("pub(crate) fn inner()"),
            Some(("fn".into(), "inner".into()))
        );
        assert_eq!(parse_pub_item("fn private()"), None);
        assert_eq!(parse_pub_item("let x = 1;"), None);
    }
}
+293
View File
@@ -0,0 +1,293 @@
//! TypeScript documentation coverage adapter.
//!
//! Checks for:
//! - A leading file-level JSDoc comment (`/** … */`) at the top of every
//! `.ts` / `.tsx` file.
//! - A JSDoc comment before every exported declaration (`export function`,
//! `export class`, `export type`, `export interface`, `export const`, etc.).
use std::fs;
use std::path::Path;
use crate::{CheckFailure, CheckResult, LanguageAdapter};
/// TypeScript documentation coverage adapter.
pub struct TypeScriptAdapter;

impl TypeScriptAdapter {
    /// Collect every doc-coverage failure in the file at `path`.
    ///
    /// Unreadable files yield no failures (best-effort: I/O problems don't block).
    fn check_file(&self, path: &Path) -> Vec<CheckFailure> {
        let Ok(content) = fs::read_to_string(path) else {
            return Vec::new();
        };
        let lines: Vec<&str> = content.lines().collect();
        let mut failures = Vec::new();
        // File-level JSDoc: the first non-empty line must open with "/**".
        if !has_file_level_jsdoc(&content) {
            failures.push(CheckFailure {
                file_path: path.to_path_buf(),
                line: 1,
                item_kind: "file".to_string(),
                item_name: file_stem(path),
            });
        }
        // Flag each exported declaration with no JSDoc directly above it.
        for (idx, &line) in lines.iter().enumerate() {
            let Some((kind, name)) = parse_exported_item(line) else {
                continue;
            };
            if has_jsdoc_before(&lines, idx) {
                continue;
            }
            failures.push(CheckFailure {
                file_path: path.to_path_buf(),
                line: idx + 1,
                item_kind: kind,
                item_name: name,
            });
        }
        failures
    }

    /// Extract exported item signatures from a TypeScript file as `"kind name"` strings.
    pub(crate) fn extract_items(path: &Path) -> Vec<String> {
        let Ok(content) = fs::read_to_string(path) else {
            return Vec::new();
        };
        content
            .lines()
            .filter_map(|line| {
                parse_exported_item(line).map(|(kind, name)| format!("{kind} {name}"))
            })
            .collect()
    }
}
impl LanguageAdapter for TypeScriptAdapter {
    fn check(&self, files: &[&Path]) -> CheckResult {
        let mut failures = Vec::new();
        for &file in files {
            failures.extend(self.check_file(file));
        }
        if !failures.is_empty() {
            return CheckResult::Failures(failures);
        }
        CheckResult::Ok
    }

    fn update_source_map(
        &self,
        passing_files: &[&Path],
        source_map_path: &Path,
    ) -> Result<(), String> {
        // Read-modify-write: only entries for `passing_files` are replaced;
        // everything else in the existing map is preserved.
        let mut map = crate::read_map(source_map_path)?;
        for &file in passing_files {
            let items = Self::extract_items(file)
                .into_iter()
                .map(serde_json::Value::String)
                .collect::<Vec<_>>();
            map.insert(
                file.to_string_lossy().into_owned(),
                serde_json::Value::Array(items),
            );
        }
        crate::write_map(source_map_path, map)
    }
}
/// Best-effort display name for a file: its stem, or `"unknown"` when the
/// path has no UTF-8 stem.
fn file_stem(path: &Path) -> String {
    match path.file_stem().and_then(|s| s.to_str()) {
        Some(stem) => stem.to_string(),
        None => "unknown".to_string(),
    }
}
/// Return `true` if the file starts with a JSDoc block comment (`/**`).
///
/// Only leading blank lines may precede it; any other first content fails.
fn has_file_level_jsdoc(content: &str) -> bool {
    content
        .lines()
        .map(str::trim)
        .find(|line| !line.is_empty())
        .is_some_and(|line| line.starts_with("/**"))
}
/// Parse a line as an exported TypeScript declaration.
///
/// Returns `(kind, name)` for supported export forms, `None` otherwise.
/// Anonymous default exports (`export default function () {}`, with or
/// without a space before the parentheses) are reported with the name
/// `"default"`.
fn parse_exported_item(line: &str) -> Option<(String, String)> {
    let trimmed = line.trim();
    // Strip "export default" or "export".
    let rest = if let Some(r) = trimmed.strip_prefix("export default ") {
        r.trim_start()
    } else if let Some(r) = trimmed.strip_prefix("export ") {
        r.trim_start()
    } else {
        return None;
    };
    // Strip optional "async".
    let rest = if let Some(r) = rest.strip_prefix("async ") {
        r.trim_start()
    } else {
        rest
    };
    // `function() {}` written without a space would miss the "function "
    // prefix match below — recognise it as an anonymous function here.
    if rest.starts_with("function(") {
        return Some(("function".to_string(), "default".to_string()));
    }
    let (kind, name_part) = if let Some(r) = rest.strip_prefix("function ") {
        ("function", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("class ") {
        ("class", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("type ") {
        ("type", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("interface ") {
        ("interface", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("const ") {
        ("const", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("let ") {
        ("let", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("var ") {
        // `export var` is legacy but still legal TypeScript/JavaScript.
        ("var", r.trim_start())
    } else if let Some(r) = rest.strip_prefix("enum ") {
        ("enum", r.trim_start())
    } else {
        return None;
    };
    let name: String = name_part
        .chars()
        .take_while(|&c| c.is_alphanumeric() || c == '_')
        .collect();
    if name.is_empty() {
        // "export default function () {}" — anonymous default export.
        return Some((kind.to_string(), "default".to_string()));
    }
    Some((kind.to_string(), name))
}
/// Return `true` if a JSDoc comment appears before the item at `item_idx`.
///
/// Scans backward, skipping blank lines and decorator lines (`@…`). Returns
/// `true` if the first substantive line ends with `*/` (closing a JSDoc block)
/// or starts with `/**` (single-line JSDoc).
fn has_jsdoc_before(lines: &[&str], item_idx: usize) -> bool {
    for raw in lines[..item_idx].iter().rev() {
        let line = raw.trim();
        // Blank lines and decorators may sit between the JSDoc and the item.
        if line.is_empty() || line.starts_with('@') {
            continue;
        }
        return line.ends_with("*/") || line.starts_with("/**");
    }
    false
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Write `content` to `dir/name` and return the full path.
    fn write_ts(dir: &Path, name: &str, content: &str) -> std::path::PathBuf {
        let path = dir.join(name);
        std::fs::write(&path, content).unwrap();
        path
    }

    // Fully documented file: file-level JSDoc plus JSDoc on every export.
    #[test]
    fn check_fully_documented_file_returns_ok() {
        let tmp = TempDir::new().unwrap();
        let path = write_ts(
            tmp.path(),
            "app.ts",
            "/**\n * File doc.\n */\n\n/** Does something. */\nexport function hello(): void {}\n",
        );
        let adapter = TypeScriptAdapter;
        assert_eq!(adapter.check(&[&path]), CheckResult::Ok);
    }

    // File-level check keys off the FIRST non-empty line only.
    #[test]
    fn check_detects_missing_file_jsdoc() {
        let tmp = TempDir::new().unwrap();
        let path = write_ts(
            tmp.path(),
            "app.ts",
            "/** Does something. */\nexport function hello(): void {}\n",
        );
        // First non-empty line IS "/**", so this file passes the file-level check.
        // Use a file that starts with code instead.
        let path2 = write_ts(
            tmp.path(),
            "app2.ts",
            "import { foo } from './foo';\n/** A function. */\nexport function hello(): void {}\n",
        );
        let adapter = TypeScriptAdapter;
        let result = adapter.check(&[&path2]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "file")),
            "expected file failure, got {result:?}"
        );
        // The first file (starts with /**) should pass the file-level check
        let result2 = adapter.check(&[&path]);
        // It may still fail on the export if there's no separate export doc,
        // but the file-level check itself should pass (first line is /**)
        assert!(
            !matches!(&result2, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "file")),
            "file starting with /** should not have file-level failure"
        );
    }

    // An undocumented export reports kind, name, and file path.
    #[test]
    fn check_detects_missing_export_jsdoc_with_correct_fields() {
        let tmp = TempDir::new().unwrap();
        let path = write_ts(
            tmp.path(),
            "app.ts",
            "/**\n * File doc.\n */\n\nexport function undocumented(): void {}\n",
        );
        let adapter = TypeScriptAdapter;
        let result = adapter.check(&[&path]);
        if let CheckResult::Failures(failures) = result {
            let f = failures.iter().find(|f| f.item_kind == "function").unwrap();
            assert_eq!(f.item_name, "undocumented");
            assert_eq!(f.file_path, path);
        } else {
            panic!("expected failures");
        }
    }

    // Line-level parser coverage: export forms and negatives.
    #[test]
    fn parse_exported_item_recognises_various_kinds() {
        assert_eq!(
            parse_exported_item("export function foo()"),
            Some(("function".into(), "foo".into()))
        );
        assert_eq!(
            parse_exported_item("export async function bar()"),
            Some(("function".into(), "bar".into()))
        );
        assert_eq!(
            parse_exported_item("export class Baz"),
            Some(("class".into(), "Baz".into()))
        );
        assert_eq!(
            parse_exported_item("export type Qux = string;"),
            Some(("type".into(), "Qux".into()))
        );
        assert_eq!(
            parse_exported_item("export interface IFoo"),
            Some(("interface".into(), "IFoo".into()))
        );
        assert_eq!(
            parse_exported_item("export const MY_CONST = 1;"),
            Some(("const".into(), "MY_CONST".into()))
        );
        assert_eq!(parse_exported_item("function notExported()"), None);
        assert_eq!(parse_exported_item("const x = 1;"), None);
    }
}
+1
View File
@@ -41,6 +41,7 @@ libsqlite3-sys = { version = "0.35.0", features = ["bundled"] }
sqlx = { workspace = true }
wait-timeout = "0.2.1"
bft-json-crdt = { path = "../crates/bft-json-crdt", default-features = false, features = ["bft"] }
source-map-gen = { path = "../crates/source-map-gen" }
ed25519-dalek = { version = "2", features = ["rand_core"] }
fastcrypto = "0.1.8"
rand = "0.8"
+25
View File
@@ -102,6 +102,23 @@ pub(super) async fn run_agent_spawn(
}
};
// Step 1.5: Update the source map for changed files since master.
// Non-blocking — failures are logged but do not gate the spawn.
{
let wt_path_for_map = wt_info.path.clone();
let base_for_map = wt_info.base_branch.clone();
let map_path = project_root_clone.join(".huskies").join("source-map.json");
match tokio::task::spawn_blocking(move || {
source_map_gen::update_for_worktree(&wt_path_for_map, &base_for_map, &map_path)
})
.await
.unwrap_or_else(|e| Err(e.to_string()))
{
Ok(()) => {}
Err(e) => slog_error!("[agents] source map update for {sid}: {e}"),
}
}
// Step 2: store worktree info and render agent command/args/prompt.
let wt_path_str = wt_info.path.to_string_lossy().to_string();
{
@@ -151,6 +168,14 @@ pub(super) async fn run_agent_spawn(
prompt.push_str(&local);
}
// Append a reference to the source map if the file was written.
let source_map_path = project_root_clone.join(".huskies").join("source-map.json");
if source_map_path.exists() {
prompt.push_str(
"\n\nA source map of well-documented changed files is at `.huskies/source-map.json`.",
);
}
// Build the effective prompt and determine resume session.
//
// When resuming a previous session, discard the full rendered prompt