huskies: merge 819

This commit is contained in:
dave
2026-04-28 20:22:22 +00:00
parent b060d8fc88
commit f5ab75ecaa
8 changed files with 1056 additions and 1 deletions
+442
View File
@@ -0,0 +1,442 @@
//! LLM-friendly source map generation and documentation coverage checking.
//!
//! Provides a [`LanguageAdapter`] trait that language-specific adapters implement,
//! plus top-level dispatcher functions that route to the right adapter based on file
//! extension (`.rs` → [`RustAdapter`], `.ts`/`.tsx` → [`TypeScriptAdapter`]).
//!
//! The entry point for agent spawn integration is [`update_for_worktree`], which
//! runs `git diff --name-only` to find changed files and updates the source map for
//! those that pass the documentation coverage check.
mod rust_adapter;
mod ts_adapter;
pub use rust_adapter::RustAdapter;
pub use ts_adapter::TypeScriptAdapter;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::process::Command;
/// A missing documentation failure for a single public item.
///
/// Produced by [`LanguageAdapter::check`]; convert to an actionable
/// instruction with [`CheckFailure::to_direction`].
#[derive(Debug, Clone, PartialEq)]
pub struct CheckFailure {
    /// Path to the file containing the undocumented item.
    pub file_path: PathBuf,
    /// 1-based line number of the item declaration.
    pub line: usize,
    /// Kind of item (e.g. `"fn"`, `"struct"`, `"module"`).
    pub item_kind: String,
    /// Name of the item.
    pub item_name: String,
}
impl CheckFailure {
    /// Renders this failure as a single direction a coding agent can act on.
    ///
    /// Output shape: `path:line: add a doc comment to <kind> `<name>``.
    pub fn to_direction(&self) -> String {
        let location = format!("{}:{}", self.file_path.display(), self.line);
        format!(
            "{location}: add a doc comment to {} `{}`",
            self.item_kind, self.item_name
        )
    }
}
/// Result of a documentation coverage check.
///
/// Returned by [`LanguageAdapter::check`] and [`check_files`].
#[derive(Debug, Clone, PartialEq)]
pub enum CheckResult {
    /// All checked items are documented.
    Ok,
    /// One or more items are missing documentation; one entry per item.
    Failures(Vec<CheckFailure>),
}
/// Language-specific adapter for doc-coverage checking and source map generation.
///
/// Implemented by [`RustAdapter`] and [`TypeScriptAdapter`]; dispatched to by
/// extension via the top-level [`check_files`] and [`update_source_map`].
pub trait LanguageAdapter {
    /// Check documentation coverage for `files`.
    ///
    /// Returns [`CheckResult::Ok`] when every public item in every file has a doc
    /// comment, or [`CheckResult::Failures`] listing each undocumented item as a
    /// direction the coding agent can act on.
    fn check(&self, files: &[&Path]) -> CheckResult;
    /// Update the source map at `source_map_path` with entries for `passing_files`.
    ///
    /// Reads the existing map, updates only the entries for the provided files, and
    /// writes back. Entries for files not in `passing_files` are preserved unchanged.
    /// Running twice with the same input produces identical file content (idempotent).
    /// Errors are reported as human-readable `Err(String)` messages.
    fn update_source_map(
        &self,
        passing_files: &[&Path],
        source_map_path: &Path,
    ) -> Result<(), String>;
}
/// Looks up the [`LanguageAdapter`] responsible for files with extension `ext`.
///
/// Returns `None` for extensions no adapter handles.
fn adapter_for_ext(ext: &str) -> Option<Box<dyn LanguageAdapter>> {
    let adapter: Box<dyn LanguageAdapter> = match ext {
        "rs" => Box::new(RustAdapter),
        "ts" | "tsx" => Box::new(TypeScriptAdapter),
        _ => return None,
    };
    Some(adapter)
}
/// Check documentation coverage for a mixed list of files.
///
/// Dispatches each file to the appropriate [`LanguageAdapter`] based on its
/// extension. Files with unsupported extensions are silently skipped.
pub fn check_files(files: &[&Path]) -> CheckResult {
let mut by_ext: HashMap<String, Vec<&Path>> = HashMap::new();
for &file in files {
if let Some(ext) = file.extension().and_then(|e| e.to_str()) {
by_ext.entry(ext.to_string()).or_default().push(file);
}
}
let mut all_failures = Vec::new();
for (ext, ext_files) in &by_ext {
if let Some(adapter) = adapter_for_ext(ext)
&& let CheckResult::Failures(mut f) = adapter.check(ext_files)
{
all_failures.append(&mut f);
}
}
if all_failures.is_empty() {
CheckResult::Ok
} else {
CheckResult::Failures(all_failures)
}
}
/// Update the source map at `source_map_path` with entries for `passing_files`.
///
/// Groups the files by extension and delegates each group to the matching
/// [`LanguageAdapter`]. Files with unsupported extensions are silently
/// skipped. Returns the first adapter error, if any.
pub fn update_source_map(passing_files: &[&Path], source_map_path: &Path) -> Result<(), String> {
    // Group paths by extension so each adapter updates the map in one call.
    let mut grouped: HashMap<String, Vec<&Path>> = HashMap::new();
    for &path in passing_files {
        let Some(ext) = path.extension().and_then(|e| e.to_str()) else {
            continue;
        };
        grouped.entry(ext.to_owned()).or_default().push(path);
    }
    for (ext, group) in &grouped {
        let Some(adapter) = adapter_for_ext(ext) else {
            continue;
        };
        adapter.update_source_map(group, source_map_path)?;
    }
    Ok(())
}
/// Update the source map for files that changed since `base_branch` in `worktree_path`.
///
/// 1. Runs `git diff --name-only {base_branch}...HEAD` in the worktree.
/// 2. Checks doc coverage for each changed file individually.
/// 3. Calls [`update_source_map`] with the files whose coverage check passes.
///
/// Errors are returned as `Err(String)`; callers in the spawn flow treat them as
/// non-blocking warnings.
pub fn update_for_worktree(
    worktree_path: &Path,
    base_branch: &str,
    source_map_path: &Path,
) -> Result<(), String> {
    let range = format!("{base_branch}...HEAD");
    let diff = Command::new("git")
        .args(["diff", "--name-only", &range])
        .current_dir(worktree_path)
        .output()
        .map_err(|e| format!("git diff: {e}"))?;
    if !diff.status.success() {
        let stderr = String::from_utf8_lossy(&diff.stderr);
        return Err(format!("git diff failed: {}", stderr.trim()));
    }
    // Diff output is repo-relative; resolve against the worktree and drop
    // paths that no longer exist (e.g. deleted files).
    let stdout = String::from_utf8_lossy(&diff.stdout);
    let changed: Vec<PathBuf> = stdout
        .lines()
        .filter(|line| !line.is_empty())
        .map(|line| worktree_path.join(line))
        .filter(|candidate| candidate.exists())
        .collect();
    if changed.is_empty() {
        return Ok(());
    }
    // Keep only files that individually pass the doc-coverage check.
    let mut passing: Vec<&Path> = Vec::new();
    for path in &changed {
        if matches!(check_files(&[path.as_path()]), CheckResult::Ok) {
            passing.push(path.as_path());
        }
    }
    if passing.is_empty() {
        return Ok(());
    }
    if let Some(dir) = source_map_path.parent() {
        std::fs::create_dir_all(dir).map_err(|e| format!("create_dir_all: {e}"))?;
    }
    update_source_map(&passing, source_map_path)
}
/// Read the existing source map from `path` as a JSON object.
///
/// Returns an empty map if the file does not exist.
///
/// # Errors
///
/// Returns `Err` when the file exists but cannot be read, or when its
/// content is not a JSON object.
pub(crate) fn read_map(path: &Path) -> Result<serde_json::Map<String, serde_json::Value>, String> {
    // Read first and inspect the error kind instead of calling `exists()`
    // up front: this avoids a TOCTOU race where the file is removed between
    // the existence check and the read.
    let content = match std::fs::read_to_string(path) {
        Ok(content) => content,
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            return Ok(serde_json::Map::new());
        }
        Err(e) => return Err(format!("read {}: {e}", path.display())),
    };
    serde_json::from_str(&content).map_err(|e| format!("parse source map: {e}"))
}
/// Write `map` to `path` as pretty-printed JSON.
///
/// Serialization and I/O errors are returned as human-readable strings.
pub(crate) fn write_map(
    path: &Path,
    map: serde_json::Map<String, serde_json::Value>,
) -> Result<(), String> {
    let json = serde_json::Value::Object(map);
    let content =
        serde_json::to_string_pretty(&json).map_err(|e| format!("serialize: {e}"))?;
    std::fs::write(path, content).map_err(|e| format!("write {}: {e}", path.display()))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::process::Command;
    use tempfile::TempDir;
    /// Writes `content` to `dir/name` and returns the full path.
    fn write_file(dir: &Path, name: &str, content: &str) -> PathBuf {
        let path = dir.join(name);
        std::fs::write(&path, content).unwrap();
        path
    }
    /// Writes a Rust fixture file.
    fn write_rs(dir: &Path, name: &str, content: &str) -> PathBuf {
        write_file(dir, name, content)
    }
    /// Writes a TypeScript fixture file.
    fn write_ts(dir: &Path, name: &str, content: &str) -> PathBuf {
        write_file(dir, name, content)
    }
    // --- Rust happy path ---
    #[test]
    fn rust_check_happy_path_ok() {
        let tmp = TempDir::new().unwrap();
        let path = write_rs(
            tmp.path(),
            "foo.rs",
            "//! Module doc.\n\n/// A function.\npub fn hello() {}\n",
        );
        assert_eq!(check_files(&[&path]), CheckResult::Ok);
    }
    // --- Rust failure path ---
    #[test]
    fn rust_check_missing_module_doc_yields_failure() {
        let tmp = TempDir::new().unwrap();
        let path = write_rs(tmp.path(), "foo.rs", "/// A function.\npub fn hello() {}\n");
        let result = check_files(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "module")),
            "expected module failure, got {result:?}"
        );
    }
    #[test]
    fn rust_check_missing_fn_doc_yields_failure_with_correct_fields() {
        let tmp = TempDir::new().unwrap();
        let path = write_rs(
            tmp.path(),
            "bar.rs",
            "//! Module doc.\n\npub fn undocumented() {}\n",
        );
        let result = check_files(&[&path]);
        if let CheckResult::Failures(failures) = result {
            let f = failures.iter().find(|f| f.item_kind == "fn").unwrap();
            assert_eq!(f.item_name, "undocumented");
            assert_eq!(f.file_path, path);
            assert_eq!(f.line, 3);
        } else {
            panic!("expected failures");
        }
    }
    // --- TypeScript happy path ---
    #[test]
    fn ts_check_happy_path_ok() {
        let tmp = TempDir::new().unwrap();
        let path = write_ts(
            tmp.path(),
            "app.ts",
            "/**\n * File doc.\n */\n\n/**\n * Does something.\n */\nexport function hello(): void {}\n",
        );
        assert_eq!(check_files(&[&path]), CheckResult::Ok);
    }
    // --- TypeScript failure path ---
    #[test]
    fn ts_check_missing_file_doc_yields_failure() {
        let tmp = TempDir::new().unwrap();
        // First non-empty line is not a `/**` file-level JSDoc, so the
        // "file" check must fail.
        let path = write_ts(tmp.path(), "app.ts", "export function hello(): void {}\n");
        let result = check_files(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "file")),
            "expected file failure, got {result:?}"
        );
    }
    #[test]
    fn ts_check_missing_export_doc_yields_failure() {
        let tmp = TempDir::new().unwrap();
        let path = write_ts(
            tmp.path(),
            "app.ts",
            "/**\n * File doc.\n */\n\nexport function undocumented(): void {}\n",
        );
        let result = check_files(&[&path]);
        assert!(
            matches!(&result, CheckResult::Failures(v) if v.iter().any(|f| f.item_kind == "function" && f.item_name == "undocumented")),
            "expected function failure, got {result:?}"
        );
    }
    // --- Update idempotency ---
    #[test]
    fn update_idempotent_same_input_twice() {
        let tmp = TempDir::new().unwrap();
        let rs_path = write_rs(
            tmp.path(),
            "lib.rs",
            "//! Module doc.\n\n/// A function.\npub fn foo() {}\n",
        );
        let map_path = tmp.path().join("source-map.json");
        let files: &[&Path] = &[&rs_path];
        update_source_map(files, &map_path).unwrap();
        let first = std::fs::read_to_string(&map_path).unwrap();
        update_source_map(files, &map_path).unwrap();
        let second = std::fs::read_to_string(&map_path).unwrap();
        assert_eq!(first, second, "update_source_map must be idempotent");
    }
    // --- update_source_map preserves other entries ---
    #[test]
    fn update_source_map_preserves_unrelated_entries() {
        let tmp = TempDir::new().unwrap();
        let map_path = tmp.path().join("source-map.json");
        // Write an initial map with an unrelated entry
        std::fs::write(&map_path, r#"{"unrelated/file.rs": ["fn old"]}"#).unwrap();
        let rs_path = write_rs(
            tmp.path(),
            "new.rs",
            "//! Module doc.\n\n/// A function.\npub fn bar() {}\n",
        );
        update_source_map(&[&rs_path], &map_path).unwrap();
        let content = std::fs::read_to_string(&map_path).unwrap();
        assert!(
            content.contains("unrelated/file.rs"),
            "old entry should be preserved"
        );
        assert!(content.contains("new.rs"), "new entry should be added");
    }
    // --- Spawn integration: update_for_worktree writes map at expected path ---
    /// Initializes a git repo in `dir` with an empty initial commit.
    fn init_git_repo(dir: &Path) {
        Command::new("git")
            .args(["init"])
            .current_dir(dir)
            .output()
            .expect("git init");
        Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(dir)
            .output()
            .expect("git config email");
        Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(dir)
            .output()
            .expect("git config name");
        Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(dir)
            .output()
            .expect("initial commit");
    }
    #[test]
    fn spawn_integration_map_written_at_expected_path() {
        let tmp = TempDir::new().unwrap();
        init_git_repo(tmp.path());
        // Add a well-documented Rust file and commit it
        write_rs(
            tmp.path(),
            "lib.rs",
            "//! Module doc.\n\n/// A function.\npub fn greet() {}\n",
        );
        Command::new("git")
            .args(["add", "lib.rs"])
            .current_dir(tmp.path())
            .output()
            .expect("git add");
        Command::new("git")
            .args(["commit", "-m", "add lib.rs"])
            .current_dir(tmp.path())
            .output()
            .expect("git commit");
        let huskies_dir = tmp.path().join(".huskies");
        std::fs::create_dir_all(&huskies_dir).unwrap();
        let map_path = huskies_dir.join("source-map.json");
        // Simulate what spawn does: update_for_worktree with base = initial commit
        let result = update_for_worktree(tmp.path(), "HEAD~1", &map_path);
        assert!(
            result.is_ok(),
            "update_for_worktree failed: {:?}",
            result.err()
        );
        // The map file must exist at the expected path
        assert!(
            map_path.exists(),
            "source map must be written at .huskies/source-map.json"
        );
        let content = std::fs::read_to_string(&map_path).unwrap();
        assert!(
            content.contains("lib.rs"),
            "map must contain the documented file"
        );
        assert!(
            content.contains("fn greet"),
            "map must list the documented function"
        );
    }
}