refactor: split http/mcp/diagnostics.rs (861) into mod + permission + usage
The 861-line diagnostics.rs is split: - permission.rs: tool_prompt_permission + helpers + their tests (584 lines) - usage.rs: tool_get_token_usage + tests (122 lines) - mod.rs: server_logs, rebuild, version, loc_file, dump_crdt, move_story + tests (185 lines) Tests stay co-located. The bigger sub-modules (permission at 584 with tests mostly under 800; usage at 122) are well within the 800-line guide. Also added #[allow(unused_imports)] to two now-pedantic re-exports in service/diagnostics/mod.rs that the split made flag. All 2636 tests pass; clippy clean.
This commit is contained in:
@@ -0,0 +1,179 @@
|
|||||||
|
//! MCP diagnostic tools — server logs, CRDT dump, version, line counting, story movement.
|
||||||
|
|
||||||
|
use crate::agents::move_story_to_stage;
|
||||||
|
use crate::http::context::AppContext;
|
||||||
|
use crate::log_buffer;
|
||||||
|
use crate::slog;
|
||||||
|
use serde_json::{Value, json};
|
||||||
|
|
||||||
|
mod permission;
|
||||||
|
mod usage;
|
||||||
|
|
||||||
|
pub(crate) use permission::tool_prompt_permission;
|
||||||
|
pub(crate) use usage::tool_get_token_usage;
|
||||||
|
|
||||||
|
pub(crate) fn tool_get_server_logs(args: &Value) -> Result<String, String> {
|
||||||
|
let lines_count = args
|
||||||
|
.get("lines")
|
||||||
|
.and_then(|v| v.as_u64())
|
||||||
|
.map(|n| n.min(1000) as usize)
|
||||||
|
.unwrap_or(100);
|
||||||
|
let filter = args.get("filter").and_then(|v| v.as_str());
|
||||||
|
let severity = args
|
||||||
|
.get("severity")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.and_then(log_buffer::LogLevel::from_str_ci);
|
||||||
|
|
||||||
|
let recent = log_buffer::global().get_recent(lines_count, filter, severity.as_ref());
|
||||||
|
let joined = recent.join("\n");
|
||||||
|
// Clamp to lines_count actual lines in case any entry contains embedded newlines.
|
||||||
|
let all_lines: Vec<&str> = joined.lines().collect();
|
||||||
|
let start = all_lines.len().saturating_sub(lines_count);
|
||||||
|
Ok(all_lines[start..].join("\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Rebuild the server binary and re-exec (delegates to `crate::rebuild`).
|
||||||
|
pub(crate) async fn tool_rebuild_and_restart(ctx: &AppContext) -> Result<String, String> {
|
||||||
|
slog!("[rebuild] Rebuild and restart requested via MCP tool");
|
||||||
|
|
||||||
|
// Signal the Matrix bot (if active) so it can send its own shutdown
|
||||||
|
// announcement before the process is replaced. Best-effort: we wait up
|
||||||
|
// to 1.5 s for the message to be delivered.
|
||||||
|
if let Some(ref tx) = ctx.matrix_shutdown_tx {
|
||||||
|
let _ = tx.send(Some(crate::rebuild::ShutdownReason::Rebuild));
|
||||||
|
tokio::time::sleep(std::time::Duration::from_millis(1500)).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
let project_root = ctx.state.get_project_root().unwrap_or_default();
|
||||||
|
let notifier = ctx.bot_shutdown.as_deref();
|
||||||
|
crate::rebuild::rebuild_and_restart(&ctx.services.agents, &project_root, notifier).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP tool called by Claude Code via `--permission-prompt-tool`.
|
||||||
|
///
|
||||||
|
/// Forwards the permission request through the shared channel to the active
|
||||||
|
/// WebSocket session, which presents a dialog to the user. Blocks until the
|
||||||
|
/// user approves or denies (with a 5-minute timeout).
|
||||||
|
pub(crate) fn tool_move_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||||
|
let story_id = args
|
||||||
|
.get("story_id")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.ok_or("Missing required argument: story_id")?;
|
||||||
|
let target_stage = args
|
||||||
|
.get("target_stage")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.ok_or("Missing required argument: target_stage")?;
|
||||||
|
|
||||||
|
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
||||||
|
|
||||||
|
let (from_stage, to_stage) = move_story_to_stage(&project_root, story_id, target_stage)?;
|
||||||
|
|
||||||
|
serde_json::to_string_pretty(&json!({
|
||||||
|
"story_id": story_id,
|
||||||
|
"from_stage": from_stage,
|
||||||
|
"to_stage": to_stage,
|
||||||
|
"message": format!("Work item '{story_id}' moved from '{from_stage}' to '{to_stage}'.")
|
||||||
|
}))
|
||||||
|
.map_err(|e| format!("Serialization error: {e}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP tool: dump the raw in-memory CRDT state for debugging.
|
||||||
|
///
|
||||||
|
/// **Debug tool only** — for normal pipeline introspection use `get_pipeline_status`.
|
||||||
|
pub(crate) fn tool_dump_crdt(args: &Value) -> Result<String, String> {
|
||||||
|
let story_id_filter = args.get("story_id").and_then(|v| v.as_str());
|
||||||
|
let dump = crate::crdt_state::dump_crdt_state(story_id_filter);
|
||||||
|
|
||||||
|
let items: Vec<Value> = dump
|
||||||
|
.items
|
||||||
|
.into_iter()
|
||||||
|
.map(|item| {
|
||||||
|
json!({
|
||||||
|
"story_id": item.story_id,
|
||||||
|
"stage": item.stage,
|
||||||
|
"name": item.name,
|
||||||
|
"agent": item.agent,
|
||||||
|
"retry_count": item.retry_count,
|
||||||
|
"blocked": item.blocked,
|
||||||
|
"depends_on": item.depends_on,
|
||||||
|
"claimed_by": item.claimed_by,
|
||||||
|
"claimed_at": item.claimed_at,
|
||||||
|
"content_index": item.content_index,
|
||||||
|
"is_deleted": item.is_deleted,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
serde_json::to_string_pretty(&json!({
|
||||||
|
"metadata": {
|
||||||
|
"in_memory_state_loaded": dump.in_memory_state_loaded,
|
||||||
|
"total_items": dump.total_items,
|
||||||
|
"total_ops_in_list": dump.total_ops_in_list,
|
||||||
|
"max_seq_in_list": dump.max_seq_in_list,
|
||||||
|
"persisted_ops_count": dump.persisted_ops_count,
|
||||||
|
"pending_persist_ops_count": null,
|
||||||
|
},
|
||||||
|
"items": items,
|
||||||
|
}))
|
||||||
|
.map_err(|e| format!("Serialization error: {e}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP tool: return the server version, build hash, and running port.
|
||||||
|
pub(crate) fn tool_get_version(ctx: &AppContext) -> Result<String, String> {
|
||||||
|
let build_hash =
|
||||||
|
std::fs::read_to_string(".huskies/build_hash").unwrap_or_else(|_| "unknown".to_string());
|
||||||
|
serde_json::to_string_pretty(&json!({
|
||||||
|
"version": env!("CARGO_PKG_VERSION"),
|
||||||
|
"build_hash": build_hash.trim(),
|
||||||
|
"port": ctx.services.agents.port(),
|
||||||
|
}))
|
||||||
|
.map_err(|e| format!("Serialization error: {e}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP tool: count lines in a specific file relative to the project root.
|
||||||
|
pub(crate) fn tool_loc_file(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||||
|
let file_path = args
|
||||||
|
.get("file_path")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.ok_or_else(|| "Missing required argument: file_path".to_string())?;
|
||||||
|
|
||||||
|
let project_root = ctx.state.get_project_root()?;
|
||||||
|
Ok(crate::chat::commands::loc::loc_single_file(
|
||||||
|
&project_root,
|
||||||
|
file_path,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_server_logs_no_args_returns_string() {
|
||||||
|
let result = tool_get_server_logs(&json!({})).unwrap();
|
||||||
|
// Returns recent log lines (possibly empty in tests) — just verify no panic
|
||||||
|
let _ = result;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_server_logs_with_filter_returns_matching_lines() {
|
||||||
|
let result = tool_get_server_logs(&json!({"filter": "xyz_unlikely_match_999"})).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
result, "",
|
||||||
|
"filter with no matches should return empty string"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_server_logs_with_line_limit() {
|
||||||
|
let result = tool_get_server_logs(&json!({"lines": 5})).unwrap();
|
||||||
|
assert!(result.lines().count() <= 5);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_server_logs_max_cap_is_1000() {
|
||||||
|
// Lines > 1000 are capped — just verify it returns without error
|
||||||
|
let result = tool_get_server_logs(&json!({"lines": 9999})).unwrap();
|
||||||
|
let _ = result;
|
||||||
|
}
|
||||||
|
}
|
||||||
+20
-293
@@ -1,60 +1,13 @@
|
|||||||
//! MCP diagnostic tools — server logs, CRDT dump, and story movement helpers.
|
//! MCP permission-prompt tool (`tool_prompt_permission`) and rule helpers.
|
||||||
//!
|
|
||||||
//! This file is a thin adapter: it deserialises MCP payloads, delegates to
|
use serde_json::{Value, json};
|
||||||
//! `crate::service::diagnostics` for all business logic, and serialises responses.
|
|
||||||
use crate::agents::move_story_to_stage;
|
|
||||||
use crate::http::context::AppContext;
|
use crate::http::context::AppContext;
|
||||||
use crate::log_buffer;
|
|
||||||
use crate::service::diagnostics::{add_permission_rule, generate_permission_rule};
|
use crate::service::diagnostics::{add_permission_rule, generate_permission_rule};
|
||||||
use crate::slog;
|
use crate::slog;
|
||||||
use crate::slog_warn;
|
use crate::slog_warn;
|
||||||
use serde_json::{Value, json};
|
|
||||||
#[allow(unused_imports)]
|
|
||||||
use std::fs;
|
|
||||||
|
|
||||||
pub(super) fn tool_get_server_logs(args: &Value) -> Result<String, String> {
|
pub(crate) async fn tool_prompt_permission(
|
||||||
let lines_count = args
|
|
||||||
.get("lines")
|
|
||||||
.and_then(|v| v.as_u64())
|
|
||||||
.map(|n| n.min(1000) as usize)
|
|
||||||
.unwrap_or(100);
|
|
||||||
let filter = args.get("filter").and_then(|v| v.as_str());
|
|
||||||
let severity = args
|
|
||||||
.get("severity")
|
|
||||||
.and_then(|v| v.as_str())
|
|
||||||
.and_then(log_buffer::LogLevel::from_str_ci);
|
|
||||||
|
|
||||||
let recent = log_buffer::global().get_recent(lines_count, filter, severity.as_ref());
|
|
||||||
let joined = recent.join("\n");
|
|
||||||
// Clamp to lines_count actual lines in case any entry contains embedded newlines.
|
|
||||||
let all_lines: Vec<&str> = joined.lines().collect();
|
|
||||||
let start = all_lines.len().saturating_sub(lines_count);
|
|
||||||
Ok(all_lines[start..].join("\n"))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Rebuild the server binary and re-exec (delegates to `crate::rebuild`).
|
|
||||||
pub(super) async fn tool_rebuild_and_restart(ctx: &AppContext) -> Result<String, String> {
|
|
||||||
slog!("[rebuild] Rebuild and restart requested via MCP tool");
|
|
||||||
|
|
||||||
// Signal the Matrix bot (if active) so it can send its own shutdown
|
|
||||||
// announcement before the process is replaced. Best-effort: we wait up
|
|
||||||
// to 1.5 s for the message to be delivered.
|
|
||||||
if let Some(ref tx) = ctx.matrix_shutdown_tx {
|
|
||||||
let _ = tx.send(Some(crate::rebuild::ShutdownReason::Rebuild));
|
|
||||||
tokio::time::sleep(std::time::Duration::from_millis(1500)).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
let project_root = ctx.state.get_project_root().unwrap_or_default();
|
|
||||||
let notifier = ctx.bot_shutdown.as_deref();
|
|
||||||
crate::rebuild::rebuild_and_restart(&ctx.services.agents, &project_root, notifier).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// MCP tool called by Claude Code via `--permission-prompt-tool`.
|
|
||||||
///
|
|
||||||
/// Forwards the permission request through the shared channel to the active
|
|
||||||
/// WebSocket session, which presents a dialog to the user. Blocks until the
|
|
||||||
/// user approves or denies (with a 5-minute timeout).
|
|
||||||
pub(super) async fn tool_prompt_permission(
|
|
||||||
args: &Value,
|
args: &Value,
|
||||||
ctx: &AppContext,
|
ctx: &AppContext,
|
||||||
) -> Result<String, String> {
|
) -> Result<String, String> {
|
||||||
@@ -157,238 +110,11 @@ pub(super) async fn tool_prompt_permission(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn tool_get_token_usage(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
||||||
let root = ctx.state.get_project_root()?;
|
|
||||||
let filter_story = args.get("story_id").and_then(|v| v.as_str());
|
|
||||||
|
|
||||||
let all_records = crate::agents::token_usage::read_all(&root)?;
|
|
||||||
let records: Vec<_> = all_records
|
|
||||||
.into_iter()
|
|
||||||
.filter(|r| filter_story.is_none_or(|s| r.story_id == s))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let total_cost: f64 = records.iter().map(|r| r.usage.total_cost_usd).sum();
|
|
||||||
let total_input: u64 = records.iter().map(|r| r.usage.input_tokens).sum();
|
|
||||||
let total_output: u64 = records.iter().map(|r| r.usage.output_tokens).sum();
|
|
||||||
let total_cache_create: u64 = records
|
|
||||||
.iter()
|
|
||||||
.map(|r| r.usage.cache_creation_input_tokens)
|
|
||||||
.sum();
|
|
||||||
let total_cache_read: u64 = records
|
|
||||||
.iter()
|
|
||||||
.map(|r| r.usage.cache_read_input_tokens)
|
|
||||||
.sum();
|
|
||||||
|
|
||||||
serde_json::to_string_pretty(&json!({
|
|
||||||
"records": records.iter().map(|r| json!({
|
|
||||||
"story_id": r.story_id,
|
|
||||||
"agent_name": r.agent_name,
|
|
||||||
"timestamp": r.timestamp,
|
|
||||||
"input_tokens": r.usage.input_tokens,
|
|
||||||
"output_tokens": r.usage.output_tokens,
|
|
||||||
"cache_creation_input_tokens": r.usage.cache_creation_input_tokens,
|
|
||||||
"cache_read_input_tokens": r.usage.cache_read_input_tokens,
|
|
||||||
"total_cost_usd": r.usage.total_cost_usd,
|
|
||||||
})).collect::<Vec<_>>(),
|
|
||||||
"totals": {
|
|
||||||
"records": records.len(),
|
|
||||||
"input_tokens": total_input,
|
|
||||||
"output_tokens": total_output,
|
|
||||||
"cache_creation_input_tokens": total_cache_create,
|
|
||||||
"cache_read_input_tokens": total_cache_read,
|
|
||||||
"total_cost_usd": total_cost,
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
.map_err(|e| format!("Serialization error: {e}"))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) fn tool_move_story(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
||||||
let story_id = args
|
|
||||||
.get("story_id")
|
|
||||||
.and_then(|v| v.as_str())
|
|
||||||
.ok_or("Missing required argument: story_id")?;
|
|
||||||
let target_stage = args
|
|
||||||
.get("target_stage")
|
|
||||||
.and_then(|v| v.as_str())
|
|
||||||
.ok_or("Missing required argument: target_stage")?;
|
|
||||||
|
|
||||||
let project_root = ctx.services.agents.get_project_root(&ctx.state)?;
|
|
||||||
|
|
||||||
let (from_stage, to_stage) = move_story_to_stage(&project_root, story_id, target_stage)?;
|
|
||||||
|
|
||||||
serde_json::to_string_pretty(&json!({
|
|
||||||
"story_id": story_id,
|
|
||||||
"from_stage": from_stage,
|
|
||||||
"to_stage": to_stage,
|
|
||||||
"message": format!("Work item '{story_id}' moved from '{from_stage}' to '{to_stage}'.")
|
|
||||||
}))
|
|
||||||
.map_err(|e| format!("Serialization error: {e}"))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// MCP tool: dump the raw in-memory CRDT state for debugging.
|
|
||||||
///
|
|
||||||
/// **Debug tool only** — for normal pipeline introspection use `get_pipeline_status`.
|
|
||||||
pub(super) fn tool_dump_crdt(args: &Value) -> Result<String, String> {
|
|
||||||
let story_id_filter = args.get("story_id").and_then(|v| v.as_str());
|
|
||||||
let dump = crate::crdt_state::dump_crdt_state(story_id_filter);
|
|
||||||
|
|
||||||
let items: Vec<Value> = dump
|
|
||||||
.items
|
|
||||||
.into_iter()
|
|
||||||
.map(|item| {
|
|
||||||
json!({
|
|
||||||
"story_id": item.story_id,
|
|
||||||
"stage": item.stage,
|
|
||||||
"name": item.name,
|
|
||||||
"agent": item.agent,
|
|
||||||
"retry_count": item.retry_count,
|
|
||||||
"blocked": item.blocked,
|
|
||||||
"depends_on": item.depends_on,
|
|
||||||
"claimed_by": item.claimed_by,
|
|
||||||
"claimed_at": item.claimed_at,
|
|
||||||
"content_index": item.content_index,
|
|
||||||
"is_deleted": item.is_deleted,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
serde_json::to_string_pretty(&json!({
|
|
||||||
"metadata": {
|
|
||||||
"in_memory_state_loaded": dump.in_memory_state_loaded,
|
|
||||||
"total_items": dump.total_items,
|
|
||||||
"total_ops_in_list": dump.total_ops_in_list,
|
|
||||||
"max_seq_in_list": dump.max_seq_in_list,
|
|
||||||
"persisted_ops_count": dump.persisted_ops_count,
|
|
||||||
"pending_persist_ops_count": null,
|
|
||||||
},
|
|
||||||
"items": items,
|
|
||||||
}))
|
|
||||||
.map_err(|e| format!("Serialization error: {e}"))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// MCP tool: return the server version, build hash, and running port.
|
|
||||||
pub(super) fn tool_get_version(ctx: &AppContext) -> Result<String, String> {
|
|
||||||
let build_hash =
|
|
||||||
std::fs::read_to_string(".huskies/build_hash").unwrap_or_else(|_| "unknown".to_string());
|
|
||||||
serde_json::to_string_pretty(&json!({
|
|
||||||
"version": env!("CARGO_PKG_VERSION"),
|
|
||||||
"build_hash": build_hash.trim(),
|
|
||||||
"port": ctx.services.agents.port(),
|
|
||||||
}))
|
|
||||||
.map_err(|e| format!("Serialization error: {e}"))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// MCP tool: count lines in a specific file relative to the project root.
|
|
||||||
pub(super) fn tool_loc_file(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
|
||||||
let file_path = args
|
|
||||||
.get("file_path")
|
|
||||||
.and_then(|v| v.as_str())
|
|
||||||
.ok_or_else(|| "Missing required argument: file_path".to_string())?;
|
|
||||||
|
|
||||||
let project_root = ctx.state.get_project_root()?;
|
|
||||||
Ok(crate::chat::commands::loc::loc_single_file(
|
|
||||||
&project_root,
|
|
||||||
file_path,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::http::test_helpers::test_ctx;
|
use crate::http::test_helpers::test_ctx;
|
||||||
|
use std::fs;
|
||||||
#[test]
|
|
||||||
fn tool_get_server_logs_no_args_returns_string() {
|
|
||||||
let result = tool_get_server_logs(&json!({})).unwrap();
|
|
||||||
// Returns recent log lines (possibly empty in tests) — just verify no panic
|
|
||||||
let _ = result;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tool_get_server_logs_with_filter_returns_matching_lines() {
|
|
||||||
let result = tool_get_server_logs(&json!({"filter": "xyz_unlikely_match_999"})).unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
result, "",
|
|
||||||
"filter with no matches should return empty string"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tool_get_server_logs_with_line_limit() {
|
|
||||||
let result = tool_get_server_logs(&json!({"lines": 5})).unwrap();
|
|
||||||
assert!(result.lines().count() <= 5);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tool_get_server_logs_max_cap_is_1000() {
|
|
||||||
// Lines > 1000 are capped — just verify it returns without error
|
|
||||||
let result = tool_get_server_logs(&json!({"lines": 9999})).unwrap();
|
|
||||||
let _ = result;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tool_get_token_usage_empty_returns_zero_totals() {
|
|
||||||
let tmp = tempfile::tempdir().unwrap();
|
|
||||||
let ctx = test_ctx(tmp.path());
|
|
||||||
let result = tool_get_token_usage(&json!({}), &ctx).unwrap();
|
|
||||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
|
||||||
assert_eq!(parsed["records"].as_array().unwrap().len(), 0);
|
|
||||||
assert_eq!(parsed["totals"]["records"], 0);
|
|
||||||
assert_eq!(parsed["totals"]["total_cost_usd"], 0.0);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tool_get_token_usage_returns_written_records() {
|
|
||||||
let tmp = tempfile::tempdir().unwrap();
|
|
||||||
let root = tmp.path();
|
|
||||||
let ctx = test_ctx(root);
|
|
||||||
|
|
||||||
let usage = crate::agents::TokenUsage {
|
|
||||||
input_tokens: 100,
|
|
||||||
output_tokens: 200,
|
|
||||||
cache_creation_input_tokens: 5000,
|
|
||||||
cache_read_input_tokens: 10000,
|
|
||||||
total_cost_usd: 1.57,
|
|
||||||
};
|
|
||||||
let record =
|
|
||||||
crate::agents::token_usage::build_record("42_story_foo", "coder-1", None, usage);
|
|
||||||
crate::agents::token_usage::append_record(root, &record).unwrap();
|
|
||||||
|
|
||||||
let result = tool_get_token_usage(&json!({}), &ctx).unwrap();
|
|
||||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
|
||||||
assert_eq!(parsed["records"].as_array().unwrap().len(), 1);
|
|
||||||
assert_eq!(parsed["records"][0]["story_id"], "42_story_foo");
|
|
||||||
assert_eq!(parsed["records"][0]["agent_name"], "coder-1");
|
|
||||||
assert_eq!(parsed["records"][0]["input_tokens"], 100);
|
|
||||||
assert_eq!(parsed["totals"]["records"], 1);
|
|
||||||
assert!((parsed["totals"]["total_cost_usd"].as_f64().unwrap() - 1.57).abs() < f64::EPSILON);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tool_get_token_usage_filters_by_story_id() {
|
|
||||||
let tmp = tempfile::tempdir().unwrap();
|
|
||||||
let root = tmp.path();
|
|
||||||
let ctx = test_ctx(root);
|
|
||||||
|
|
||||||
let usage = crate::agents::TokenUsage {
|
|
||||||
input_tokens: 50,
|
|
||||||
output_tokens: 60,
|
|
||||||
cache_creation_input_tokens: 0,
|
|
||||||
cache_read_input_tokens: 0,
|
|
||||||
total_cost_usd: 0.5,
|
|
||||||
};
|
|
||||||
let r1 =
|
|
||||||
crate::agents::token_usage::build_record("10_story_a", "coder-1", None, usage.clone());
|
|
||||||
let r2 = crate::agents::token_usage::build_record("20_story_b", "coder-2", None, usage);
|
|
||||||
crate::agents::token_usage::append_record(root, &r1).unwrap();
|
|
||||||
crate::agents::token_usage::append_record(root, &r2).unwrap();
|
|
||||||
|
|
||||||
let result = tool_get_token_usage(&json!({"story_id": "10_story_a"}), &ctx).unwrap();
|
|
||||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
|
||||||
assert_eq!(parsed["records"].as_array().unwrap().len(), 1);
|
|
||||||
assert_eq!(parsed["records"][0]["story_id"], "10_story_a");
|
|
||||||
assert_eq!(parsed["totals"]["records"], 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn tool_prompt_permission_auto_denies_without_interactive_session() {
|
async fn tool_prompt_permission_auto_denies_without_interactive_session() {
|
||||||
@@ -612,7 +338,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn rebuild_and_restart_in_tools_list() {
|
fn rebuild_and_restart_in_tools_list() {
|
||||||
use super::super::handle_tools_list;
|
use super::super::super::tools_list::handle_tools_list;
|
||||||
let resp = handle_tools_list(Some(json!(1)));
|
let resp = handle_tools_list(Some(json!(1)));
|
||||||
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
||||||
let tool = tools.iter().find(|t| t["name"] == "rebuild_and_restart");
|
let tool = tools.iter().find(|t| t["name"] == "rebuild_and_restart");
|
||||||
@@ -665,7 +391,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn move_story_in_tools_list() {
|
fn move_story_in_tools_list() {
|
||||||
use super::super::handle_tools_list;
|
use super::super::super::tools_list::handle_tools_list;
|
||||||
let resp = handle_tools_list(Some(json!(1)));
|
let resp = handle_tools_list(Some(json!(1)));
|
||||||
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
||||||
let tool = tools.iter().find(|t| t["name"] == "move_story");
|
let tool = tools.iter().find(|t| t["name"] == "move_story");
|
||||||
@@ -682,7 +408,7 @@ mod tests {
|
|||||||
fn tool_move_story_missing_story_id() {
|
fn tool_move_story_missing_story_id() {
|
||||||
let tmp = tempfile::tempdir().unwrap();
|
let tmp = tempfile::tempdir().unwrap();
|
||||||
let ctx = test_ctx(tmp.path());
|
let ctx = test_ctx(tmp.path());
|
||||||
let result = tool_move_story(&json!({"target_stage": "current"}), &ctx);
|
let result = super::super::tool_move_story(&json!({"target_stage": "current"}), &ctx);
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert!(result.unwrap_err().contains("story_id"));
|
assert!(result.unwrap_err().contains("story_id"));
|
||||||
}
|
}
|
||||||
@@ -691,7 +417,7 @@ mod tests {
|
|||||||
fn tool_move_story_missing_target_stage() {
|
fn tool_move_story_missing_target_stage() {
|
||||||
let tmp = tempfile::tempdir().unwrap();
|
let tmp = tempfile::tempdir().unwrap();
|
||||||
let ctx = test_ctx(tmp.path());
|
let ctx = test_ctx(tmp.path());
|
||||||
let result = tool_move_story(&json!({"story_id": "1_story_test"}), &ctx);
|
let result = super::super::tool_move_story(&json!({"story_id": "1_story_test"}), &ctx);
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert!(result.unwrap_err().contains("target_stage"));
|
assert!(result.unwrap_err().contains("target_stage"));
|
||||||
}
|
}
|
||||||
@@ -705,7 +431,7 @@ mod tests {
|
|||||||
fs::create_dir_all(&backlog).unwrap();
|
fs::create_dir_all(&backlog).unwrap();
|
||||||
fs::write(backlog.join("1_story_test.md"), "---\nname: Test\n---\n").unwrap();
|
fs::write(backlog.join("1_story_test.md"), "---\nname: Test\n---\n").unwrap();
|
||||||
let ctx = test_ctx(root);
|
let ctx = test_ctx(root);
|
||||||
let result = tool_move_story(
|
let result = super::super::tool_move_story(
|
||||||
&json!({"story_id": "1_story_test", "target_stage": "invalid"}),
|
&json!({"story_id": "1_story_test", "target_stage": "invalid"}),
|
||||||
&ctx,
|
&ctx,
|
||||||
);
|
);
|
||||||
@@ -727,7 +453,7 @@ mod tests {
|
|||||||
crate::db::write_content("5_story_test", content);
|
crate::db::write_content("5_story_test", content);
|
||||||
|
|
||||||
let ctx = test_ctx(root);
|
let ctx = test_ctx(root);
|
||||||
let result = tool_move_story(
|
let result = super::super::tool_move_story(
|
||||||
&json!({"story_id": "5_story_test", "target_stage": "current"}),
|
&json!({"story_id": "5_story_test", "target_stage": "current"}),
|
||||||
&ctx,
|
&ctx,
|
||||||
)
|
)
|
||||||
@@ -754,7 +480,7 @@ mod tests {
|
|||||||
crate::db::write_content("6_story_back", content);
|
crate::db::write_content("6_story_back", content);
|
||||||
|
|
||||||
let ctx = test_ctx(root);
|
let ctx = test_ctx(root);
|
||||||
let result = tool_move_story(
|
let result = super::super::tool_move_story(
|
||||||
&json!({"story_id": "6_story_back", "target_stage": "backlog"}),
|
&json!({"story_id": "6_story_back", "target_stage": "backlog"}),
|
||||||
&ctx,
|
&ctx,
|
||||||
)
|
)
|
||||||
@@ -781,7 +507,7 @@ mod tests {
|
|||||||
crate::db::write_content("9907_story_idem", content);
|
crate::db::write_content("9907_story_idem", content);
|
||||||
|
|
||||||
let ctx = test_ctx(root);
|
let ctx = test_ctx(root);
|
||||||
let result = tool_move_story(
|
let result = super::super::tool_move_story(
|
||||||
&json!({"story_id": "9907_story_idem", "target_stage": "current"}),
|
&json!({"story_id": "9907_story_idem", "target_stage": "current"}),
|
||||||
&ctx,
|
&ctx,
|
||||||
)
|
)
|
||||||
@@ -799,7 +525,7 @@ mod tests {
|
|||||||
fn tool_move_story_error_when_not_found() {
|
fn tool_move_story_error_when_not_found() {
|
||||||
let tmp = tempfile::tempdir().unwrap();
|
let tmp = tempfile::tempdir().unwrap();
|
||||||
let ctx = test_ctx(tmp.path());
|
let ctx = test_ctx(tmp.path());
|
||||||
let result = tool_move_story(
|
let result = super::super::tool_move_story(
|
||||||
&json!({"story_id": "99_story_ghost", "target_stage": "current"}),
|
&json!({"story_id": "99_story_ghost", "target_stage": "current"}),
|
||||||
&ctx,
|
&ctx,
|
||||||
);
|
);
|
||||||
@@ -815,7 +541,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn tool_dump_crdt_returns_valid_json() {
|
fn tool_dump_crdt_returns_valid_json() {
|
||||||
let result = tool_dump_crdt(&json!({})).unwrap();
|
let result = super::super::tool_dump_crdt(&json!({})).unwrap();
|
||||||
let parsed: Value = serde_json::from_str(&result).expect("result must be valid JSON");
|
let parsed: Value = serde_json::from_str(&result).expect("result must be valid JSON");
|
||||||
assert!(parsed["metadata"].is_object(), "must have metadata object");
|
assert!(parsed["metadata"].is_object(), "must have metadata object");
|
||||||
assert!(parsed["items"].is_array(), "must have items array");
|
assert!(parsed["items"].is_array(), "must have items array");
|
||||||
@@ -823,7 +549,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn tool_dump_crdt_metadata_has_required_fields() {
|
fn tool_dump_crdt_metadata_has_required_fields() {
|
||||||
let result = tool_dump_crdt(&json!({})).unwrap();
|
let result = super::super::tool_dump_crdt(&json!({})).unwrap();
|
||||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||||
let meta = &parsed["metadata"];
|
let meta = &parsed["metadata"];
|
||||||
assert!(meta["in_memory_state_loaded"].is_boolean());
|
assert!(meta["in_memory_state_loaded"].is_boolean());
|
||||||
@@ -835,14 +561,15 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn tool_dump_crdt_with_story_id_filter_returns_valid_json() {
|
fn tool_dump_crdt_with_story_id_filter_returns_valid_json() {
|
||||||
let result = tool_dump_crdt(&json!({"story_id": "9999_story_nonexistent"})).unwrap();
|
let result =
|
||||||
|
super::super::tool_dump_crdt(&json!({"story_id": "9999_story_nonexistent"})).unwrap();
|
||||||
let parsed: Value = serde_json::from_str(&result).unwrap();
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||||
assert!(parsed["items"].as_array().unwrap().is_empty());
|
assert!(parsed["items"].as_array().unwrap().is_empty());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn dump_crdt_in_tools_list() {
|
fn dump_crdt_in_tools_list() {
|
||||||
use super::super::handle_tools_list;
|
use super::super::super::tools_list::handle_tools_list;
|
||||||
let resp = handle_tools_list(Some(json!(1)));
|
let resp = handle_tools_list(Some(json!(1)));
|
||||||
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
let tools = resp.result.unwrap()["tools"].as_array().unwrap().clone();
|
||||||
let tool = tools.iter().find(|t| t["name"] == "dump_crdt");
|
let tool = tools.iter().find(|t| t["name"] == "dump_crdt");
|
||||||
@@ -0,0 +1,120 @@
|
|||||||
|
//! MCP token-usage reporting tool (`tool_get_token_usage`).
|
||||||
|
|
||||||
|
use serde_json::{Value, json};
|
||||||
|
|
||||||
|
use crate::http::context::AppContext;
|
||||||
|
|
||||||
|
pub(crate) fn tool_get_token_usage(args: &Value, ctx: &AppContext) -> Result<String, String> {
|
||||||
|
let root = ctx.state.get_project_root()?;
|
||||||
|
let filter_story = args.get("story_id").and_then(|v| v.as_str());
|
||||||
|
|
||||||
|
let all_records = crate::agents::token_usage::read_all(&root)?;
|
||||||
|
let records: Vec<_> = all_records
|
||||||
|
.into_iter()
|
||||||
|
.filter(|r| filter_story.is_none_or(|s| r.story_id == s))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let total_cost: f64 = records.iter().map(|r| r.usage.total_cost_usd).sum();
|
||||||
|
let total_input: u64 = records.iter().map(|r| r.usage.input_tokens).sum();
|
||||||
|
let total_output: u64 = records.iter().map(|r| r.usage.output_tokens).sum();
|
||||||
|
let total_cache_create: u64 = records
|
||||||
|
.iter()
|
||||||
|
.map(|r| r.usage.cache_creation_input_tokens)
|
||||||
|
.sum();
|
||||||
|
let total_cache_read: u64 = records
|
||||||
|
.iter()
|
||||||
|
.map(|r| r.usage.cache_read_input_tokens)
|
||||||
|
.sum();
|
||||||
|
|
||||||
|
serde_json::to_string_pretty(&json!({
|
||||||
|
"records": records.iter().map(|r| json!({
|
||||||
|
"story_id": r.story_id,
|
||||||
|
"agent_name": r.agent_name,
|
||||||
|
"timestamp": r.timestamp,
|
||||||
|
"input_tokens": r.usage.input_tokens,
|
||||||
|
"output_tokens": r.usage.output_tokens,
|
||||||
|
"cache_creation_input_tokens": r.usage.cache_creation_input_tokens,
|
||||||
|
"cache_read_input_tokens": r.usage.cache_read_input_tokens,
|
||||||
|
"total_cost_usd": r.usage.total_cost_usd,
|
||||||
|
})).collect::<Vec<_>>(),
|
||||||
|
"totals": {
|
||||||
|
"records": records.len(),
|
||||||
|
"input_tokens": total_input,
|
||||||
|
"output_tokens": total_output,
|
||||||
|
"cache_creation_input_tokens": total_cache_create,
|
||||||
|
"cache_read_input_tokens": total_cache_read,
|
||||||
|
"total_cost_usd": total_cost,
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
.map_err(|e| format!("Serialization error: {e}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::http::test_helpers::test_ctx;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_token_usage_empty_returns_zero_totals() {
|
||||||
|
let tmp = tempfile::tempdir().unwrap();
|
||||||
|
let ctx = test_ctx(tmp.path());
|
||||||
|
let result = tool_get_token_usage(&json!({}), &ctx).unwrap();
|
||||||
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||||
|
assert_eq!(parsed["records"].as_array().unwrap().len(), 0);
|
||||||
|
assert_eq!(parsed["totals"]["records"], 0);
|
||||||
|
assert_eq!(parsed["totals"]["total_cost_usd"], 0.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_token_usage_returns_written_records() {
|
||||||
|
let tmp = tempfile::tempdir().unwrap();
|
||||||
|
let root = tmp.path();
|
||||||
|
let ctx = test_ctx(root);
|
||||||
|
|
||||||
|
let usage = crate::agents::TokenUsage {
|
||||||
|
input_tokens: 100,
|
||||||
|
output_tokens: 200,
|
||||||
|
cache_creation_input_tokens: 5000,
|
||||||
|
cache_read_input_tokens: 10000,
|
||||||
|
total_cost_usd: 1.57,
|
||||||
|
};
|
||||||
|
let record =
|
||||||
|
crate::agents::token_usage::build_record("42_story_foo", "coder-1", None, usage);
|
||||||
|
crate::agents::token_usage::append_record(root, &record).unwrap();
|
||||||
|
|
||||||
|
let result = tool_get_token_usage(&json!({}), &ctx).unwrap();
|
||||||
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||||
|
assert_eq!(parsed["records"].as_array().unwrap().len(), 1);
|
||||||
|
assert_eq!(parsed["records"][0]["story_id"], "42_story_foo");
|
||||||
|
assert_eq!(parsed["records"][0]["agent_name"], "coder-1");
|
||||||
|
assert_eq!(parsed["records"][0]["input_tokens"], 100);
|
||||||
|
assert_eq!(parsed["totals"]["records"], 1);
|
||||||
|
assert!((parsed["totals"]["total_cost_usd"].as_f64().unwrap() - 1.57).abs() < f64::EPSILON);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_get_token_usage_filters_by_story_id() {
|
||||||
|
let tmp = tempfile::tempdir().unwrap();
|
||||||
|
let root = tmp.path();
|
||||||
|
let ctx = test_ctx(root);
|
||||||
|
|
||||||
|
let usage = crate::agents::TokenUsage {
|
||||||
|
input_tokens: 50,
|
||||||
|
output_tokens: 60,
|
||||||
|
cache_creation_input_tokens: 0,
|
||||||
|
cache_read_input_tokens: 0,
|
||||||
|
total_cost_usd: 0.5,
|
||||||
|
};
|
||||||
|
let r1 =
|
||||||
|
crate::agents::token_usage::build_record("10_story_a", "coder-1", None, usage.clone());
|
||||||
|
let r2 = crate::agents::token_usage::build_record("20_story_b", "coder-2", None, usage);
|
||||||
|
crate::agents::token_usage::append_record(root, &r1).unwrap();
|
||||||
|
crate::agents::token_usage::append_record(root, &r2).unwrap();
|
||||||
|
|
||||||
|
let result = tool_get_token_usage(&json!({"story_id": "10_story_a"}), &ctx).unwrap();
|
||||||
|
let parsed: Value = serde_json::from_str(&result).unwrap();
|
||||||
|
assert_eq!(parsed["records"].as_array().unwrap().len(), 1);
|
||||||
|
assert_eq!(parsed["records"][0]["story_id"], "10_story_a");
|
||||||
|
assert_eq!(parsed["totals"]["records"], 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,7 +9,9 @@
|
|||||||
pub mod io;
|
pub mod io;
|
||||||
pub mod permission;
|
pub mod permission;
|
||||||
|
|
||||||
|
#[allow(unused_imports)]
|
||||||
pub use io::add_permission_rule;
|
pub use io::add_permission_rule;
|
||||||
|
#[allow(unused_imports)]
|
||||||
pub use permission::generate_permission_rule;
|
pub use permission::generate_permission_rule;
|
||||||
#[allow(unused_imports)]
|
#[allow(unused_imports)]
|
||||||
pub use permission::is_dominated_by_wildcard;
|
pub use permission::is_dominated_by_wildcard;
|
||||||
|
|||||||
Reference in New Issue
Block a user