Files
storkit/server/src/agents/token_usage.rs
dave f610ef6046 Restore codebase deleted by bad auto-commit e4227cf
Commit e4227cf (a story creation auto-commit) erroneously deleted 175
files from master's tree, likely due to a race condition between
concurrent git operations. This commit re-adds all files from the
working directory.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-22 19:07:07 +00:00

203 lines
6.2 KiB
Rust

use std::fs;
use std::path::Path;
use chrono::Utc;
use serde::{Deserialize, Serialize};
use super::TokenUsage;
/// A single token usage record persisted to disk.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TokenUsageRecord {
    /// Story directory name the agent was working on (e.g. "42_story_foo").
    pub story_id: String,
    /// Name of the agent that produced this usage (e.g. "coder-1").
    pub agent_name: String,
    /// RFC 3339 UTC timestamp of when the record was built.
    pub timestamp: String,
    /// Model identifier, when reported. `#[serde(default)]` keeps older
    /// JSONL lines (written before this field existed) deserializable.
    #[serde(default)]
    pub model: Option<String>,
    /// The token counts and cost captured from the agent's result event.
    pub usage: TokenUsage,
}
/// Append a token usage record to the persistent JSONL file.
///
/// Each line is a self-contained JSON object, making appends atomic and
/// reads simple. The file lives at `.storkit/token_usage.jsonl`.
pub fn append_record(project_root: &Path, record: &TokenUsageRecord) -> Result<(), String> {
    use std::io::Write;

    let path = token_usage_path(project_root);

    // Ensure the `.storkit` directory exists before trying to open the file.
    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir)
            .map_err(|e| format!("Failed to create token_usage directory: {e}"))?;
    }

    // Serialize first so a failure never touches the file at all.
    let serialized =
        serde_json::to_string(record).map_err(|e| format!("Failed to serialize record: {e}"))?;

    let mut out = std::io::BufWriter::new(
        fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open(&path)
            .map_err(|e| format!("Failed to open token_usage file: {e}"))?,
    );
    out.write_all(serialized.as_bytes())
        .map_err(|e| format!("Failed to write token_usage record: {e}"))?;
    out.write_all(b"\n")
        .map_err(|e| format!("Failed to write token_usage record: {e}"))?;
    // Flush explicitly: Drop would swallow any write error.
    out.flush()
        .map_err(|e| format!("Failed to flush token_usage file: {e}"))?;
    Ok(())
}
/// Read all token usage records from the persistent file.
///
/// Returns an empty list when the file does not exist yet. Malformed
/// lines are logged and skipped so one bad line never loses the rest.
pub fn read_all(project_root: &Path) -> Result<Vec<TokenUsageRecord>, String> {
    let path = token_usage_path(project_root);
    if !path.exists() {
        return Ok(Vec::new());
    }

    let content =
        fs::read_to_string(&path).map_err(|e| format!("Failed to read token_usage file: {e}"))?;

    let records = content
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .filter_map(|line| match serde_json::from_str::<TokenUsageRecord>(line) {
            Ok(record) => Some(record),
            Err(e) => {
                // Tolerate file corruption: warn and keep going.
                crate::slog_warn!("[token_usage] Skipping malformed line: {e}");
                None
            }
        })
        .collect();

    Ok(records)
}
/// Build a `TokenUsageRecord` from the parts available at completion time.
///
/// The timestamp is captured at call time as an RFC 3339 UTC string.
pub fn build_record(
    story_id: &str,
    agent_name: &str,
    model: Option<String>,
    usage: TokenUsage,
) -> TokenUsageRecord {
    let timestamp = Utc::now().to_rfc3339();
    TokenUsageRecord {
        story_id: story_id.to_owned(),
        agent_name: agent_name.to_owned(),
        timestamp,
        model,
        usage,
    }
}
/// Location of the JSONL usage log: `<project_root>/.storkit/token_usage.jsonl`.
fn token_usage_path(project_root: &Path) -> std::path::PathBuf {
    let mut path = project_root.to_path_buf();
    path.push(".storkit");
    path.push("token_usage.jsonl");
    path
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    // Fixed usage fixture shared by the persistence tests below.
    fn sample_usage() -> TokenUsage {
        TokenUsage {
            input_tokens: 100,
            output_tokens: 200,
            cache_creation_input_tokens: 5000,
            cache_read_input_tokens: 10000,
            total_cost_usd: 1.57,
        }
    }

    // A record written with `append_record` comes back unchanged via `read_all`.
    #[test]
    fn append_and_read_roundtrip() {
        let dir = TempDir::new().unwrap();
        let root = dir.path();
        let record = build_record("42_story_foo", "coder-1", None, sample_usage());
        append_record(root, &record).unwrap();
        let records = read_all(root).unwrap();
        assert_eq!(records.len(), 1);
        assert_eq!(records[0].story_id, "42_story_foo");
        assert_eq!(records[0].agent_name, "coder-1");
        assert_eq!(records[0].usage, sample_usage());
    }

    // Appends add lines rather than truncating, and read order matches
    // write order (JSONL is append-only).
    #[test]
    fn multiple_appends_accumulate() {
        let dir = TempDir::new().unwrap();
        let root = dir.path();
        let r1 = build_record("s1", "coder-1", None, sample_usage());
        let r2 = build_record("s2", "coder-2", None, sample_usage());
        append_record(root, &r1).unwrap();
        append_record(root, &r2).unwrap();
        let records = read_all(root).unwrap();
        assert_eq!(records.len(), 2);
        assert_eq!(records[0].story_id, "s1");
        assert_eq!(records[1].story_id, "s2");
    }

    // A missing file is not an error — it reads as zero records.
    #[test]
    fn read_empty_returns_empty() {
        let dir = TempDir::new().unwrap();
        let records = read_all(dir.path()).unwrap();
        assert!(records.is_empty());
    }

    // Corrupt lines (non-JSON and wrong-schema JSON) are skipped, not fatal.
    #[test]
    fn malformed_lines_are_skipped() {
        let dir = TempDir::new().unwrap();
        let root = dir.path();
        let path = root.join(".storkit").join("token_usage.jsonl");
        fs::create_dir_all(path.parent().unwrap()).unwrap();
        fs::write(&path, "not json\n{\"bad\":true}\n").unwrap();
        let records = read_all(root).unwrap();
        assert!(records.is_empty());
    }

    // `TokenUsage::from_result_event` extracts all fields from a full
    // result event payload.
    #[test]
    fn token_usage_from_result_event() {
        let json = serde_json::json!({
            "type": "result",
            "total_cost_usd": 1.57,
            "usage": {
                "input_tokens": 7,
                "output_tokens": 475,
                "cache_creation_input_tokens": 185020,
                "cache_read_input_tokens": 810585
            }
        });
        let usage = TokenUsage::from_result_event(&json).unwrap();
        assert_eq!(usage.input_tokens, 7);
        assert_eq!(usage.output_tokens, 475);
        assert_eq!(usage.cache_creation_input_tokens, 185020);
        assert_eq!(usage.cache_read_input_tokens, 810585);
        assert!((usage.total_cost_usd - 1.57).abs() < f64::EPSILON);
    }

    // An event with no `usage` object yields `None`.
    #[test]
    fn token_usage_from_result_event_missing_usage() {
        let json = serde_json::json!({"type": "result"});
        assert!(TokenUsage::from_result_event(&json).is_none());
    }

    // Missing cache fields default to 0 rather than failing extraction.
    #[test]
    fn token_usage_from_result_event_partial_fields() {
        let json = serde_json::json!({
            "type": "result",
            "total_cost_usd": 0.5,
            "usage": {
                "input_tokens": 10,
                "output_tokens": 20
            }
        });
        let usage = TokenUsage::from_result_event(&json).unwrap();
        assert_eq!(usage.input_tokens, 10);
        assert_eq!(usage.output_tokens, 20);
        assert_eq!(usage.cache_creation_input_tokens, 0);
        assert_eq!(usage.cache_read_input_tokens, 0);
    }
}