Backend: Add Anthropic/Claude provider integration

- Add anthropic.rs module with streaming support
- Convert between internal and Anthropic tool/message formats
- Add keyring dependency for secure API key storage
- Add API key management commands (get_exists, set)
- Auto-detect provider from model name (claude-* prefix)
- Support SSE streaming from Anthropic API
- Handle tool calling with Anthropic's format
- Add cancellation support for Anthropic streams
This commit is contained in:
Dave
2025-12-27 19:41:20 +00:00
parent e71dcd8226
commit 1529ca77e7
6 changed files with 431 additions and 8 deletions

39
src-tauri/Cargo.lock generated
View File

@@ -960,6 +960,17 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "eventsource-stream"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab"
dependencies = [
"futures-core",
"nom",
"pin-project-lite",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -1988,6 +1999,16 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "keyring"
version = "3.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eebcc3aff044e5944a8fbaf69eb277d11986064cba30c468730e8b9909fb551c"
dependencies = [
"log",
"zeroize",
]
[[package]]
name = "kuchikiki"
version = "0.8.8-speedreader"
@@ -2074,8 +2095,10 @@ version = "0.1.0"
dependencies = [
"async-trait",
"chrono",
"eventsource-stream",
"futures",
"ignore",
"keyring",
"reqwest",
"serde",
"serde_json",
@@ -2162,6 +2185,12 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.9"
@@ -2276,6 +2305,16 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "num-conv"
version = "0.1.0"

View File

@@ -32,3 +32,5 @@ chrono = { version = "0.4.42", features = ["serde"] }
async-trait = "0.1.89"
tauri-plugin-store = "2.4.1"
tokio = { version = "1", features = ["sync"] }
keyring = "3.2"
eventsource-stream = "0.2.3"

View File

@@ -1,4 +1,5 @@
use crate::commands::{fs, search, shell};
use crate::llm::anthropic::AnthropicProvider;
use crate::llm::ollama::OllamaProvider;
use crate::llm::prompts::SYSTEM_PROMPT;
use crate::llm::types::{Message, Role, ToolCall, ToolDefinition, ToolFunctionDefinition};
@@ -23,6 +24,35 @@ pub async fn get_ollama_models(base_url: Option<String>) -> Result<Vec<String>,
OllamaProvider::get_models(&url).await
}
#[tauri::command]
pub async fn get_anthropic_api_key_exists() -> Result<bool, String> {
    // Report whether an Anthropic API key is already stored in the OS keychain.
    // A readable password means the key exists; any read failure is treated as absent.
    let entry = keyring::Entry::new("living-spec-anthropic-api-key", "default")
        .map_err(|e| format!("Failed to access keychain: {}", e))?;
    Ok(entry.get_password().is_ok())
}
#[tauri::command]
pub async fn set_anthropic_api_key(api_key: String) -> Result<(), String> {
    // Persist the Anthropic API key in the OS keychain under the app's service name.
    match keyring::Entry::new("living-spec-anthropic-api-key", "default") {
        Ok(entry) => entry
            .set_password(&api_key)
            .map_err(|e| format!("Failed to store API key: {}", e)),
        Err(e) => Err(format!("Failed to create keychain entry: {}", e)),
    }
}
// Read the stored Anthropic API key from the OS keychain.
// Returns a user-facing error when the key has not been set yet.
fn get_anthropic_api_key() -> Result<String, String> {
    keyring::Entry::new("living-spec-anthropic-api-key", "default")
        .map_err(|e| format!("Failed to access keychain: {}", e))?
        .get_password()
        .map_err(|_| "Anthropic API key not found. Please set your API key.".to_string())
}
#[tauri::command]
pub async fn chat(
app: AppHandle,
@@ -46,12 +76,13 @@ pub async fn chat(
.clone()
.unwrap_or_else(|| "http://localhost:11434".to_string());
if config.provider.as_str() != "ollama" {
// Determine provider from model name
let is_claude = config.model.starts_with("claude-");
if !is_claude && config.provider.as_str() != "ollama" {
return Err(format!("Unsupported provider: {}", config.provider));
}
let provider = OllamaProvider::new(base_url);
// 2. Define Tools
let tool_defs = get_tool_definitions();
let tools = if config.enable_tools.unwrap_or(true) {
@@ -100,10 +131,22 @@ pub async fn chat(
turn_count += 1;
// Call LLM with streaming
let response = provider
let response = if is_claude {
// Use Anthropic provider
let api_key = get_anthropic_api_key()?;
let anthropic_provider = AnthropicProvider::new(api_key);
anthropic_provider
.chat_stream(&app, &config.model, &current_history, tools, &mut cancel_rx)
.await
.map_err(|e| format!("LLM Error: {}", e))?;
.map_err(|e| format!("Anthropic Error: {}", e))?
} else {
// Use Ollama provider
let ollama_provider = OllamaProvider::new(base_url.clone());
ollama_provider
.chat_stream(&app, &config.model, &current_history, tools, &mut cancel_rx)
.await
.map_err(|e| format!("Ollama Error: {}", e))?
};
// Process Response
if let Some(tool_calls) = response.tool_calls {

View File

@@ -24,7 +24,9 @@ pub fn run() {
commands::shell::exec_shell,
commands::chat::chat,
commands::chat::get_ollama_models,
commands::chat::cancel_chat
commands::chat::cancel_chat,
commands::chat::get_anthropic_api_key_exists,
commands::chat::set_anthropic_api_key
])
.run(tauri::generate_context!())
.expect("error while running tauri application");

View File

@@ -0,0 +1,336 @@
use crate::llm::types::{
CompletionResponse, FunctionCall, Message, Role, ToolCall, ToolDefinition,
};
use futures::StreamExt;
use reqwest::header::{HeaderMap, HeaderValue, CONTENT_TYPE};
use serde::{Deserialize, Serialize};
use serde_json::json;
use tauri::{AppHandle, Emitter};
use tokio::sync::watch::Receiver;
const ANTHROPIC_API_URL: &str = "https://api.anthropic.com/v1/messages";
const ANTHROPIC_VERSION: &str = "2023-06-01";
/// Client for Anthropic's Messages API with SSE streaming support.
pub struct AnthropicProvider {
    api_key: String, // secret; sent via the x-api-key request header
    client: reqwest::Client,
}
/// A single conversation turn in Anthropic's Messages API wire format.
#[derive(Debug, Serialize, Deserialize)]
struct AnthropicMessage {
    role: String, // "user" or "assistant"
    content: AnthropicContent,
}
/// Message content: either a bare string or a list of typed content blocks.
///
/// `untagged` lets serde serialize/deserialize whichever shape matches.
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum AnthropicContent {
    Text(String),
    Blocks(Vec<AnthropicContentBlock>),
}
/// A typed content block inside an Anthropic message, discriminated by `"type"`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
enum AnthropicContentBlock {
    /// Plain text content.
    #[serde(rename = "text")]
    Text { text: String },
    /// A tool invocation requested by the model.
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
    /// The output of a tool run, echoed back to the model.
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String, // matches the `id` of the originating tool_use block
        content: String,
    },
}
/// Tool definition in the shape Anthropic's `tools` request parameter expects.
#[derive(Debug, Serialize)]
struct AnthropicTool {
    name: String,
    description: String,
    /// JSON Schema describing the tool's input parameters.
    input_schema: serde_json::Value,
}
/// One server-sent event from the streaming Messages API.
///
/// Only the event `type` is parsed eagerly; the rest of the payload is kept
/// as raw JSON (via `flatten`) and inspected per event type.
#[derive(Debug, Deserialize)]
struct StreamEvent {
    #[serde(rename = "type")]
    event_type: String,
    #[serde(flatten)]
    data: serde_json::Value,
}
impl AnthropicProvider {
    /// Create a provider that authenticates requests with `api_key`.
    pub fn new(api_key: String) -> Self {
        Self {
            api_key,
            client: reqwest::Client::new(),
        }
    }

    /// Convert our internal tool definitions to Anthropic format.
    fn convert_tools(tools: &[ToolDefinition]) -> Vec<AnthropicTool> {
        tools
            .iter()
            .map(|tool| AnthropicTool {
                name: tool.function.name.clone(),
                description: tool.function.description.clone(),
                input_schema: tool.function.parameters.clone(),
            })
            .collect()
    }

    /// Convert our internal messages to Anthropic format.
    ///
    /// - System messages are skipped here: Anthropic only accepts them via the
    ///   top-level `system` request parameter (see `extract_system_prompt`).
    /// - Assistant tool calls become `tool_use` content blocks.
    /// - Tool results become `user` messages holding a `tool_result` block,
    ///   which is how Anthropic expects tool output to be returned.
    fn convert_messages(messages: &[Message]) -> Vec<AnthropicMessage> {
        let mut anthropic_messages: Vec<AnthropicMessage> = Vec::new();
        for msg in messages {
            match msg.role {
                Role::System => {
                    // Collected separately and sent via the `system` parameter.
                    continue;
                }
                Role::User => {
                    anthropic_messages.push(AnthropicMessage {
                        role: "user".to_string(),
                        content: AnthropicContent::Text(msg.content.clone()),
                    });
                }
                Role::Assistant => {
                    if let Some(tool_calls) = &msg.tool_calls {
                        // Assistant message with tool calls: optional leading
                        // text, then one tool_use block per call.
                        let mut blocks = Vec::new();
                        if !msg.content.is_empty() {
                            blocks.push(AnthropicContentBlock::Text {
                                text: msg.content.clone(),
                            });
                        }
                        for call in tool_calls {
                            // Arguments are stored internally as a JSON string;
                            // fall back to an empty object if parsing fails.
                            let input: serde_json::Value =
                                serde_json::from_str(&call.function.arguments).unwrap_or(json!({}));
                            blocks.push(AnthropicContentBlock::ToolUse {
                                id: call
                                    .id
                                    .clone()
                                    .unwrap_or_else(|| uuid::Uuid::new_v4().to_string()),
                                name: call.function.name.clone(),
                                input,
                            });
                        }
                        anthropic_messages.push(AnthropicMessage {
                            role: "assistant".to_string(),
                            content: AnthropicContent::Blocks(blocks),
                        });
                    } else {
                        // Regular assistant message.
                        anthropic_messages.push(AnthropicMessage {
                            role: "assistant".to_string(),
                            content: AnthropicContent::Text(msg.content.clone()),
                        });
                    }
                }
                Role::Tool => {
                    // Tool result - sent as a user message with a tool_result block.
                    let tool_use_id = msg.tool_call_id.clone().unwrap_or_default();
                    anthropic_messages.push(AnthropicMessage {
                        role: "user".to_string(),
                        content: AnthropicContent::Blocks(vec![
                            AnthropicContentBlock::ToolResult {
                                tool_use_id,
                                content: msg.content.clone(),
                            },
                        ]),
                    });
                }
            }
        }
        anthropic_messages
    }

    /// Join all system messages into one prompt for the `system` parameter.
    fn extract_system_prompt(messages: &[Message]) -> String {
        messages
            .iter()
            .filter(|m| matches!(m.role, Role::System))
            .map(|m| m.content.as_str())
            .collect::<Vec<_>>()
            .join("\n\n")
    }

    /// Stream a chat completion from the Anthropic Messages API.
    ///
    /// Each text delta is emitted to the frontend on the `chat:token` event;
    /// the full text plus any tool calls are accumulated into the returned
    /// `CompletionResponse`. `cancel_rx` aborts the stream when it flips to
    /// `true`.
    ///
    /// # Errors
    /// Returns a descriptive `String` on request failure, non-success HTTP
    /// status, stream errors, or user cancellation.
    pub async fn chat_stream(
        &self,
        app: &AppHandle,
        model: &str,
        messages: &[Message],
        tools: &[ToolDefinition],
        cancel_rx: &mut Receiver<bool>,
    ) -> Result<CompletionResponse, String> {
        // Convert messages and tools to the wire format.
        let anthropic_messages = Self::convert_messages(messages);
        let anthropic_tools = Self::convert_tools(tools);
        let system_prompt = Self::extract_system_prompt(messages);

        // Build request body; `system` and `tools` are optional parameters.
        let mut request_body = json!({
            "model": model,
            "max_tokens": 4096,
            "messages": anthropic_messages,
            "stream": true,
        });
        if !system_prompt.is_empty() {
            request_body["system"] = json!(system_prompt);
        }
        if !anthropic_tools.is_empty() {
            request_body["tools"] = json!(anthropic_tools);
        }

        // Build headers (Anthropic authenticates via x-api-key, not Bearer).
        let mut headers = HeaderMap::new();
        headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
        headers.insert(
            "x-api-key",
            HeaderValue::from_str(&self.api_key).map_err(|e| e.to_string())?,
        );
        headers.insert(
            "anthropic-version",
            HeaderValue::from_static(ANTHROPIC_VERSION),
        );

        // Make the streaming request.
        let response = self
            .client
            .post(ANTHROPIC_API_URL)
            .headers(headers)
            .json(&request_body)
            .send()
            .await
            .map_err(|e| format!("Failed to send request to Anthropic: {}", e))?;

        if !response.status().is_success() {
            let status = response.status();
            let error_text = response
                .text()
                .await
                .unwrap_or_else(|_| "Unknown error".to_string());
            return Err(format!("Anthropic API error {}: {}", status, error_text));
        }

        // Process the SSE stream. A single "data: ..." line can be split
        // across network chunks (and a multi-byte UTF-8 character can be split
        // mid-line), so raw bytes are buffered and only complete,
        // newline-terminated lines are decoded and parsed. Parsing each chunk
        // in isolation would silently drop events that straddle a boundary.
        let mut stream = response.bytes_stream();
        let mut accumulated_text = String::new();
        let mut tool_calls: Vec<ToolCall> = Vec::new();
        let mut current_tool_use: Option<(String, String, String)> = None; // (id, name, input_json)
        let mut line_buf: Vec<u8> = Vec::new();

        loop {
            let chunk = tokio::select! {
                result = stream.next() => {
                    match result {
                        Some(c) => c,
                        None => break,
                    }
                }
                changed = cancel_rx.changed() => {
                    if changed.is_ok() {
                        if *cancel_rx.borrow() {
                            return Err("Chat cancelled by user".to_string());
                        }
                        continue;
                    }
                    // The cancel sender was dropped, so cancellation can never
                    // fire again. Await the stream directly: `changed()` on a
                    // closed watch channel resolves immediately, and selecting
                    // on it again would spin in a busy loop.
                    match stream.next().await {
                        Some(c) => c,
                        None => break,
                    }
                }
            };
            let bytes = chunk.map_err(|e| format!("Stream error: {}", e))?;
            line_buf.extend_from_slice(&bytes);

            // Drain every complete line currently buffered; a trailing partial
            // line stays in the buffer until the next chunk completes it.
            while let Some(newline_pos) = line_buf.iter().position(|&b| b == b'\n') {
                let line_bytes: Vec<u8> = line_buf.drain(..=newline_pos).collect();
                let decoded = String::from_utf8_lossy(&line_bytes);
                let line = decoded.trim_end();

                // Only "data: ..." lines carry payloads; skip "event: ..."
                // lines and the blank separators between events.
                let json_str = match line.strip_prefix("data: ") {
                    Some(s) => s,
                    None => continue,
                };
                // OpenAI-style terminator; Anthropic doesn't send it, but skip
                // it defensively rather than feeding it to the JSON parser.
                if json_str == "[DONE]" {
                    continue;
                }
                let event: StreamEvent = match serde_json::from_str(json_str) {
                    Ok(e) => e,
                    Err(_) => continue,
                };
                match event.event_type.as_str() {
                    "content_block_start" => {
                        // A tool_use block opens here; its input JSON arrives
                        // incrementally via input_json_delta events below.
                        if let Some(content_block) = event.data.get("content_block") {
                            if content_block.get("type") == Some(&json!("tool_use")) {
                                let id = content_block["id"].as_str().unwrap_or("").to_string();
                                let name =
                                    content_block["name"].as_str().unwrap_or("").to_string();
                                current_tool_use = Some((id, name, String::new()));
                            }
                        }
                    }
                    "content_block_delta" => {
                        if let Some(delta) = event.data.get("delta") {
                            // Text delta: forward to the frontend and accumulate.
                            if delta.get("type") == Some(&json!("text_delta")) {
                                if let Some(text) = delta.get("text").and_then(|t| t.as_str()) {
                                    accumulated_text.push_str(text);
                                    let _ = app.emit("chat:token", text);
                                }
                            }
                            // Tool input delta: append the partial JSON string.
                            else if delta.get("type") == Some(&json!("input_json_delta")) {
                                if let Some((_, _, input_json)) = &mut current_tool_use {
                                    if let Some(partial) =
                                        delta.get("partial_json").and_then(|p| p.as_str())
                                    {
                                        input_json.push_str(partial);
                                    }
                                }
                            }
                        }
                    }
                    "content_block_stop" => {
                        // Finalize the in-flight tool call, if any.
                        if let Some((id, name, input_json)) = current_tool_use.take() {
                            tool_calls.push(ToolCall {
                                id: Some(id),
                                kind: "function".to_string(),
                                function: FunctionCall {
                                    name,
                                    arguments: input_json,
                                },
                            });
                        }
                    }
                    // message_start / message_delta / message_stop / ping carry
                    // nothing needed for the accumulated response.
                    _ => {}
                }
            }
        }

        Ok(CompletionResponse {
            content: if accumulated_text.is_empty() {
                None
            } else {
                Some(accumulated_text)
            },
            tool_calls: if tool_calls.is_empty() {
                None
            } else {
                Some(tool_calls)
            },
        })
    }
}

View File

@@ -1,3 +1,4 @@
pub mod anthropic;
pub mod ollama;
pub mod prompts;
pub mod types;