Backend: Add Anthropic/Claude provider integration
- Add anthropic.rs module with streaming support
- Convert between internal and Anthropic tool/message formats
- Add keyring dependency for secure API key storage
- Add API key management commands (get_exists, set)
- Auto-detect provider from model name (claude-* prefix)
- Support SSE streaming from Anthropic API
- Handle tool calling with Anthropic's format
- Add cancellation support for Anthropic streams
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
use crate::commands::{fs, search, shell};
|
||||
use crate::llm::anthropic::AnthropicProvider;
|
||||
use crate::llm::ollama::OllamaProvider;
|
||||
use crate::llm::prompts::SYSTEM_PROMPT;
|
||||
use crate::llm::types::{Message, Role, ToolCall, ToolDefinition, ToolFunctionDefinition};
|
||||
@@ -23,6 +24,35 @@ pub async fn get_ollama_models(base_url: Option<String>) -> Result<Vec<String>,
|
||||
OllamaProvider::get_models(&url).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_anthropic_api_key_exists() -> Result<bool, String> {
|
||||
match keyring::Entry::new("living-spec-anthropic-api-key", "default") {
|
||||
Ok(entry) => Ok(entry.get_password().is_ok()),
|
||||
Err(e) => Err(format!("Failed to access keychain: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn set_anthropic_api_key(api_key: String) -> Result<(), String> {
|
||||
let entry = keyring::Entry::new("living-spec-anthropic-api-key", "default")
|
||||
.map_err(|e| format!("Failed to create keychain entry: {}", e))?;
|
||||
|
||||
entry
|
||||
.set_password(&api_key)
|
||||
.map_err(|e| format!("Failed to store API key: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_anthropic_api_key() -> Result<String, String> {
|
||||
let entry = keyring::Entry::new("living-spec-anthropic-api-key", "default")
|
||||
.map_err(|e| format!("Failed to access keychain: {}", e))?;
|
||||
|
||||
entry
|
||||
.get_password()
|
||||
.map_err(|_| "Anthropic API key not found. Please set your API key.".to_string())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn chat(
|
||||
app: AppHandle,
|
||||
@@ -46,12 +76,13 @@ pub async fn chat(
|
||||
.clone()
|
||||
.unwrap_or_else(|| "http://localhost:11434".to_string());
|
||||
|
||||
if config.provider.as_str() != "ollama" {
|
||||
// Determine provider from model name
|
||||
let is_claude = config.model.starts_with("claude-");
|
||||
|
||||
if !is_claude && config.provider.as_str() != "ollama" {
|
||||
return Err(format!("Unsupported provider: {}", config.provider));
|
||||
}
|
||||
|
||||
let provider = OllamaProvider::new(base_url);
|
||||
|
||||
// 2. Define Tools
|
||||
let tool_defs = get_tool_definitions();
|
||||
let tools = if config.enable_tools.unwrap_or(true) {
|
||||
@@ -100,10 +131,22 @@ pub async fn chat(
|
||||
turn_count += 1;
|
||||
|
||||
// Call LLM with streaming
|
||||
let response = provider
|
||||
.chat_stream(&app, &config.model, ¤t_history, tools, &mut cancel_rx)
|
||||
.await
|
||||
.map_err(|e| format!("LLM Error: {}", e))?;
|
||||
let response = if is_claude {
|
||||
// Use Anthropic provider
|
||||
let api_key = get_anthropic_api_key()?;
|
||||
let anthropic_provider = AnthropicProvider::new(api_key);
|
||||
anthropic_provider
|
||||
.chat_stream(&app, &config.model, ¤t_history, tools, &mut cancel_rx)
|
||||
.await
|
||||
.map_err(|e| format!("Anthropic Error: {}", e))?
|
||||
} else {
|
||||
// Use Ollama provider
|
||||
let ollama_provider = OllamaProvider::new(base_url.clone());
|
||||
ollama_provider
|
||||
.chat_stream(&app, &config.model, ¤t_history, tools, &mut cancel_rx)
|
||||
.await
|
||||
.map_err(|e| format!("Ollama Error: {}", e))?
|
||||
};
|
||||
|
||||
// Process Response
|
||||
if let Some(tool_calls) = response.tool_calls {
|
||||
|
||||
Reference in New Issue
Block a user