// NOTE(review): this file looks like a mangled TSX paste — the source has been
// collapsed onto a few very long lines, so several `//` line comments now
// swallow the code that originally followed them on the same physical line,
// and the JSX element tags in the render body (everything after `return (`)
// appear to have been stripped, leaving only expressions, attribute fragments
// and `{/* */}` comments. Restore the formatted file from version control
// before changing behavior; comments below annotate the logical content only.
//
// Chat — chat panel for an open project in a Tauri app. Talks to the backend
// via `invoke` ("chat", "cancel_chat", "get_ollama_models", model/API-key
// preference commands) and receives streamed output through the
// "chat:update" (full message list) and "chat:token" (incremental text)
// events. Renders messages with react-markdown + Prism highlighting.
// Props: projectPath (displayed in the header), onCloseProject (callback).
import { invoke } from "@tauri-apps/api/core"; import { listen } from "@tauri-apps/api/event"; import { ask } from "@tauri-apps/plugin-dialog"; import { useEffect, useRef, useState } from "react"; import Markdown from "react-markdown"; import { Prism as SyntaxHighlighter } from "react-syntax-highlighter"; import { oneDark } from "react-syntax-highlighter/dist/esm/styles/prism"; import type { Message, ProviderConfig } from "../types"; interface ChatProps { projectPath: string; onCloseProject: () => void; } export function Chat({ projectPath, onCloseProject }: ChatProps) { const [messages, setMessages] = useState([]); const [input, setInput] = useState(""); const [loading, setLoading] = useState(false); const [model, setModel] = useState("llama3.1"); // Default local model const [enableTools, setEnableTools] = useState(true); const [availableModels, setAvailableModels] = useState([]); const [claudeModels] = useState([ "claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022", ]); const [streamingContent, setStreamingContent] = useState(""); const [showApiKeyDialog, setShowApiKeyDialog] = useState(false); const [apiKeyInput, setApiKeyInput] = useState(""); const messagesEndRef = useRef(null); const inputRef = useRef(null); const scrollContainerRef = useRef(null); const shouldAutoScrollRef = useRef(true); const lastScrollTopRef = useRef(0); const userScrolledUpRef = useRef(false); // Token estimation and context window tracking const estimateTokens = (text: string): number => { return Math.ceil(text.length / 4); }; const getContextWindowSize = (modelName: string): number => { if (modelName.startsWith("claude-")) return 200000; if (modelName.includes("llama3")) return 8192; if (modelName.includes("qwen2.5")) return 32768; if (modelName.includes("deepseek")) return 16384; return 8192; // Default }; const calculateContextUsage = (): { used: number; total: number; percentage: number; } => { let totalTokens = 0; // System prompts (approximate) totalTokens += 200; // All 
// (cont.) calculateContextUsage: ~4 chars/token heuristic (estimateTokens)
// summed over every message, its JSON-serialized tool_calls, and any
// in-flight streamingContent, plus a flat 200 tokens for system prompts;
// the total is expressed as a percentage of the selected model's context
// window (getContextWindowSize, matched by model-name substring).
// getContextEmoji maps that percentage to a traffic-light indicator
// (>=90% red, >=75% yellow, otherwise green).
messages for (const msg of messages) { totalTokens += estimateTokens(msg.content); if (msg.tool_calls) { totalTokens += estimateTokens(JSON.stringify(msg.tool_calls)); } } // Streaming content if (streamingContent) { totalTokens += estimateTokens(streamingContent); } const contextWindow = getContextWindowSize(model); const percentage = Math.round((totalTokens / contextWindow) * 100); return { used: totalTokens, total: contextWindow, percentage, }; }; const contextUsage = calculateContextUsage(); const getContextEmoji = (percentage: number): string => { if (percentage >= 90) return "🔴"; if (percentage >= 75) return "🟡"; return "🟢"; }; useEffect(() => { invoke("get_ollama_models") .then(async (models) => { if (models.length > 0) { // Sort models alphabetically (case-insensitive) const sortedModels = models.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()), ); setAvailableModels(sortedModels); // Check backend store for saved model try { const savedModel = await invoke( "get_model_preference", ); if (savedModel) { setModel(savedModel); } else if (models.length > 0) { setModel(models[0]); } } catch (e) { console.error(e); } } }) .catch((err) => console.error(err)); // eslint-disable-next-line react-hooks/exhaustive-deps }, []); useEffect(() => { const unlistenUpdatePromise = listen("chat:update", (event) => { setMessages(event.payload); setStreamingContent(""); // Clear streaming content when final update arrives }); const unlistenTokenPromise = listen("chat:token", (event) => { setStreamingContent((prev) => prev + event.payload); }); return () => { unlistenUpdatePromise.then((unlisten) => unlisten()); unlistenTokenPromise.then((unlisten) => unlisten()); }; }, []); const scrollToBottom = () => { const element = scrollContainerRef.current; if (element) { element.scrollTop = element.scrollHeight; lastScrollTopRef.current = element.scrollHeight; } }; const handleScroll = () => { const element = scrollContainerRef.current; if (!element) return; const 
// (cont.) mount-time effects: Ollama model list is fetched once, sorted
// case-insensitively, and the backend-stored model preference (if any) wins
// over the first list entry; "chat:update"/"chat:token" listeners are
// registered with cleanup that awaits each unlisten promise.
// Scroll tracking: scrolling UP disables auto-scroll; returning to within
// 5px of the bottom re-enables it. cancelGeneration invokes "cancel_chat"
// and preserves any partial streamed text as an assistant message.
currentScrollTop = element.scrollTop; const isAtBottom = element.scrollHeight - element.scrollTop - element.clientHeight < 5; // Detect if user scrolled UP if (currentScrollTop < lastScrollTopRef.current) { userScrolledUpRef.current = true; shouldAutoScrollRef.current = false; } // If user scrolled back to bottom, re-enable auto-scroll if (isAtBottom) { userScrolledUpRef.current = false; shouldAutoScrollRef.current = true; } lastScrollTopRef.current = currentScrollTop; }; // Smart auto-scroll: only scroll if user hasn't scrolled up // biome-ignore lint/correctness/useExhaustiveDependencies: We intentionally trigger on messages/streamingContent changes useEffect(() => { if (shouldAutoScrollRef.current && !userScrolledUpRef.current) { scrollToBottom(); } }, [messages, streamingContent]); useEffect(() => { inputRef.current?.focus(); }, []); const cancelGeneration = async () => { try { await invoke("cancel_chat"); // Preserve any partial streaming content as a message if (streamingContent) { setMessages((prev) => [ ...prev, { role: "assistant", content: streamingContent }, ]); setStreamingContent(""); } setLoading(false); } catch (e) { console.error("Failed to cancel chat:", e); } }; const sendMessage = async () => { if (!input.trim() || loading) return; // Check if using Claude and API key is required if (model.startsWith("claude-")) { const hasKey = await invoke("get_anthropic_api_key_exists"); if (!hasKey) { setShowApiKeyDialog(true); return; } } const userMsg: Message = { role: "user", content: input }; const newHistory = [...messages, userMsg]; setMessages(newHistory); setInput(""); setLoading(true); setStreamingContent(""); // Clear any previous streaming content try { const config: ProviderConfig = { provider: model.startsWith("claude-") ? 
// (cont.) sendMessage: Claude models require a stored Anthropic API key
// (the key dialog is shown and the send aborted otherwise); the provider is
// chosen by the "claude-" name prefix — anything else routes to the local
// Ollama server at localhost:11434. The awaited "chat" invoke relies on
// "chat:update" events for live state; backend errors are appended as an
// assistant "**Error:**" message unless the error text indicates the user
// cancelled. handleSaveApiKey stores the key then retries sendMessage;
// clearSession asks for confirmation, cancels any in-flight request, then
// wipes messages/streaming/loading state.
"anthropic" : "ollama", model: model, base_url: "http://localhost:11434", enable_tools: enableTools, }; // Invoke backend chat command // We rely on 'chat:update' events to update the state in real-time await invoke("chat", { messages: newHistory, config: config, }); } catch (e) { console.error(e); // Don't show error message if user cancelled const errorMessage = String(e); if (!errorMessage.includes("Chat cancelled by user")) { setMessages((prev) => [ ...prev, { role: "assistant", content: `**Error:** ${e}` }, ]); } } finally { setLoading(false); } }; const handleSaveApiKey = async () => { if (!apiKeyInput.trim()) return; try { await invoke("set_anthropic_api_key", { apiKey: apiKeyInput }); setShowApiKeyDialog(false); setApiKeyInput(""); // Retry sending the message sendMessage(); } catch (e) { console.error("Failed to save API key:", e); alert(`Failed to save API key: ${e}`); } }; const clearSession = async () => { const confirmed = await ask( "Are you sure? This will clear all messages and reset the conversation context.", { title: "New Session", kind: "warning", }, ); if (confirmed) { // Cancel any in-flight backend requests first try { await invoke("cancel_chat"); } catch (e) { console.error("Failed to cancel chat:", e); } // Then clear frontend state setMessages([]); setStreamingContent(""); setLoading(false); } }; return (
{/* Sticky Header */}
{/* Project Info */}
{projectPath}
{/* Model Controls */}
{/* Context Usage Indicator */}
{getContextEmoji(contextUsage.percentage)} {contextUsage.percentage} %
{availableModels.length > 0 || claudeModels.length > 0 ? ( ) : ( { const newModel = e.target.value; setModel(newModel); invoke("set_model_preference", { model: newModel }).catch( console.error, ); }} placeholder="Model" style={{ padding: "6px 12px", borderRadius: "99px", border: "none", fontSize: "0.9em", background: "#2f2f2f", color: "#ececec", outline: "none", }} /> )}
{/* Messages Area */}
{messages.map((msg, idx) => (
{msg.role === "user" ? ( msg.content ) : msg.role === "tool" ? (
â–¶ Tool Output {msg.tool_call_id && ` (${msg.tool_call_id})`}
											{msg.content}
										
) : (
{ const match = /language-(\w+)/.exec(className || ""); const isInline = !className; return !isInline && match ? ( {String(children).replace(/\n$/, "")} ) : ( {children} ); }, }} > {msg.content}
)} {/* Show Tool Calls if present */} {msg.tool_calls && (
{msg.tool_calls.map((tc, i) => { // Parse arguments to extract key info let argsSummary = ""; try { const args = JSON.parse(tc.function.arguments); const firstKey = Object.keys(args)[0]; if (firstKey && args[firstKey]) { argsSummary = String(args[firstKey]); // Truncate if too long if (argsSummary.length > 50) { argsSummary = `${argsSummary.substring(0, 47)}...`; } } } catch (_e) { // If parsing fails, just show empty } return (
â–¶ {tc.function.name} {argsSummary && `(${argsSummary})`}
); })}
)}
))} {loading && streamingContent && (
{ const match = /language-(\w+)/.exec(className || ""); const isInline = !className; return !isInline && match ? ( {String(children).replace(/\n$/, "")} ) : ( {children} ); }, }} > {streamingContent}
)} {loading && !streamingContent && (
Thinking...
)}
{/* Input Area */}
setInput(e.target.value)} onKeyDown={(e) => e.key === "Enter" && sendMessage()} placeholder="Send a message..." style={{ flex: 1, padding: "14px 20px", borderRadius: "24px", border: "1px solid #333", outline: "none", fontSize: "1rem", fontWeight: "500", background: "#2f2f2f", color: "#ececec", boxShadow: "0 2px 6px rgba(0,0,0,0.02)", }} />
{/* API Key Dialog */} {showApiKeyDialog && (

Enter Anthropic API Key

To use Claude models, please enter your Anthropic API key. Your key will be stored securely in your system keychain.

setApiKeyInput(e.target.value)} onKeyDown={(e) => e.key === "Enter" && handleSaveApiKey()} placeholder="sk-ant-..." style={{ width: "100%", padding: "12px", borderRadius: "8px", border: "1px solid #555", backgroundColor: "#1a1a1a", color: "#ececec", fontSize: "1em", marginBottom: "20px", outline: "none", }} />
)}
); }