// NOTE(review): This copy of the file appears to have been mangled by extraction:
// JSX element tags (e.g. <div ...>, <ChatHeader ...>) and generic type arguments
// (e.g. useState<string | null>) have been stripped wherever angle brackets
// occurred, and many source lines were collapsed together (visible because `//`
// line comments appear mid-line). The comments added below document the logic
// that is still visible; the markup and type arguments must be confirmed
// against the original file. Mangled render sections are left verbatim.
import * as React from "react";
import Markdown from "react-markdown";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { oneDark } from "react-syntax-highlighter/dist/esm/styles/prism";
import type { PipelineState } from "../api/client";
import { api, ChatWebSocket } from "../api/client";
import { useChatHistory } from "../hooks/useChatHistory";
import type { Message, ProviderConfig } from "../types";
import { AgentPanel } from "./AgentPanel";
import { ChatHeader } from "./ChatHeader";
import type { ChatInputHandle } from "./ChatInput";
import { ChatInput } from "./ChatInput";
import { HelpOverlay } from "./HelpOverlay";
import { LozengeFlyProvider } from "./LozengeFlyContext";
import { MessageItem } from "./MessageItem";
import { SideQuestionOverlay } from "./SideQuestionOverlay";
import { StagePanel } from "./StagePanel";
import { WorkItemDetailPanel } from "./WorkItemDetailPanel";

// Destructure the hooks used throughout this module from the namespace import.
const { useCallback, useEffect, useMemo, useRef, useState } = React;

/** Fixed-height thinking trace block that auto-scrolls to bottom as text arrives. */
function ThinkingBlock({ text }: { text: string }) {
  const scrollRef = useRef(null);
  // Pin the trace to the newest output: whenever `text` grows, scroll the
  // container (held in scrollRef) all the way down.
  useEffect(() => {
    const el = scrollRef.current;
    if (el) {
      el.scrollTop = el.scrollHeight;
    }
  }, [text]);
  // NOTE(review): the JSX below is garbled in this copy — the element that
  // should carry ref={scrollRef} has been stripped. Confirm against original.
  return (
thinking {text}
); }

/** Streaming message renderer — stable component to avoid recreation on each render. */
// Renders markdown content; fenced code blocks whose className matches
// `language-<lang>` are routed through the Prism syntax highlighter, inline
// code falls through to a plain element.
// NOTE(review): the <Markdown components={{ code: ... }}> wrapper markup is
// stripped in this copy; only the `code` renderer callback body survives.
function StreamingMessage({ content }: { content: string }) { return ( { const match = /language-(\w+)/.exec(className || ""); const isInline = !className; return !isInline && match ? ( {String(children).replace(/\n$/, "")} ) : ( {children} ); }, }} > {content} ); }

/** Viewport width (px) below which the layout switches to its narrow variant. */
const NARROW_BREAKPOINT = 900;

/**
 * Map a provider tool name to a short human-readable activity label shown
 * while the agent is using that tool. Unknown tools fall back to
 * "Using <name>...".
 */
function formatToolActivity(toolName: string): string {
  switch (toolName) {
    // Built-in provider tool names
    case "read_file":
    case "Read":
      return "Reading file...";
    case "write_file":
    case "Write":
    case "Edit":
      return "Writing file...";
    case "list_directory":
    case "Glob":
      return "Listing files...";
    case "search_files":
    case "Grep":
      return "Searching files...";
    case "exec_shell":
    case "Bash":
      return "Executing command...";
    // Claude Code additional tool names
    case "Task":
      return "Running task...";
    case "WebFetch":
      return "Fetching web content...";
    case "WebSearch":
      return "Searching the web...";
    case "NotebookEdit":
      return "Editing notebook...";
    case "TodoWrite":
      return "Updating tasks...";
    default:
      return `Using ${toolName}...`;
  }
}

/** Rough token estimate: ~4 characters per token. */
const estimateTokens = (text: string): number => Math.ceil(text.length / 4);

/**
 * Best-effort context-window size (in tokens) for a model name.
 * Matches by prefix/substring; unknown models fall back to 8192.
 */
const getContextWindowSize = (modelName: string): number => {
  if (modelName.startsWith("claude-")) return 200000;
  if (modelName.includes("llama3")) return 8192;
  if (modelName.includes("qwen2.5")) return 32768;
  if (modelName.includes("deepseek")) return 16384;
  return 8192;
};

/** Props for the Chat screen. */
interface ChatProps {
  projectPath: string;
  onCloseProject: () => void;
}

/**
 * Main chat screen: streams assistant output over a WebSocket, queues user
 * messages while the agent is busy, tracks pipeline/agent state pushed from
 * the server, and renders the message list plus side panels.
 */
export function Chat({ projectPath, onCloseProject }: ChatProps) {
  // Persisted per-project chat history.
  const { messages, setMessages, clearMessages } = useChatHistory(projectPath);
  const [loading, setLoading] = useState(false);
  // Default model is the Claude Code PTY provider.
  const [model, setModel] = useState("claude-code-pty");
  const [enableTools, setEnableTools] = useState(true);
  const [availableModels, setAvailableModels] = useState([]);
  const [claudeModels, setClaudeModels] = useState([]);
  // In-flight streamed assistant text / thinking trace (cleared on onUpdate).
  const [streamingContent, setStreamingContent] = useState("");
  const [streamingThinking, setStreamingThinking] = useState("");
  const [showApiKeyDialog, setShowApiKeyDialog] = useState(false);
  const [apiKeyInput, setApiKeyInput] = useState("");
  const [hasAnthropicKey, setHasAnthropicKey] = useState(false);
  // Kanban-style pipeline buckets pushed by the server over the WebSocket.
  const [pipeline, setPipeline] = useState({ upcoming: [], current: [], qa: [], merge: [], done: [], });
  const [claudeSessionId, setClaudeSessionId] = useState(null);
  const [activityStatus, setActivityStatus] = useState(null);
  // FIFO of pending tool-permission requests; index 0 is shown to the user.
  // NOTE(review): type argument of Record was stripped in this copy.
  const [permissionQueue, setPermissionQueue] = useState< { requestId: string; toolName: string; toolInput: Record; }[] >([]);
  const [isNarrowScreen, setIsNarrowScreen] = useState( window.innerWidth < NARROW_BREAKPOINT, );
  const [reconciliationActive, setReconciliationActive] = useState(false);
  const [reconciliationEvents, setReconciliationEvents] = useState< { id: string; storyId: string; status: string; message: string }[] >([]);
  const reconciliationEventIdRef = useRef(0);
  // Monotonic counters bumped on server pushes so child panels can re-fetch.
  const [agentConfigVersion, setAgentConfigVersion] = useState(0);
  const [agentStateVersion, setAgentStateVersion] = useState(0);
  const [pipelineVersion, setPipelineVersion] = useState(0);
  const [needsOnboarding, setNeedsOnboarding] = useState(false);
  const onboardingTriggeredRef = useRef(false);
  const [selectedWorkItemId, setSelectedWorkItemId] = useState( null, );
  const [queuedMessages, setQueuedMessages] = useState< { id: string; text: string }[] >([]);
  // Active /btw side question (answered without disturbing the main thread).
  const [sideQuestion, setSideQuestion] = useState<{ question: string; response: string; loading: boolean; } | null>(null);
  const [showHelp, setShowHelp] = useState(false);
  // Ref so stale WebSocket callbacks can read the current queued messages
  const queuedMessagesRef = useRef<{ id: string; text: string }[]>([]);
  const queueIdCounterRef = useRef(0);
  // Trigger state: set to a batch of message strings to fire auto-send after loading ends
  const [pendingAutoSendBatch, setPendingAutoSendBatch] = useState< string[] | null >(null);
  const wsRef = useRef(null);
  const chatInputRef = useRef(null);
  const messagesEndRef = useRef(null);
  const scrollContainerRef = useRef(null);
  // Auto-scroll bookkeeping: stay pinned to bottom unless the user scrolled up.
  const shouldAutoScrollRef = useRef(true);
  const lastScrollTopRef = useRef(0);
  const userScrolledUpRef = useRef(false);
  // Message stashed while the API-key dialog is open; resent after save.
  const pendingMessageRef = useRef("");

  // Estimated context usage: ~200 tokens of overhead plus estimates for each
  // message (content + serialized tool calls) and any in-flight stream.
  const contextUsage = useMemo(() => {
    let totalTokens = 0;
    totalTokens += 200;
    for (const msg of messages) {
      totalTokens += estimateTokens(msg.content);
      if (msg.tool_calls) {
        totalTokens += estimateTokens(JSON.stringify(msg.tool_calls));
      }
    }
    if (streamingContent) {
      totalTokens += estimateTokens(streamingContent);
    }
    const contextWindow = getContextWindowSize(model);
    const percentage = Math.round((totalTokens / contextWindow) * 100);
    return { used: totalTokens, total: contextWindow, percentage, };
  }, [messages, streamingContent, model]);

  // On mount: load Ollama model list (sorted case-insensitively), restore the
  // saved model preference, then probe for an Anthropic key and, if present,
  // load the Claude model list. All failures are logged, never thrown.
  useEffect(() => {
    api
      .getOllamaModels()
      .then(async (models) => {
        if (models.length > 0) {
          const sortedModels = models.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()), );
          setAvailableModels(sortedModels);
          try {
            const savedModel = await api.getModelPreference();
            if (savedModel) { setModel(savedModel); }
          } catch (e) { console.error(e); }
        }
      })
      .catch((err) => console.error(err));
    api
      .getAnthropicApiKeyExists()
      .then((exists) => {
        setHasAnthropicKey(exists);
        if (!exists) return;
        return api.getAnthropicModels().then((models) => {
          if (models.length > 0) {
            const sortedModels = models.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()), );
            setClaudeModels(sortedModels);
          } else {
            setClaudeModels([]);
          }
        });
      })
      .catch((err) => { console.error(err); setHasAnthropicKey(false); setClaudeModels([]); });
  }, []);

  // On mount: open the chat WebSocket and wire all server-push callbacks.
  // The socket is closed (and the ref cleared) on unmount.
  useEffect(() => {
    const ws = new ChatWebSocket();
    wsRef.current = ws;
    ws.connect({
      // Append streamed assistant tokens / thinking tokens as they arrive.
      onToken: (content) => { setStreamingContent((prev: string) => prev + content); },
      onThinkingToken: (content) => { setStreamingThinking((prev: string) => prev + content); },
      // Full-history update: replace messages, clear stream buffers, and —
      // when the turn is finished (assistant message with no tool calls) —
      // flush any queued user messages as an auto-send batch.
      onUpdate: (history) => {
        setMessages(history);
        setStreamingContent("");
        setStreamingThinking("");
        const last = history[history.length - 1];
        if (last?.role === "assistant" && !last.tool_calls) {
          setLoading(false);
          setActivityStatus(null);
          if (queuedMessagesRef.current.length > 0) {
            const batch = queuedMessagesRef.current.map((item) => item.text);
            queuedMessagesRef.current = [];
            setQueuedMessages([]);
            setPendingAutoSendBatch(batch);
          }
        }
      },
      onSessionId: (sessionId) => { setClaudeSessionId(sessionId); },
      // On error: stop loading but still flush queued messages so they are
      // not lost.
      onError: (message) => {
        console.error("WebSocket error:", message);
        setLoading(false);
        setActivityStatus(null);
        if (queuedMessagesRef.current.length > 0) {
          const batch = queuedMessagesRef.current.map((item) => item.text);
          queuedMessagesRef.current = [];
          setQueuedMessages([]);
          setPendingAutoSendBatch(batch);
        }
      },
      onPipelineState: (state) => { setPipeline(state); setPipelineVersion((v) => v + 1); },
      onPermissionRequest: (requestId, toolName, toolInput) => { setPermissionQueue((prev) => [ ...prev, { requestId, toolName, toolInput }, ]); },
      onActivity: (toolName) => { setActivityStatus(formatToolActivity(toolName)); },
      // Reconciliation progress: "done" hides the banner; anything else shows
      // it and appends an event (capped at the last 8).
      onReconciliationProgress: (storyId, status, message) => {
        if (status === "done") {
          setReconciliationActive(false);
        } else {
          setReconciliationActive(true);
          setReconciliationEvents((prev) => {
            const id = String(reconciliationEventIdRef.current++);
            const next = [...prev, { id, storyId, status, message }];
            // Keep only the last 8 events to avoid the banner growing too tall.
            return next.slice(-8);
          });
        }
      },
      onAgentConfigChanged: () => { setAgentConfigVersion((v) => v + 1); },
      onAgentStateChanged: () => { setAgentStateVersion((v) => v + 1); },
      onOnboardingStatus: (onboarding: boolean) => { setNeedsOnboarding(onboarding); },
      // Side-question streaming: only touch state while a side question exists.
      onSideQuestionToken: (content) => { setSideQuestion((prev) => prev ? { ...prev, response: prev.response + content } : prev, ); },
      onSideQuestionDone: (response) => { setSideQuestion((prev) => prev ? { ...prev, response, loading: false } : prev, ); },
    });
    return () => { ws.close(); wsRef.current = null; };
  }, []);

  /** Scroll the messages container to the bottom and record that position. */
  const scrollToBottom = useCallback(() => {
    const element = scrollContainerRef.current;
    if (element) {
      element.scrollTop = element.scrollHeight;
      lastScrollTopRef.current = element.scrollHeight;
    }
  }, []);

  // Track user scrolling: scrolling up disables auto-scroll; reaching within
  // 5px of the bottom re-enables it.
  const handleScroll = () => {
    const element = scrollContainerRef.current;
    if (!element) return;
    const currentScrollTop = element.scrollTop;
    const isAtBottom = element.scrollHeight - element.scrollTop - element.clientHeight < 5;
    if (currentScrollTop < lastScrollTopRef.current) {
      userScrolledUpRef.current = true;
      shouldAutoScrollRef.current = false;
    }
    if (isAtBottom) {
      userScrolledUpRef.current = false;
      shouldAutoScrollRef.current = true;
    }
    lastScrollTopRef.current = currentScrollTop;
  };

  // Changes whenever content that should trigger auto-scroll changes.
  const autoScrollKey = messages.length + streamingContent.length + streamingThinking.length;
  useEffect(() => {
    if ( autoScrollKey >= 0 && shouldAutoScrollRef.current && !userScrolledUpRef.current ) {
      scrollToBottom();
    }
  }, [autoScrollKey, scrollToBottom]);

  // Auto-send all queued messages as a batch when loading ends
  useEffect(() => {
    if (pendingAutoSendBatch && pendingAutoSendBatch.length > 0) {
      const batch = pendingAutoSendBatch;
      setPendingAutoSendBatch(null);
      sendMessageBatch(batch);
    }
  }, [pendingAutoSendBatch]);

  // Keep the narrow-screen flag in sync with window resizes.
  useEffect(() => {
    const handleResize = () => setIsNarrowScreen(window.innerWidth < NARROW_BREAKPOINT);
    window.addEventListener("resize", handleResize);
    return () => window.removeEventListener("resize", handleResize);
  }, []);

  // Abort the in-flight generation: queued messages are moved back into the
  // input box, the server-side chat is cancelled, and any partially streamed
  // content is preserved as an assistant message.
  const cancelGeneration = async () => {
    // Preserve queued messages by appending them to the chat input box
    if (queuedMessagesRef.current.length > 0) {
      const queued = queuedMessagesRef.current .map((item) => item.text) .join("\n");
      chatInputRef.current?.appendToInput(queued);
    }
    queuedMessagesRef.current = [];
    setQueuedMessages([]);
    try {
      wsRef.current?.cancel();
      await api.cancelChat();
      if (streamingContent) {
        setMessages((prev: Message[]) => [ ...prev, { role: "assistant", content: streamingContent }, ]);
        setStreamingContent("");
      }
      setStreamingThinking("");
      setLoading(false);
      setActivityStatus(null);
    } catch (e) { console.error("Failed to cancel chat:", e); }
  };

  // Send one user message. Handles the /help and /btw slash commands, queues
  // the message if the agent is busy, and prompts for an Anthropic API key
  // when a claude-* model is selected without one on file.
  const sendMessage = async (messageText: string) => {
    if (!messageText.trim()) return;
    // /help — show available slash commands overlay
    if (/^\/help\s*$/i.test(messageText)) {
      setShowHelp(true);
      return;
    }
    // /btw — answered from context without disrupting main chat
    const btwMatch = messageText.match(/^\/btw\s+(.+)/s);
    if (btwMatch) {
      const question = btwMatch[1].trim();
      setSideQuestion({ question, response: "", loading: true });
      const isClaudeCode = model === "claude-code-pty";
      // Provider routing: claude-code PTY, Anthropic API, or local Ollama.
      const provider = isClaudeCode ? "claude-code" : model.startsWith("claude-") ? "anthropic" : "ollama";
      const config: ProviderConfig = { provider, model, base_url: "http://localhost:11434", enable_tools: false, };
      wsRef.current?.sendSideQuestion(question, messages, config);
      return;
    }
    // Agent is busy — queue the message instead of dropping it
    if (loading) {
      const newItem = { id: String(queueIdCounterRef.current++), text: messageText, };
      queuedMessagesRef.current = [...queuedMessagesRef.current, newItem];
      setQueuedMessages([...queuedMessagesRef.current]);
      return;
    }
    const isClaudeCode = model === "claude-code-pty";
    if (!isClaudeCode && model.startsWith("claude-")) {
      const hasKey = await api.getAnthropicApiKeyExists();
      if (!hasKey) {
        // Stash the message; handleSaveApiKey resends it after the key is set.
        pendingMessageRef.current = messageText;
        setShowApiKeyDialog(true);
        return;
      }
    }
    const userMsg: Message = { role: "user", content: messageText };
    const newHistory = [...messages, userMsg];
    setMessages(newHistory);
    setLoading(true);
    setStreamingContent("");
    setStreamingThinking("");
    setActivityStatus(null);
    try {
      const provider = isClaudeCode ? "claude-code" : model.startsWith("claude-") ? "anthropic" : "ollama";
      // Resume the existing Claude Code session when one is known.
      const config: ProviderConfig = { provider, model, base_url: "http://localhost:11434", enable_tools: enableTools, ...(isClaudeCode && claudeSessionId ? { session_id: claudeSessionId } : {}), };
      wsRef.current?.sendChat(newHistory, config);
    } catch (e) {
      console.error("Chat error:", e);
      const errorMessage = String(e);
      // A user-initiated cancel is not surfaced as an error message.
      if (!errorMessage.includes("Chat cancelled by user")) {
        setMessages((prev: Message[]) => [ ...prev, { role: "assistant", content: `**Error:** ${e}` }, ]);
      }
      setLoading(false);
    }
  };

  // Send several queued user messages in one turn (used by the auto-send
  // flush). Mirrors sendMessage's provider/config handling, but performs no
  // slash-command or busy-state checks.
  const sendMessageBatch = async (messageTexts: string[]) => {
    if (messageTexts.length === 0) return;
    const userMsgs: Message[] = messageTexts.map((text) => ({ role: "user", content: text, }));
    const newHistory = [...messages, ...userMsgs];
    setMessages(newHistory);
    setLoading(true);
    setStreamingContent("");
    setStreamingThinking("");
    setActivityStatus(null);
    try {
      const isClaudeCode = model === "claude-code-pty";
      const provider = isClaudeCode ? "claude-code" : model.startsWith("claude-") ? "anthropic" : "ollama";
      const config: ProviderConfig = { provider, model, base_url: "http://localhost:11434", enable_tools: enableTools, ...(isClaudeCode && claudeSessionId ? { session_id: claudeSessionId } : {}), };
      wsRef.current?.sendChat(newHistory, config);
    } catch (e) {
      console.error("Chat error:", e);
      const errorMessage = String(e);
      if (!errorMessage.includes("Chat cancelled by user")) {
        setMessages((prev: Message[]) => [ ...prev, { role: "assistant", content: `**Error:** ${e}` }, ]);
      }
      setLoading(false);
    }
  };

  // Persist the entered Anthropic key server-side, then resend the message
  // that originally triggered the dialog (if any).
  const handleSaveApiKey = async () => {
    if (!apiKeyInput.trim()) return;
    try {
      await api.setAnthropicApiKey(apiKeyInput);
      setShowApiKeyDialog(false);
      setApiKeyInput("");
      const pendingMessage = pendingMessageRef.current;
      pendingMessageRef.current = "";
      if (pendingMessage.trim()) { sendMessage(pendingMessage); }
    } catch (e) {
      console.error("Failed to save API key:", e);
      alert(`Failed to save API key: ${e}`);
    }
  };

  // Answer the permission request at the head of the queue and pop it.
  const handlePermissionResponse = (approved: boolean, alwaysAllow = false) => {
    const current = permissionQueue[0];
    if (!current) return;
    wsRef.current?.sendPermissionResponse( current.requestId, approved, alwaysAllow, );
    setPermissionQueue((prev) => prev.slice(1));
  };

  // After user confirmation: cancel any in-flight chat, wipe the stored
  // history, and reset all per-session state (including the Claude session id).
  const clearSession = async () => {
    const confirmed = window.confirm( "Are you sure? This will clear all messages and reset the conversation context.", );
    if (confirmed) {
      try {
        await api.cancelChat();
        wsRef.current?.cancel();
      } catch (e) { console.error("Failed to cancel chat:", e); }
      clearMessages();
      setStreamingContent("");
      setStreamingThinking("");
      setLoading(false);
      setActivityStatus(null);
      setClaudeSessionId(null);
    }
  };

  // Remove one message from the queue (both the ref and the mirrored state).
  const handleRemoveQueuedMessage = useCallback((id: string) => {
    queuedMessagesRef.current = queuedMessagesRef.current.filter( (item) => item.id !== id, );
    setQueuedMessages([...queuedMessagesRef.current]);
  }, []);

  // NOTE(review): the entire render tree below is garbled in this copy — all
  // JSX element tags were stripped, leaving only attribute fragments, handler
  // bodies, and text content. It is preserved verbatim and must be restored
  // from the original file.
  return (
{ setModel(newModel); api.setModelPreference(newModel).catch(console.error); }} enableTools={enableTools} onToggleTools={setEnableTools} /> {/* Two-column content area */}
{/* Left column: chat messages + input pinned at bottom */}
{/* Scrollable messages area */}
{needsOnboarding && messages.length === 0 && !loading && (

Welcome to Story Kit

This project needs to be set up before you can start writing stories. The agent will guide you through configuring your project goals and tech stack.

)} {messages.map((msg: Message, idx: number) => ( ))} {loading && streamingThinking && ( )} {loading && streamingContent && (
)} {loading && (activityStatus != null || (!streamingContent && !streamingThinking)) && (
{activityStatus ?? "Thinking..."}
)}
{/* Startup reconciliation progress banner */} {reconciliationActive && (
Reconciling startup state...
{reconciliationEvents.map((evt) => (
{evt.storyId ? `[${evt.storyId}] ` : ""} {evt.message}
))}
)} {/* Chat input pinned at bottom of left column */}
{/* Right column: panels independently scrollable */}
{selectedWorkItemId ? ( setSelectedWorkItemId(null)} /> ) : ( <> setSelectedWorkItemId(item.story_id)} /> setSelectedWorkItemId(item.story_id)} /> setSelectedWorkItemId(item.story_id)} /> setSelectedWorkItemId(item.story_id)} /> setSelectedWorkItemId(item.story_id)} /> )}
{showApiKeyDialog && (

Enter Anthropic API Key

To use Claude models, please enter your Anthropic API key. Your key will be stored server-side and reused across sessions.

setApiKeyInput(e.target.value)} onKeyDown={(e) => e.key === "Enter" && handleSaveApiKey()} placeholder="sk-ant-..." style={{ width: "100%", padding: "12px", borderRadius: "8px", border: "1px solid #555", backgroundColor: "#1a1a1a", color: "#ececec", fontSize: "1em", marginBottom: "20px", outline: "none", }} />
)} {permissionQueue.length > 0 && (

Permission Request {permissionQueue.length > 1 && ( (+{permissionQueue.length - 1} queued) )}

The agent wants to use the{" "} {permissionQueue[0].toolName} {" "} tool. Do you approve?

{Object.keys(permissionQueue[0].toolInput).length > 0 && (
								{JSON.stringify(permissionQueue[0].toolInput, null, 2)}
							
)}
)} {showHelp && setShowHelp(false)} />} {sideQuestion && ( setSideQuestion(null)} /> )}
); }