Story 18: Token-by-token streaming responses
- Backend: Added OllamaProvider::chat_stream() with newline-delimited JSON parsing
- Backend: Emit chat:token events for each token received from Ollama
- Backend: Added futures dependency and stream feature for reqwest
- Frontend: Added streamingContent state and chat:token event listener
- Frontend: Real-time token display with auto-scroll
- Frontend: Markdown and syntax highlighting support for streaming content
- Fixed all TypeScript errors (tsc --noEmit)
- Fixed all Biome warnings and errors
- Fixed all Clippy warnings
- Added comprehensive code quality documentation
- Added tsc --noEmit to verification checklist

Tested and verified:
- Tokens stream in real-time
- Auto-scroll works during streaming
- Tool calls interrupt streaming correctly
- Multi-turn conversations work
- Smooth performance with no lag
This commit is contained in:
@@ -25,10 +25,9 @@ serde_json = "1"
|
||||
tauri-plugin-dialog = "2.4.2"
|
||||
ignore = "0.4.25"
|
||||
walkdir = "2.5.0"
|
||||
reqwest = { version = "0.12.28", features = ["json", "blocking"] }
|
||||
reqwest = { version = "0.12.28", features = ["json", "blocking", "stream"] }
|
||||
futures = "0.3"
|
||||
uuid = { version = "1.19.0", features = ["v4", "serde"] }
|
||||
chrono = { version = "0.4.42", features = ["serde"] }
|
||||
async-trait = "0.1.89"
|
||||
tauri-plugin-store = "2.4.1"
|
||||
tokio = { version = "1.48.0", features = ["sync"] }
|
||||
|
||||
|
||||
Reference in New Issue
Block a user