story-kit: merge 199_story_web_ui_submits_all_queued_items_at_once
This commit is contained in:
@@ -0,0 +1,20 @@
---
name: "Web UI submits all queued items at once"
---

# Story 199: Web UI submits all queued items at once
|
||||
|
||||
## User Story
|
||||
|
||||
As a user, I want the web UI to submit all queued items at once when there are multiple waiting items, so that I don't have to submit them one at a time.
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] When the agent finishes responding and there are multiple queued messages, all queued messages are submitted at once (not one at a time)
|
||||
- [ ] All queued messages are appended to the conversation history as sequential user turns before sending
|
||||
- [ ] After draining the queue, no queued message indicators remain visible
|
||||
- [ ] Queue draining also works when the agent finishes with an error
|
||||
|
||||
## Out of Scope
|
||||
|
||||
- TBD
|
||||
@@ -851,7 +851,7 @@ describe("Chat message queue (Story 155)", () => {
|
||||
expect(indicators[1]).toHaveTextContent("Queue 2");
|
||||
});
|
||||
|
||||
it("queued messages are delivered in order (Bug 168)", async () => {
|
||||
it("all queued messages are drained at once when agent responds (Story 199)", async () => {
|
||||
render(<Chat projectPath="/tmp/project" onCloseProject={vi.fn()} />);
|
||||
|
||||
await waitFor(() => expect(capturedWsHandlers).not.toBeNull());
|
||||
@@ -866,7 +866,7 @@ describe("Chat message queue (Story 155)", () => {
|
||||
fireEvent.keyDown(input, { key: "Enter", shiftKey: false });
|
||||
});
|
||||
|
||||
// Queue two messages
|
||||
// Queue two messages while loading
|
||||
await act(async () => {
|
||||
fireEvent.change(input, { target: { value: "Second" } });
|
||||
});
|
||||
@@ -886,7 +886,7 @@ describe("Chat message queue (Story 155)", () => {
|
||||
expect(indicators[0]).toHaveTextContent("Second");
|
||||
expect(indicators[1]).toHaveTextContent("Third");
|
||||
|
||||
// Simulate first response completing — "Second" is sent next
|
||||
// Simulate first response completing — both "Second" and "Third" are drained at once
|
||||
act(() => {
|
||||
capturedWsHandlers?.onUpdate([
|
||||
{ role: "user", content: "First" },
|
||||
@@ -894,11 +894,10 @@ describe("Chat message queue (Story 155)", () => {
|
||||
]);
|
||||
});
|
||||
|
||||
// "Third" should remain queued; "Second" was consumed
|
||||
// Both queued indicators should be gone — entire queue drained in one shot
|
||||
await waitFor(() => {
|
||||
const remaining = screen.queryAllByTestId("queued-message-indicator");
|
||||
expect(remaining).toHaveLength(1);
|
||||
expect(remaining[0]).toHaveTextContent("Third");
|
||||
expect(remaining).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -192,8 +192,10 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
// Ref so stale WebSocket callbacks can read the current queued messages
|
||||
const queuedMessagesRef = useRef<{ id: string; text: string }[]>([]);
|
||||
const queueIdCounterRef = useRef(0);
|
||||
// Trigger state: set to a message string to fire auto-send after loading ends
|
||||
const [pendingAutoSend, setPendingAutoSend] = useState<string | null>(null);
|
||||
// Trigger state: set to a batch of message strings to fire auto-send after loading ends
|
||||
const [pendingAutoSendBatch, setPendingAutoSendBatch] = useState<
|
||||
string[] | null
|
||||
>(null);
|
||||
|
||||
const wsRef = useRef<ChatWebSocket | null>(null);
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
@@ -294,10 +296,11 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
if (last?.role === "assistant" && !last.tool_calls) {
|
||||
setLoading(false);
|
||||
setActivityStatus(null);
|
||||
const nextQueued = queuedMessagesRef.current.shift();
|
||||
if (nextQueued !== undefined) {
|
||||
setQueuedMessages([...queuedMessagesRef.current]);
|
||||
setPendingAutoSend(nextQueued.text);
|
||||
if (queuedMessagesRef.current.length > 0) {
|
||||
const batch = queuedMessagesRef.current.map((item) => item.text);
|
||||
queuedMessagesRef.current = [];
|
||||
setQueuedMessages([]);
|
||||
setPendingAutoSendBatch(batch);
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -308,10 +311,11 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
console.error("WebSocket error:", message);
|
||||
setLoading(false);
|
||||
setActivityStatus(null);
|
||||
const nextQueued = queuedMessagesRef.current.shift();
|
||||
if (nextQueued !== undefined) {
|
||||
setQueuedMessages([...queuedMessagesRef.current]);
|
||||
setPendingAutoSend(nextQueued.text);
|
||||
if (queuedMessagesRef.current.length > 0) {
|
||||
const batch = queuedMessagesRef.current.map((item) => item.text);
|
||||
queuedMessagesRef.current = [];
|
||||
setQueuedMessages([]);
|
||||
setPendingAutoSendBatch(batch);
|
||||
}
|
||||
},
|
||||
onPipelineState: (state) => {
|
||||
@@ -395,14 +399,14 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
}
|
||||
}, [autoScrollKey, scrollToBottom]);
|
||||
|
||||
// Auto-send queued message when loading ends
|
||||
// Auto-send all queued messages as a batch when loading ends
|
||||
useEffect(() => {
|
||||
if (pendingAutoSend) {
|
||||
const msg = pendingAutoSend;
|
||||
setPendingAutoSend(null);
|
||||
sendMessage(msg);
|
||||
if (pendingAutoSendBatch && pendingAutoSendBatch.length > 0) {
|
||||
const batch = pendingAutoSendBatch;
|
||||
setPendingAutoSendBatch(null);
|
||||
sendMessageBatch(batch);
|
||||
}
|
||||
}, [pendingAutoSend]);
|
||||
}, [pendingAutoSendBatch]);
|
||||
|
||||
useEffect(() => {
|
||||
const handleResize = () =>
|
||||
@@ -498,6 +502,51 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
}
|
||||
};
|
||||
|
||||
const sendMessageBatch = async (messageTexts: string[]) => {
|
||||
if (messageTexts.length === 0) return;
|
||||
|
||||
const userMsgs: Message[] = messageTexts.map((text) => ({
|
||||
role: "user",
|
||||
content: text,
|
||||
}));
|
||||
const newHistory = [...messages, ...userMsgs];
|
||||
|
||||
setMessages(newHistory);
|
||||
setLoading(true);
|
||||
setStreamingContent("");
|
||||
setStreamingThinking("");
|
||||
setActivityStatus(null);
|
||||
|
||||
try {
|
||||
const isClaudeCode = model === "claude-code-pty";
|
||||
const provider = isClaudeCode
|
||||
? "claude-code"
|
||||
: model.startsWith("claude-")
|
||||
? "anthropic"
|
||||
: "ollama";
|
||||
const config: ProviderConfig = {
|
||||
provider,
|
||||
model,
|
||||
base_url: "http://localhost:11434",
|
||||
enable_tools: enableTools,
|
||||
...(isClaudeCode && claudeSessionId
|
||||
? { session_id: claudeSessionId }
|
||||
: {}),
|
||||
};
|
||||
wsRef.current?.sendChat(newHistory, config);
|
||||
} catch (e) {
|
||||
console.error("Chat error:", e);
|
||||
const errorMessage = String(e);
|
||||
if (!errorMessage.includes("Chat cancelled by user")) {
|
||||
setMessages((prev: Message[]) => [
|
||||
...prev,
|
||||
{ role: "assistant", content: `**Error:** ${e}` },
|
||||
]);
|
||||
}
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveApiKey = async () => {
|
||||
if (!apiKeyInput.trim()) return;
|
||||
|
||||
|
||||
Reference in New Issue
Block a user