Skip to content

Commit 9f5f22a

Browse files
preserve temp chat mid-stream and scope Stop to active thread (#2971)
1 parent: cd803ed · commit: 9f5f22a

4 files changed

Lines changed: 614 additions & 465 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@red-hat-developer-hub/backstage-plugin-lightspeed': patch
3+
---
4+
5+
Fix new chat streams when switching threads and scope Stop to the streaming conversation (RHDHBUGS-3040).

workspaces/lightspeed/plugins/lightspeed/src/components/LightSpeedChat.tsx

Lines changed: 26 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -835,18 +835,27 @@ export const LightspeedChat = ({
835835
setNewChatCreated(false);
836836
};
837837

838-
const { conversationMessages, handleInputPrompt, scrollToBottomRef } =
839-
useConversationMessages(
840-
viewConversationId,
841-
userName,
842-
selectedModel,
843-
selectedProvider,
844-
avatar,
845-
onComplete,
846-
onStart,
847-
undefined,
848-
onRequestIdReady,
849-
);
838+
const {
839+
conversationMessages,
840+
handleInputPrompt,
841+
scrollToBottomRef,
842+
streamingConversationId,
843+
} = useConversationMessages(
844+
viewConversationId,
845+
userName,
846+
selectedModel,
847+
selectedProvider,
848+
avatar,
849+
onComplete,
850+
onStart,
851+
undefined,
852+
onRequestIdReady,
853+
);
854+
855+
const streamingUiMatchesView =
856+
isSendButtonDisabled &&
857+
streamingConversationId !== null &&
858+
viewConversationId === streamingConversationId;
850859

851860
const [messages, setMessages] =
852861
useState<MessageProps[]>(conversationMessages);
@@ -1463,7 +1472,7 @@ export const LightspeedChat = ({
14631472
ref={scrollToBottomRef}
14641473
welcomePrompts={welcomePrompts}
14651474
conversationId={conversationId}
1466-
isStreaming={isSendButtonDisabled}
1475+
isStreaming={streamingUiMatchesView}
14671476
topicRestrictionEnabled={topicRestrictionEnabled}
14681477
displayMode={displayMode}
14691478
/>
@@ -1487,8 +1496,10 @@ export const LightspeedChat = ({
14871496
hasMicrophoneButton
14881497
value={draftMessage}
14891498
onChange={handleDraftMessage}
1490-
hasStopButton={isSendButtonDisabled}
1491-
handleStopButton={isSendButtonDisabled ? handleStopButton : undefined}
1499+
hasStopButton={streamingUiMatchesView}
1500+
handleStopButton={
1501+
streamingUiMatchesView ? handleStopButton : undefined
1502+
}
14921503
buttonProps={{
14931504
attach: {
14941505
inputTestId: 'attachment-input',

workspaces/lightspeed/plugins/lightspeed/src/hooks/__tests__/useConversationMessages.test.tsx

Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -840,4 +840,91 @@ data: {"event": "token", "data": {"id": 2, "token": ""}}\n
840840
).toBeUndefined();
841841
});
842842
});
843+
844+
it('preserves temp thread when switching conversations mid-stream and returning (RHDHBUGS-3040)', async () => {
845+
mockLightspeedApi.getConversationMessages.mockResolvedValue([]);
846+
const onComplete = jest.fn();
847+
848+
let resolveSecondRead!: (value: IteratorResult<Uint8Array | null>) => void;
849+
const secondReadPromise = new Promise<IteratorResult<Uint8Array | null>>(
850+
resolve => {
851+
resolveSecondRead = resolve;
852+
},
853+
);
854+
855+
const firstChunk = createSSEStream([
856+
{ event: 'start', data: { conversation_id: 'persisted-after-stream' } },
857+
{ event: 'token', data: { id: 0, token: 'Hello ', role: 'inference' } },
858+
]);
859+
const secondChunk = createSSEStream([
860+
{ event: 'token', data: { id: 1, token: 'world!', role: 'inference' } },
861+
]);
862+
863+
const read = jest
864+
.fn()
865+
.mockResolvedValueOnce({
866+
done: false,
867+
value: new TextEncoder().encode(firstChunk),
868+
})
869+
.mockImplementationOnce(() => secondReadPromise)
870+
.mockResolvedValueOnce({ done: true, value: null });
871+
872+
const lightSpeedApi = {
873+
...mockLightspeedApi,
874+
createMessage: jest.fn().mockResolvedValue({ read }),
875+
};
876+
(useApi as jest.Mock).mockReturnValue(lightSpeedApi);
877+
878+
const { result, rerender } = renderHook(
879+
({ conversationId }) =>
880+
useConversationMessages(
881+
conversationId,
882+
'test-user',
883+
'gpt-3',
884+
'openai',
885+
'user.png',
886+
onComplete,
887+
),
888+
{
889+
initialProps: { conversationId: TEMP_CONVERSATION_ID },
890+
wrapper,
891+
},
892+
);
893+
894+
await act(async () => {
895+
void result.current.handleInputPrompt('Hi');
896+
});
897+
898+
await waitFor(() => {
899+
const msgs = result.current.conversations[TEMP_CONVERSATION_ID];
900+
expect(msgs?.[1]?.content).toContain('Hello ');
901+
});
902+
903+
expect(result.current.streamingConversationId).toBe(TEMP_CONVERSATION_ID);
904+
905+
rerender({ conversationId: 'other-conv-id' });
906+
907+
expect(result.current.streamingConversationId).toBe(TEMP_CONVERSATION_ID);
908+
909+
rerender({ conversationId: TEMP_CONVERSATION_ID });
910+
911+
await waitFor(() => {
912+
const msgs = result.current.conversations[TEMP_CONVERSATION_ID] ?? [];
913+
expect(msgs.length).toBeGreaterThanOrEqual(2);
914+
expect(String(msgs[1]?.content ?? '')).toContain('Hello ');
915+
});
916+
917+
await act(async () => {
918+
resolveSecondRead({
919+
done: false,
920+
value: new TextEncoder().encode(secondChunk),
921+
});
922+
});
923+
924+
await waitFor(() => {
925+
expect(onComplete).toHaveBeenCalled();
926+
});
927+
928+
expect(result.current.streamingConversationId).toBeNull();
929+
});
843930
});

0 commit comments

Comments (0)