Skip to content

Commit f9d1bc4

Browse files
feat(lightspeed): align with road-core API endpoints (#666)
* feat(lightspeed): align with road-core API endpoints
* fix e2e tests
* add inline error messages
1 parent 544c80a commit f9d1bc4

13 files changed

Lines changed: 366 additions & 248 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@red-hat-developer-hub/backstage-plugin-lightspeed': minor
3+
---
4+
5+
Align with road-core service API response

workspaces/lightspeed/packages/app/e2e-tests/fixtures/responses.ts

Lines changed: 45 additions & 24 deletions
Original file line number | Diff line number | Diff line change
@@ -39,21 +39,26 @@ export const defaultConversation = {
3939
export const conversations = [
4040
{
4141
conversation_id: 'user:development/guest+Av8Fax73D4XPx5Ls',
42-
summary: 'Conversation 1',
43-
lastMessageTimestamp: createdAt,
42+
topic_summary: 'Conversation 1',
43+
last_message_timestamp: createdAt,
44+
},
45+
{
46+
conversation_id: 'temp-conversation-id',
47+
topic_summary: 'Temporary conversation',
48+
last_message_timestamp: createdAt,
4449
},
4550
];
4651

4752
export const moreConversations = [
4853
{
4954
conversation_id: 'user:development/guest+Av8Fax73D4XPx5Ls',
50-
summary: 'Conversation 1',
51-
lastMessageTimestamp: createdAt,
55+
topic_summary: 'Conversation 1',
56+
last_message_timestamp: createdAt,
5257
},
5358
{
5459
conversation_id: 'user:development/guest+Av8Fax73D4XPx5La',
55-
summary: 'New Conversation',
56-
lastMessageTimestamp: createdAt,
60+
topic_summary: 'New Conversation',
61+
last_message_timestamp: createdAt,
5762
},
5863
];
5964

@@ -62,24 +67,20 @@ export const contents = [
6267
lc: 1,
6368
type: 'constructor',
6469
id: ['langchain_core', 'messages', 'HumanMessage'],
65-
kwargs: {
66-
content: 'New conversation',
67-
response_metadata: {
68-
created_at: createdAt,
69-
},
70-
additional_kwargs: {},
70+
content: 'New conversation',
71+
response_metadata: {
72+
created_at: createdAt,
7173
},
74+
additional_kwargs: {},
7275
},
7376
{
7477
lc: 1,
7578
type: 'constructor',
7679
id: ['langchain_core', 'messages', 'AIMessage'],
77-
kwargs: {
78-
content: 'Still a placeholder message',
79-
response_metadata: {
80-
created_at: createdAt,
81-
model: models[1].id,
82-
},
80+
content: 'Still a placeholder message',
81+
response_metadata: {
82+
created_at: createdAt,
83+
model: models[1].id,
8384
tool_calls: [],
8485
invalid_tool_calls: [],
8586
additional_kwargs: {},
@@ -90,12 +91,32 @@ export const contents = [
9091
export const botResponse = `This is a placeholder message`;
9192

9293
export const generateQueryResponse = (conversationId: string) => {
93-
let body = '';
94+
const tokens = botResponse.match(/(\s+|[^\s]+)/g) || [];
95+
96+
const events: {
97+
event: string;
98+
data?: Record<string, any>;
99+
done?: boolean;
100+
}[] = [];
101+
102+
events.push({
103+
event: 'start',
104+
data: { conversation_id: conversationId },
105+
});
106+
107+
tokens.forEach((token, index) => {
108+
events.push({
109+
event: 'token',
110+
data: { id: index, token },
111+
});
112+
});
94113

95-
for (const token of botResponse.split(' ')) {
96-
body += `{"conversation_id":"${conversationId}","response":{"lc":1,"type":"constructor","id":["langchain_core","messages","AIMessageChunk"],"kwargs":{"content":" ${token}","tool_call_chunks":[],"additional_kwargs":{},"id":"chatcmpl-890","tool_calls":[],"invalid_tool_calls":[],"response_metadata":{"prompt":0,"completion":0,"created_at":1736332476031,"model":"${models[1].id}"}}}}`;
97-
}
98-
body += `{"conversation_id":"${conversationId}","response":{"lc":1,"type":"constructor","id":["langchain_core","messages","AIMessageChunk"],"kwargs":{"content":"","tool_call_chunks":[],"additional_kwargs":{},"id":"chatcmpl-890","tool_calls":[],"invalid_tool_calls":[],"response_metadata":{"prompt":0,"completion":0,"finish_reason":"stop","system_fingerprint":"fp_ollama","created_at":1736332476031,"model":"${models[1].id}"}}}}`;
114+
events.push({
115+
event: 'end',
116+
done: true,
117+
});
99118

100-
return body;
119+
return `${events
120+
.map(({ event, data }) => `data: ${JSON.stringify({ event, data })}\n\n`)
121+
.join('')}\n`;
101122
};

workspaces/lightspeed/packages/app/e2e-tests/lightspeed.test.ts

Lines changed: 17 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -27,6 +27,8 @@ import {
2727
} from './fixtures/responses';
2828
import { openLightspeed, sendMessage } from './utils/testHelper';
2929

30+
const botQuery = 'Please respond';
31+
3032
test.beforeEach(async ({ page }) => {
3133
await page.route(`${modelBaseUrl}/v1/models`, async route => {
3234
const json = { object: 'list', data: models };
@@ -37,20 +39,19 @@ test.beforeEach(async ({ page }) => {
3739
const json = [];
3840
await route.fulfill({ json });
3941
}
40-
if (route.request().method() === 'POST') {
41-
const json = defaultConversation;
42-
await route.fulfill({ json });
43-
}
4442
});
4543
await page.route(`${modelBaseUrl}/conversations/user*`, async route => {
4644
const json = [];
4745
await route.fulfill({ json });
4846
});
4947
await page.route(`${modelBaseUrl}/v1/query`, async route => {
5048
const payload = route.request().postDataJSON();
51-
conversations[0].conversation_id = payload.conversation_id;
52-
contents[0].kwargs.content = payload.query;
53-
const body = generateQueryResponse(payload.conversation_id);
49+
50+
const body = generateQueryResponse(
51+
payload.query === botQuery
52+
? (conversations[1].conversation_id = payload.conversation_id)
53+
: conversations[0].conversation_id,
54+
);
5455
await route.fulfill({ body });
5556
});
5657

@@ -82,27 +83,26 @@ test.describe('Conversation', () => {
8283
test.beforeEach(async ({ page }) => {
8384
await page.route(`${modelBaseUrl}/conversations`, async route => {
8485
if (route.request().method() === 'GET') {
85-
const json = conversations;
86+
const json = { conversations };
8687
await route.fulfill({ json });
8788
} else {
8889
await route.fulfill();
8990
}
9091
});
9192
await page.route(`${modelBaseUrl}/conversations/user*`, async route => {
92-
const json = contents;
93+
const json = { chat_history: contents };
9394
await route.fulfill({ json });
9495
});
9596
});
9697

9798
test('Bot responds', async ({ page }) => {
98-
const inputText = 'Please respond';
99-
await sendMessage(inputText, page);
99+
await sendMessage(botQuery, page);
100100

101101
const userMessage = page.locator('.pf-chatbot__message--user');
102102
const botMessage = page.locator('.pf-chatbot__message--bot');
103103

104104
await expect(userMessage).toBeVisible();
105-
await expect(userMessage).toContainText(inputText);
105+
await expect(userMessage).toContainText(botQuery);
106106
await expect(botMessage).toBeVisible();
107107
await expect(botMessage).toContainText(botResponse);
108108
});
@@ -123,7 +123,7 @@ test.describe('Conversation', () => {
123123
test('Filter and switch conversations', async ({ page }) => {
124124
await page.route(`${modelBaseUrl}/conversations`, async route => {
125125
if (route.request().method() === 'GET') {
126-
const json = moreConversations;
126+
const json = { conversations: moreConversations };
127127
await route.fulfill({ json });
128128
} else {
129129
await route.fulfill();
@@ -133,7 +133,7 @@ test.describe('Conversation', () => {
133133
const sidePanel = page.locator('.pf-v6-c-drawer__panel');
134134

135135
const currentChat = sidePanel.locator('li.pf-chatbot__menu-item--active');
136-
await expect(currentChat).toHaveText(moreConversations[0].summary);
136+
await expect(currentChat).toHaveText(moreConversations[0].topic_summary);
137137

138138
const chats = sidePanel.locator('li.pf-chatbot__menu-item');
139139
await expect(chats).toHaveCount(2);
@@ -143,14 +143,14 @@ test.describe('Conversation', () => {
143143
);
144144
await searchBox.fill('new');
145145
await expect(chats).toHaveCount(1);
146-
await expect(chats).toHaveText(moreConversations[1].summary);
146+
await expect(chats).toHaveText(moreConversations[1].topic_summary);
147147

148148
await chats.click();
149149

150150
const userMessage = page.locator('.pf-chatbot__message--user');
151151
const botMessage = page.locator('.pf-chatbot__message--bot');
152152

153-
await expect(userMessage).toContainText(contents[0].kwargs.content);
154-
await expect(botMessage).toContainText(contents[1].kwargs.content);
153+
await expect(userMessage).toContainText(contents[0].content);
154+
await expect(botMessage).toContainText(contents[1].content);
155155
});
156156
});

workspaces/lightspeed/plugins/lightspeed/src/api/LightspeedApiClient.ts

Lines changed: 43 additions & 27 deletions
Original file line number | Diff line number | Diff line change
@@ -16,6 +16,7 @@
1616

1717
import { ConfigApi, FetchApi } from '@backstage/core-plugin-api';
1818

19+
import { TEMP_CONVERSATION_ID } from '../const';
1920
import { LightspeedAPI } from './api';
2021

2122
export type Options = {
@@ -56,11 +57,15 @@ export class LightspeedApiClient implements LightspeedAPI {
5657
'Content-Type': 'application/json',
5758
},
5859
body: JSON.stringify({
59-
conversation_id,
60-
serverURL: this.getServerUrl(),
60+
conversation_id:
61+
conversation_id === TEMP_CONVERSATION_ID
62+
? undefined
63+
: conversation_id,
6164
model: selectedModel,
65+
provider: this.configApi
66+
.getConfigArray('lightspeed.servers')[0]
67+
.getOptionalString('id'), // Currently supports a single llm server
6268
query: prompt,
63-
historyLength: 10,
6469
}),
6570
});
6671

@@ -69,9 +74,19 @@ export class LightspeedApiClient implements LightspeedAPI {
6974
}
7075

7176
if (!response.ok) {
72-
throw new Error(
73-
`failed to fetch data, status ${response.status}: ${response.statusText}`,
74-
);
77+
const body = await response.body.getReader();
78+
const reader = body.read();
79+
const decoder = new TextDecoder('utf-8');
80+
const text = await reader.then(({ done, value }) => {
81+
if (done) {
82+
return '';
83+
}
84+
return decoder.decode(value);
85+
});
86+
const errorMessage = JSON.parse(text);
87+
if (errorMessage?.error) {
88+
throw new Error(`failed to create message: ${errorMessage.error}`);
89+
}
7590
}
7691
return response.body.getReader();
7792
}
@@ -93,45 +108,46 @@ export class LightspeedApiClient implements LightspeedAPI {
93108
async getAllModels() {
94109
const baseUrl = await this.getBaseUrl();
95110
const result = await this.fetcher(`${baseUrl}/v1/models`);
111+
112+
if (!result.ok) {
113+
throw new Error(
114+
`failed to get models, status ${result.status}: ${result.statusText}`,
115+
);
116+
}
117+
96118
const response = await result.json();
97119
return response?.data ? response.data : [];
98120
}
99121

100122
async getConversationMessages(conversation_id: string) {
123+
if (conversation_id === TEMP_CONVERSATION_ID) {
124+
return [];
125+
}
101126
const baseUrl = await this.getBaseUrl();
102127
const result = await this.fetcher(
103128
`${baseUrl}/conversations/${encodeURIComponent(conversation_id)}`,
104129
);
105-
return await result.json();
130+
if (!result.ok) {
131+
throw new Error(
132+
`failed to get conversation messages, status ${result.status}: ${result.statusText}`,
133+
);
134+
}
135+
const response = await result.json();
136+
return response.chat_history ?? [];
106137
}
107138

108139
async getConversations() {
109140
const baseUrl = await this.getBaseUrl();
110141
const result = await this.fetcher(`${baseUrl}/conversations`);
111-
return await result.json();
112-
}
113142

114-
async createConversation() {
115-
const baseUrl = await this.getBaseUrl();
116-
117-
const response = await this.fetchApi.fetch(`${baseUrl}/conversations`, {
118-
method: 'POST',
119-
headers: {
120-
'Content-Type': 'application/json',
121-
},
122-
body: JSON.stringify({}),
123-
});
124-
125-
if (!response.body) {
126-
throw new Error('Something went wrong.');
127-
}
128-
129-
if (!response.ok) {
143+
if (!result.ok) {
130144
throw new Error(
131-
`failed to create conversation, status ${response.status}: ${response.statusText}`,
145+
`failed to get conversation, status ${result.status}: ${result.statusText}`,
132146
);
133147
}
134-
return await response.json();
148+
149+
const response = await result.json();
150+
return response.conversations ?? [];
135151
}
136152

137153
async deleteConversation(conversation_id: string) {

workspaces/lightspeed/plugins/lightspeed/src/api/api.ts

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -23,7 +23,6 @@ import { BaseMessage, ConversationList } from '../types';
2323
export type LightspeedAPI = {
2424
getAllModels: () => Promise<OpenAI.Models.Model[]>;
2525
getConversationMessages: (conversation_id: string) => Promise<BaseMessage[]>;
26-
createConversation: () => Promise<{ conversation_id: string }>;
2726
createMessage: (
2827
prompt: string,
2928
selectedModel: string,

0 commit comments

Comments (0)