Skip to content

Commit

Permalink
fix(chat): optimize chat messages processing logic, remove extra requests (Issue #2891) (#2917)
Browse files Browse the repository at this point in the history
  • Loading branch information
Gimir authored Jan 10, 2025
1 parent 519eb6b commit 0d7b47a
Show file tree
Hide file tree
Showing 9 changed files with 46 additions and 40 deletions.
2 changes: 1 addition & 1 deletion apps/chat-e2e/src/assertions/api/apiAssertion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export class ApiAssertion {
expectedModel: DialAIEntityModel,
) {
expect
.soft(request.modelId, ExpectedMessages.chatRequestModelIsValid)
.soft(request.model?.id, ExpectedMessages.chatRequestModelIsValid)
.toBe(expectedModel.id);
}

Expand Down
8 changes: 5 additions & 3 deletions apps/chat-e2e/src/testData/api/chatApiHelper.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { Conversation } from '@/chat/types/chat';
import { API } from '@/src/testData';
import { BaseApiHelper } from '@/src/testData/api/baseApiHelper';
import { BucketUtil } from '@/src/utils';
import { BucketUtil, ModelsUtil } from '@/src/utils';

export class ChatApiHelper extends BaseApiHelper {
public buildRequestData(conversation: Conversation) {
Expand All @@ -25,7 +25,7 @@ export class ChatApiHelper extends BaseApiHelper {
const commonData = {
id: `conversations/${BucketUtil.getBucket()}/` + conversation.id,
messages: [userMessage],
modelId: conversation.model.id,
model: ModelsUtil.getOpenAIEntity(conversation.model.id),
prompt: conversation.prompt,
temperature: conversation.temperature,
selectedAddons: conversation.selectedAddons,
Expand All @@ -34,7 +34,9 @@ export class ChatApiHelper extends BaseApiHelper {
return conversation.assistantModelId
? {
...commonData,
assistantModelId: conversation.assistantModelId,
assistantModel: ModelsUtil.getOpenAIEntity(
conversation.assistantModelId,
),
}
: commonData;
}
Expand Down
2 changes: 1 addition & 1 deletion apps/chat-e2e/src/tests/chatHeader.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ dialTest(
const requestsData = await chat.sendRequestWithKeyboard(request, false);

expect
.soft(requestsData.modelId, ExpectedMessages.requestModeIdIsValid)
.soft(requestsData.model.id, ExpectedMessages.requestModeIdIsValid)
.toBe(conversation.model.id);
expect
.soft(requestsData.prompt, ExpectedMessages.requestPromptIsValid)
Expand Down
8 changes: 4 additions & 4 deletions apps/chat-e2e/src/tests/compareMode.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -551,7 +551,7 @@ dialTest(

expect
.soft(
requestsData.rightRequest.modelId,
requestsData.rightRequest.model.id,
ExpectedMessages.requestModeIdIsValid,
)
.toBe(defaultModel.id);
Expand All @@ -570,7 +570,7 @@ dialTest(

expect
.soft(
requestsData.leftRequest.modelId,
requestsData.leftRequest.model.id,
ExpectedMessages.requestModeIdIsValid,
)
.toBe(aModel.id);
Expand Down Expand Up @@ -1460,13 +1460,13 @@ dialTest(
);
expect
.soft(
requestsData.rightRequest.modelId,
requestsData.rightRequest.model.id,
ExpectedMessages.requestModeIdIsValid,
)
.toBe(firstFolderConversation.conversations[0].model.id);
expect
.soft(
requestsData.leftRequest.modelId,
requestsData.leftRequest.model.id,
ExpectedMessages.requestModeIdIsValid,
)
.toBe(secondFolderConversation.conversations[0].model.id);
Expand Down
22 changes: 17 additions & 5 deletions apps/chat-e2e/src/tests/replay.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,10 @@ dialTest(
'Verify chat API request is sent with correct settings',
async () => {
expect
.soft(replayRequest.modelId, ExpectedMessages.chatRequestModelIsValid)
.soft(
replayRequest.model?.id,
ExpectedMessages.chatRequestModelIsValid,
)
.toBe(replayModel.id);
expect
.soft(replayRequest.prompt, ExpectedMessages.chatRequestPromptIsValid)
Expand Down Expand Up @@ -413,7 +416,10 @@ dialTest(
conversation.messages[0].content,
);
expect
.soft(replayRequest.modelId, ExpectedMessages.chatRequestModelIsValid)
.soft(
replayRequest.model?.id,
ExpectedMessages.chatRequestModelIsValid,
)
.toBe(conversation.model.id);
expect
.soft(replayRequest.prompt, ExpectedMessages.chatRequestPromptIsValid)
Expand Down Expand Up @@ -651,7 +657,10 @@ dialTest(
true,
);
expect
.soft(replayRequest.modelId, ExpectedMessages.chatRequestModelIsValid)
.soft(
replayRequest.model.id,
ExpectedMessages.chatRequestModelIsValid,
)
.toBe(conversation.model.id);
},
);
Expand All @@ -674,7 +683,7 @@ dialTest(
const newMessage = '2+3';
const newRequest = await chat.sendRequestWithButton(newMessage);
expect
.soft(newRequest.modelId, ExpectedMessages.chatRequestModelIsValid)
.soft(newRequest.model.id, ExpectedMessages.chatRequestModelIsValid)
.toBe(conversation.model.id);
expect
.soft(
Expand Down Expand Up @@ -850,7 +859,10 @@ dialTest(
const modelId =
i === 1 ? ImportedModelIds.CHAT_BISON : ImportedModelIds.GPT_4;
expect
.soft(requests[i].modelId, ExpectedMessages.chatRequestModelIsValid)
.soft(
requests[i].model.id,
ExpectedMessages.chatRequestModelIsValid,
)
.toBe(modelId);
}
},
Expand Down
12 changes: 7 additions & 5 deletions apps/chat/src/components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -310,14 +310,16 @@ export const ChatView = memo(() => {
useLayoutEffect(() => {
if (selectedConversations.length > 0) {
const mergedMessages: MergedMessages[] = [];
const firstConversationMessages = excludeSystemMessages(
selectedConversations[0].messages,
const userMessages = selectedConversations.map((conv) =>
excludeSystemMessages(conv.messages),
);
for (let i = 0; i < firstConversationMessages.length; i++) {
const messagesLength = userMessages[0].length;

for (let i = 0; i < messagesLength; i++) {
mergedMessages.push(
selectedConversations.map((conv) => [
selectedConversations.map((conv, convIndex) => [
conv,
excludeSystemMessages(conv.messages)[i] || {
userMessages[convIndex][i] || {
role: Role.Assistant,
content: '',
},
Expand Down
22 changes: 5 additions & 17 deletions apps/chat/src/pages/api/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import {
getUserMessageCustomContent,
limitMessagesByTokens,
} from '@/src/utils/server/chat';
import { getSortedEntities } from '@/src/utils/server/get-sorted-entities';

import { ChatBody } from '@/src/types/chat';
import { EntityType } from '@/src/types/common';
Expand All @@ -30,33 +29,22 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => {
}

const {
modelId,
id,
messages,
prompt,
temperature,
selectedAddons,
assistantModelId,
model,
assistantModel,
} = req.body as ChatBody;

try {
const token = await getToken({ req });
const models = await getSortedEntities(token);
const model = models.find(
({ id, reference }) => id === modelId || reference === modelId,
);
const assistantModel = assistantModelId
? models.find(
({ id, reference }) =>
id === assistantModelId || reference === assistantModelId,
)
: undefined;

if (
!id ||
!model ||
(!!assistantModelId && !assistantModel) ||
(!!assistantModelId && model.type !== EntityType.Assistant) ||
(!!assistantModel && model.type !== EntityType.Assistant) ||
(!prompt && !messages?.length)
) {
return res.status(400).send(errorsMessages[400]);
Expand Down Expand Up @@ -119,7 +107,7 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => {
temperature: temperatureToUse,
messages: messagesToSend,
selectedAddonsIds: selectedAddons?.length ? selectedAddons : undefined,
assistantModelId,
assistantModelId: assistantModel?.id,
userJWT: token?.access_token as string,
chatId: id,
jobTitle: token?.jobTitle as string,
Expand Down Expand Up @@ -158,7 +146,7 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => {
return chatErrorHandler({
error,
res,
msg: `Error while sending chat request to '${modelId}'`,
msg: `Error while sending chat request to '${model?.id}'`,
});
}
};
Expand Down
4 changes: 2 additions & 2 deletions apps/chat/src/store/conversations/conversations.epics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1333,14 +1333,14 @@ const streamMessageEpic: AppEpic = (action$, state$) =>
}
if (conversationModelType === EntityType.Assistant && assistantModelId) {
modelAdditionalSettings = {
assistantModelId,
assistantModel: modelsMap[assistantModelId],
temperature: payload.conversation.temperature,
selectedAddons,
};
}

const chatBody: ChatBody = {
modelId: payload.conversation.model.id,
model: modelsMap[payload.conversation.model.id],
messages: payload.conversation.messages
.filter(
(message, index) =>
Expand Down
6 changes: 4 additions & 2 deletions apps/chat/src/types/chat.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { DialAIEntityModel } from '@/src/types/models';

import { ConversationInfo, Message, ShareEntity } from '@epam/ai-dial-shared';

export enum CopyTableType {
Expand All @@ -7,13 +9,13 @@ export enum CopyTableType {
}

export interface ChatBody {
modelId: string;
messages: Message[];
id: string;
prompt?: string;
temperature?: number;
selectedAddons?: string[];
assistantModelId?: string;
model?: DialAIEntityModel;
assistantModel?: DialAIEntityModel;
}

export interface RateBody {
Expand Down

0 comments on commit 0d7b47a

Please sign in to comment.