
Commit f6211f4

Merge pull request #3 from golivecosmos/conversation-summary-memory
Implements conversation summary memory
2 parents c8ec99f + 1c12a88 commit f6211f4
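
For context on what this commit changes: BufferMemory feeds the raw message history back into the prompt on every turn, while ConversationSummaryMemory uses the LLM itself to maintain a rolling summary of the conversation, which keeps the prompt compact as the chat grows. A minimal sketch of the new memory in isolation, assuming the saveContext / loadMemoryVariables API of the langchain JS package used in this repo:

import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ConversationSummaryMemory } from 'langchain/memory';

const llm = new ChatOpenAI({ temperature: 0 });
const memory = new ConversationSummaryMemory({ llm, returnMessages: true });

// Record one exchange; instead of storing the raw messages verbatim, the
// memory asks the LLM to fold them into a running summary.
await memory.saveContext(
  { input: 'My name is Sam and I am building a Koa chat server.' },
  { output: 'Nice to meet you, Sam! How can I help with your server?' },
);

// With returnMessages: true this resolves to { history: [...] }, where the
// history carries the summarized conversation rather than every past message.
console.log(await memory.loadMemoryVariables({}));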

File tree: 3 files changed, +17 −15 lines

server/handlers/chat_handler.js (+14 −13)

@@ -1,26 +1,27 @@
-import { AgentExecutor, ChatAgent } from 'langchain/agents';
-import { ConversationChain, LLMChain } from 'langchain/chains';
+import { ConversationChain } from 'langchain/chains';
 import { ChatOpenAI } from 'langchain/chat_models/openai';
 import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
-import { BufferMemory } from 'langchain/memory';
-import { HumanChatMessage, SystemChatMessage } from 'langchain/schema';
-import { SerpAPI } from 'langchain/tools';
+import { ConversationSummaryMemory } from 'langchain/memory';
 
 class ChatService {
-  static async startChat(data) {
-    const chat = new ChatOpenAI({ openAIApiKey: process.env.OPENAI_API_KEY, temperature: 0, verbose: true });
-    const { body: { userInput } } = data;
-
-    const chatPrompt = ChatPromptTemplate.fromPromptMessages([
+  constructor () {
+    this.chat = new ChatOpenAI({ temperature: 0, verbose: true });
+    this.chatPrompt = ChatPromptTemplate.fromPromptMessages([
       SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
       new MessagesPlaceholder('history'),
       HumanMessagePromptTemplate.fromTemplate('{input}'),
     ]);
 
+    this.memory = new ConversationSummaryMemory({ llm: this.chat, returnMessages: true });
+  }
+
+  async startChat(data) {
+    const { body: { userInput } } = data;
+
     const chain = new ConversationChain({
-      memory: new BufferMemory({ returnMessages: true }),
-      prompt: chatPrompt,
-      llm: chat,
+      memory: this.memory,
+      prompt: this.chatPrompt,
+      llm: this.chat,
     });
 
     const response = await chain.call({
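
Putting the hunk above together, the handler after this commit likely reads as below. The tail of startChat is cut off by the diff, so the chain.call argument shape, the return value, and the export line are assumptions rather than part of this change:

import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { ConversationSummaryMemory } from 'langchain/memory';

class ChatService {
  constructor () {
    // ChatOpenAI reads OPENAI_API_KEY from the environment, so the explicit
    // openAIApiKey option from the old static method is no longer passed here.
    this.chat = new ChatOpenAI({ temperature: 0, verbose: true });
    this.chatPrompt = ChatPromptTemplate.fromPromptMessages([
      SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
      new MessagesPlaceholder('history'),
      HumanMessagePromptTemplate.fromTemplate('{input}'),
    ]);

    // Created once per service instance, so the running summary survives
    // across requests instead of being rebuilt on every call.
    this.memory = new ConversationSummaryMemory({ llm: this.chat, returnMessages: true });
  }

  async startChat(data) {
    const { body: { userInput } } = data;

    const chain = new ConversationChain({
      memory: this.memory,
      prompt: this.chatPrompt,
      llm: this.chat,
    });

    // Assumed: the diff stops mid-call, but ConversationChain expects the
    // prompt's {input} variable, and the router returns whatever this resolves to.
    const response = await chain.call({ input: userInput });
    return response;
  }
}

export default ChatService; // assumed export; not shown in this diff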

server/index.js (−1)

@@ -1,5 +1,4 @@
 import 'dotenv/config'
-import * as fs from 'fs';
 import Koa from 'koa';
 import cors from '@koa/cors';
 import { koaBody } from 'koa-body';

server/routers/chat.js (+3 −1)

@@ -6,12 +6,14 @@ const router = new Router({
   prefix: '/chat',
 });
 
+const chatService = new ChatService();
+
 router.post('/', async (ctx) => {
   const handlerData = {};
   handlerData.body = ctx.request.body;
   handlerData.user = ctx.state.user;
 
-  const res = await ChatService.startChat(handlerData);
+  const res = await chatService.startChat(handlerData);
   ctx.body = res;
 });
 
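
With the router now holding a single ChatService instance, repeated POSTs share one summary memory. A quick way to exercise the route, assuming the Koa app from server/index.js listens on localhost:3000 (the port is not shown in this diff) and Node 18+ for the global fetch:

// Two requests against the same running server; the second turn is answered
// with summarized context from the first, because chatService (and its
// ConversationSummaryMemory) lives for the lifetime of the process.
const ask = async (userInput) => {
  const res = await fetch('http://localhost:3000/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ userInput }),
  });
  return res.json();
};

console.log(await ask('My favorite color is teal.'));
console.log(await ask('What is my favorite color?'));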
