feat: initial implementation of summary feature (#3)
Yengas authored Jan 2, 2023
1 parent f44ae1b commit 14ac7d6
Showing 16 changed files with 675 additions and 231 deletions.
107 changes: 102 additions & 5 deletions README.md
@@ -9,10 +9,9 @@ import { Configuration, OpenAIApi } from "openai";
import { AIPersona, ModelConfiguration, ConversationPromptService } from "@wisegpt/gpt-conversation-prompt";

const aiPersona: AIPersona = {
name: "wiseGPT",
instructions: `You are a software engineer.
When providing code examples, use triple backticks.`,
exampleConversations: [],
name: "WiseGPT",
instructions: `When providing code examples, use triple backticks.`,
personality: `You are a software engineer.`,
};

const modelConfiguration: ModelConfiguration = {
@@ -24,7 +23,7 @@ const openAIApi = new OpenAIApi(new Configuration({ apiKey: "<<your-secret>>" })
const conversationPromptService = new ConversationPromptService(openAIApi);

async function exampleUsage() {
const { text, usage } = await conversationPromptService.conversationCompletion({
const { text, usage } = await conversationPromptService.completion({
prompt: {
conversation: {
messages: [
@@ -49,4 +48,102 @@

console.log(JSON.stringify({ text, usage }));
}
```

## Detailed Example Usage
Below is an example that generates a summary, then re-uses the summarized conversation to keep the conversation going and summarize it again.

```typescript
import { Configuration, OpenAIApi } from "openai";
import { AIPersona, ModelConfiguration, ConversationPromptService, Author, Conversation } from "@wisegpt/gpt-conversation-prompt";

const aiPersona: AIPersona = {
name: "WiseGPT",
instructions: `When providing code examples, use triple backticks.`,
personality: `You are a software engineer.`,
};

const modelConfiguration: ModelConfiguration = {
model: "text-davinci-003",
max_tokens: 1000,
};

const openAIApi = new OpenAIApi(new Configuration({ apiKey: "<<your-secret>>" }));
const conversationPromptService = new ConversationPromptService(openAIApi);

async function exampleUsage() {
const authors: Record<string, Author> = {
user1: { type: "USER", id: "EU01" },
user2: { type: "USER", id: "EU02" },
};

const conversation1: Conversation = {
messages: [
{
text: "My name is Yigitcan.",
author: authors.user1,
},
{
text: "My name is Tolga.",
author: authors.user2,
},
],
};

const botResponse1 = await conversationPromptService.completion({
prompt: { conversation: conversation1, aiPersona },
modelConfiguration,
});

console.log(JSON.stringify({ botResponse1 }));

  // add the bot's response to the conversation
conversation1.messages.push({
text: botResponse1.text,
author: { type: "BOT" },
});

const summaryResponse1 = await conversationPromptService.summary({
prompt: { conversation: conversation1, aiPersona },
modelConfiguration,
});

console.log(JSON.stringify({ summaryResponse1 }));

// create a whole new conversation with the summary and a new message
const conversation2 = {
summary: summaryResponse1.summary,
messages: [
{
text: "what is my name? and what is the capital of Turkey?",
author: authors.user1,
},
],
};

const botResponse2 = await conversationPromptService.completion({
prompt: { conversation: conversation2, aiPersona },
modelConfiguration,
});

console.log(JSON.stringify({ botResponse2 }));

  // add the bot's response to the conversation
  conversation2.messages.push({
    text: botResponse2.text,
author: { type: "BOT" },
});

const summaryResponse2 = await conversationPromptService.summary({
prompt: { conversation: conversation2, aiPersona },
modelConfiguration,
});

console.log(JSON.stringify({ summaryResponse2 }));
}

exampleUsage().catch((err) => {
console.error(err);
process.exit(1);
});
```
src/conversation-prompt/conversation-prompt-service.dto.ts
@@ -17,6 +17,7 @@ export type ConversationCompleteInput = {
prompt: {
aiPersona: AIPersona;
conversation: Conversation;
exampleConversations?: Conversation[];
};
modelConfiguration: ModelConfiguration;
};
@@ -29,3 +30,21 @@
totalTokens: number;
};
};

export type ConversationSummaryInput = {
// prompt generation related details
prompt: {
aiPersona: AIPersona;
conversation: Conversation;
};
modelConfiguration: ModelConfiguration;
};

export type ConversationSummaryOutput = {
summary: string;
usage: {
promptTokens: number;
completionTokens: number;
totalTokens: number;
};
};
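
To make the new DTOs above concrete, here is a minimal, hypothetical usage sketch (it is not part of the diff; persona, model, and conversation shapes follow the README example, and only the `completion`/`summary` methods introduced in this commit are assumed):

```typescript
import { Configuration, OpenAIApi } from "openai";
import {
  AIPersona,
  Conversation,
  ConversationPromptService,
  ModelConfiguration,
} from "@wisegpt/gpt-conversation-prompt";

// Illustrative values only, copied from the README example.
const aiPersona: AIPersona = {
  name: "WiseGPT",
  instructions: `When providing code examples, use triple backticks.`,
  personality: `You are a software engineer.`,
};
const modelConfiguration: ModelConfiguration = { model: "text-davinci-003", max_tokens: 1000 };
const service = new ConversationPromptService(
  new OpenAIApi(new Configuration({ apiKey: "<<your-secret>>" }))
);

async function sketch(conversation: Conversation, example: Conversation) {
  // ConversationCompleteInput: the prompt can now carry optional exampleConversations.
  const { text } = await service.completion({
    prompt: { aiPersona, conversation, exampleConversations: [example] },
    modelConfiguration,
  });

  // ConversationSummaryInput/Output: same prompt shape, but the result is `summary` instead of `text`.
  const { summary, usage } = await service.summary({
    prompt: { aiPersona, conversation },
    modelConfiguration,
  });

  console.log(JSON.stringify({ text, summary, usage }));
}
```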
43 changes: 36 additions & 7 deletions src/conversation-prompt/conversation-prompt.service.ts
@@ -1,17 +1,18 @@
import { OpenAIApi } from "openai";
import {
STATEMENT_SEPARATOR_TOKEN,
createConversationCompletionPrompt,
} from "./create-conversation-completion-prompt";
import {
ConversationCompleteInput,
ConversationCompleteOutput,
} from "./prompt.dto";
ConversationSummaryInput,
ConversationSummaryOutput,
} from "./conversation-prompt-service.dto";
import { createConversationCompletionPrompt } from "./prompts/create-conversation-completion-prompt";
import { createConversationSummaryPrompt } from "./prompts/create-conversation-summary-prompt";
import { STATEMENT_SEPARATOR_TOKEN } from "./prompts/prompts.constants";

export class ConversationPromptService {
constructor(private readonly openAIApi: OpenAIApi) {}

async conversationCompletion(
async completion(
input: ConversationCompleteInput
): Promise<ConversationCompleteOutput> {
const prompt = createConversationCompletionPrompt(input.prompt);
@@ -26,7 +27,8 @@ export class ConversationPromptService {

const text = data.choices?.[0]
.text!.trim()
.replace(STATEMENT_SEPARATOR_TOKEN, "");
.replace(STATEMENT_SEPARATOR_TOKEN, "")
.trim();

const {
prompt_tokens: promptTokens,
@@ -36,4 +38,31 @@

return { text, usage: { promptTokens, completionTokens, totalTokens } };
}

async summary(
input: ConversationSummaryInput
): Promise<ConversationSummaryOutput> {
const prompt = createConversationSummaryPrompt(input.prompt);

const { data } = await this.openAIApi.createCompletion({
...input.modelConfiguration,
best_of: 1,
n: 1,
echo: false,
prompt,
});

const summary = data.choices?.[0]
.text!.trim()
.replace(STATEMENT_SEPARATOR_TOKEN, "")
.trim();

const {
prompt_tokens: promptTokens,
completion_tokens: completionTokens,
total_tokens: totalTokens,
} = data.usage!;

return { summary, usage: { promptTokens, completionTokens, totalTokens } };
}
}
71 changes: 0 additions & 71 deletions src/conversation-prompt/create-conversation-completion-prompt.ts

This file was deleted.

2 changes: 1 addition & 1 deletion src/conversation-prompt/index.ts
@@ -1,3 +1,3 @@
export * from "./prompt.dto";
export * from "./conversation-prompt-service.dto";
export * from "./mention";
export { ConversationPromptService } from "./conversation-prompt.service";
32 changes: 32 additions & 0 deletions src/conversation-prompt/prompts/create-conversation-completion-prompt.ts
@@ -0,0 +1,32 @@
import { AIPersona, Conversation } from "../../types";
import { BOT_MENTION } from "../mention";
import { STATEMENT_SEPARATOR_TOKEN } from "./prompts.constants";
import { renderAIPersona } from "./render-ai-persona";
import { renderConversation } from "./render-conversation";
import { renderFormatAndExamples } from "./render-format-and-examples";

const CURRENT_CONVERSATION_PROMPT = `Continue the conversation, paying very close attention to things entities told you; such as their name, and personal details. Never say "${STATEMENT_SEPARATOR_TOKEN}". Current conversation:`;

export type CreateConversationCompletionPromptInput = {
aiPersona: AIPersona;
exampleConversations?: Conversation[];
conversation: Conversation;
};

export function createConversationCompletionPrompt({
aiPersona,
conversation,
exampleConversations,
}: CreateConversationCompletionPromptInput): string {
const hasSummary = !!conversation.summary;

return (
renderAIPersona(aiPersona) +
`\n${renderFormatAndExamples({
hasSummary,
exampleConversations,
})}` +
`\n\n${CURRENT_CONVERSATION_PROMPT}\n\n` +
(renderConversation(conversation) + `${BOT_MENTION}:`)
);
}
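
As a rough, hypothetical illustration of the builder above (the exact rendered text depends on the render helpers, which are outside this hunk; field shapes follow the README example):

```typescript
import { createConversationCompletionPrompt } from "./create-conversation-completion-prompt";

// Hypothetical inputs; the example conversation primes the expected answer style.
const completionPrompt = createConversationCompletionPrompt({
  aiPersona: {
    name: "WiseGPT",
    instructions: "When providing code examples, use triple backticks.",
    personality: "You are a software engineer.",
  },
  exampleConversations: [
    {
      messages: [
        { text: "How do I log to the console?", author: { type: "USER", id: "EU01" } },
        { text: 'You can use console.log("hello").', author: { type: "BOT" } },
      ],
    },
  ],
  conversation: {
    messages: [
      { text: "How do I read a file in Node.js?", author: { type: "USER", id: "EU01" } },
    ],
  },
});

// The returned string ends with the bot mention followed by ":", so the model's
// completion is read as the bot's next message.
```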
62 changes: 62 additions & 0 deletions src/conversation-prompt/prompts/create-conversation-summary-prompt.ts
@@ -0,0 +1,62 @@
import { AIPersona, Conversation } from "../../types";
import { STATEMENT_SEPARATOR_TOKEN } from "./prompts.constants";
import { renderAIPersona } from "./render-ai-persona";
import { renderConversation } from "./render-conversation";
import { renderFormatAndExamples } from "./render-format-and-examples";

export type CreateConversationSummaryPromptInput = {
aiPersona: AIPersona;
conversation: Conversation;
};

const buildPrompt = ({
hasSummary,
hasMultipleEntities,
}: {
hasSummary: boolean;
hasMultipleEntities: boolean;
}): string => {
let prompt = "";

if (hasSummary) {
prompt +=
"Summarize the conversation below. Make a detailed summary which only consists of the previous summary and later messages. ";
} else {
prompt +=
"Summarize the conversation below. Make a detailed summary of the existing messages. ";
}

prompt += `Do not summarize the instructions or examples. Do not add anything extra or something that was not discussed. Do not repeat details. Pay close attention to the things that entities told you; especially their personal details and code details. `;

if (hasMultipleEntities) {
prompt += `You must reference entities in the conversation with the "<@id>" format in the summary to differentiate their personal details and messages. `;
}

prompt += `Omit small talk and conversation status. Never say "${STATEMENT_SEPARATOR_TOKEN}":`;

return prompt;
};

export const createConversationSummaryPrompt = ({
aiPersona,
conversation,
}: CreateConversationSummaryPromptInput) => {
const hasSummary = !!conversation.summary;
const participantCount = conversation.messages.reduce(
(set, { author }) => (author.type !== "BOT" ? set.add(author.id) : set),
new Set<string>()
).size;
const prompt = buildPrompt({
hasSummary,
hasMultipleEntities: participantCount > 1,
});

return (
renderAIPersona(aiPersona) +
`\n${renderFormatAndExamples({
hasSummary,
})}` +
`\n\n${prompt}\n\n` +
(renderConversation(conversation) + `...\nSummary:`)
);
};
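
And a rough, hypothetical sketch of calling the summary builder directly (same caveats as above; because the conversation carries a prior summary, the "previous summary and later messages" wording is selected):

```typescript
import { createConversationSummaryPrompt } from "./create-conversation-summary-prompt";

// Hypothetical inputs; persona and conversation shapes follow the README example.
const summaryPrompt = createConversationSummaryPrompt({
  aiPersona: {
    name: "WiseGPT",
    instructions: "When providing code examples, use triple backticks.",
    personality: "You are a software engineer.",
  },
  conversation: {
    // An existing summary selects the "previous summary and later messages" instruction.
    summary: "<@EU01> introduced themselves as Yigitcan.",
    messages: [
      { text: "What is the capital of Turkey?", author: { type: "USER", id: "EU01" } },
    ],
  },
});

// Only one non-BOT author appears, so the "<@id>" referencing instruction is omitted;
// the returned string ends with "...\nSummary:" for the model to complete.
```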
1 change: 1 addition & 0 deletions src/conversation-prompt/prompts/prompts.constants.ts
@@ -0,0 +1 @@
export const STATEMENT_SEPARATOR_TOKEN = "<|endofstatement|>";