Skip to content

Commit

Permalink
docs
Browse files Browse the repository at this point in the history
  • Loading branch information
felixfeng33 committed Oct 25, 2024
1 parent 3f8aa3a commit 8c82e9e
Show file tree
Hide file tree
Showing 11 changed files with 143 additions and 90 deletions.
1 change: 1 addition & 0 deletions templates/plate-playground-template/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
OPENAI_API_KEY=
29 changes: 13 additions & 16 deletions templates/plate-playground-template/README.md
Original file line number Diff line number Diff line change
@@ -1,21 +1,18 @@
# Playground Template
## Environment Variables
Copy the example env file:

A comprehensive playground template for building rich-text editors with [Plate](https://platejs.org/) and Next.js 14.
```
cp ./.env.example ./.env.local
```

## Usage
- `OPENAI_API_KEY` – Your OpenAI API key (obtain one [here](https://platform.openai.com/account/api-keys))

```bash
pnpm install
pnpm dev
```

## Features
## Running the App

- Next.js 14 App Directory
- [Plate](https://platejs.org/) Editor
- [shadcn/ui](https://ui.shadcn.com/)
- Radix UI Primitives
- Tailwind CSS
- Icons from [Lucide](https://lucide.dev)
- Dark mode with `next-themes`
- Tailwind CSS class sorting, merging and linting.
To run the app locally, run the following commands:

```
pnpm i
pnpm dev
```
2 changes: 1 addition & 1 deletion templates/plate-playground-template/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
"@radix-ui/react-context-menu": "^2.2.1",
"@radix-ui/react-icons": "^1.3.0",
"ai": "^3.4.10",
"@faker-js/faker": "^9.0.2",
"@ai-sdk/openai": "^0.0.67",
"@udecode/cn": "^39.0.0",
"@udecode/plate-ai": "^39.2.10",
"@udecode/plate-slash-command": "^39.0.0",
Expand Down
24 changes: 15 additions & 9 deletions templates/plate-playground-template/pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { openai } from '@ai-sdk/openai';
import { convertToCoreMessages, streamText } from 'ai';

import { limitTotalCharacters } from '../utils/limitTotalCharacters';
import { truncateSystemPrompt } from '../utils/truncateSystemPrompt';

import type { NextRequest } from 'next/server';

/**
 * POST /api/ai/command — streams a chat completion for the editor's AI menu.
 *
 * Expects a JSON body of `{ messages, system }`. Older messages are dropped
 * (keeping the most recent ones) so the total character count stays under
 * 8000, and the system prompt is truncated to at most 12 000 characters.
 *
 * @returns a data-stream `Response` consumable by the `ai` SDK's `useChat`.
 */
export async function POST(req: NextRequest) {
  const { messages, system } = await req.json();

  // Keep only the most recent messages that fit within the character budget.
  const limitedMessages = limitTotalCharacters(messages, 8000);

  const result = await streamText({
    // Propagate client disconnects so the upstream OpenAI request is
    // cancelled too — consistent with the copilot route's abort handling.
    abortSignal: req.signal,
    maxTokens: 2048,
    messages: convertToCoreMessages(limitedMessages),
    model: openai('gpt-4o-mini'),
    system: system ? truncateSystemPrompt(system, 12_000) : undefined,
  });

  return result.toDataStreamResponse();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

import type { NextRequest } from 'next/server';

/**
 * POST /api/ai/copilot — returns one short completion used for inline
 * "ghost text" suggestions in the editor.
 *
 * Expects a JSON body of `{ prompt, system }`. The full `generateText`
 * result is serialized as JSON; an aborted request answers 408, any other
 * failure answers 500 with `{ error }`.
 */
export async function POST(req: NextRequest) {
  const { prompt, system } = await req.json();

  try {
    const result = await generateText({
      // Cancel the upstream call when the client disconnects (e.g. the
      // user keeps typing and the editor aborts the stale suggestion).
      abortSignal: req.signal,
      maxTokens: 50,
      model: openai('gpt-4o-mini'),
      prompt: prompt,
      system,
      temperature: 0.7,
    });

    return new Response(JSON.stringify(result), {
      headers: { 'Content-Type': 'application/json' },
    });
  } catch (error: unknown) {
    // Narrow before touching properties: a non-Error throw (string, null)
    // would otherwise crash this handler or produce `{ error: undefined }`.
    if (error instanceof Error && error.name === 'AbortError') {
      // An aborted request is expected during fast typing — not a failure.
      return new Response(null, { status: 408 });
    }

    const message = error instanceof Error ? error.message : 'Unknown error';

    return new Response(JSON.stringify({ error: message }), {
      status: 500,
      headers: { 'Content-Type': 'application/json' },
    });
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import { Message } from 'ai';

/**
 * Keeps the most recent messages whose combined `content` length fits
 * within `maxTotalChars`, preserving their original order.
 *
 * Walks the history from newest to oldest and stops at the first message
 * that would overflow the budget, so the kept messages are always a
 * contiguous suffix of the input.
 */
export function limitTotalCharacters(
  messages: Message[],
  maxTotalChars: number
) {
  const kept: Message[] = [];
  let remaining = maxTotalChars;

  // Newest first: the most recent context is the most valuable.
  for (const message of [...messages].reverse()) {
    const cost = message.content.length;

    if (cost > remaining) break;

    remaining -= cost;
    kept.push(message);
  }

  // Restore chronological order before returning.
  return kept.reverse();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/**
 * Truncates a system prompt to roughly `maxChars` characters while trying
 * to preserve its `<Block>` / `<Selection>` structure.
 *
 * The prefix before `<Block>` is kept intact; the block and selection
 * sections share the remaining budget, each ellipsized (`...`) when cut.
 * When the tags are missing, out of order, or the prefix leaves no room
 * for the sections, the prompt is truncated flat with a trailing `...`.
 */
export function truncateSystemPrompt(systemPrompt: string, maxChars: number) {
  if (systemPrompt.length <= maxChars) return systemPrompt;

  // Flat fallback; Math.max guards slice() against a negative end when
  // maxChars < 3, which would otherwise slice from the string's end.
  const simpleTruncate = () =>
    systemPrompt.slice(0, Math.max(0, maxChars - 3)) + '...';

  const blockStart = systemPrompt.indexOf('<Block>');
  const selectionStart = systemPrompt.indexOf('<Selection>');

  // Fall back when the structural tags are absent or out of order.
  if (blockStart === -1 || selectionStart === -1 || selectionStart < blockStart) {
    return simpleTruncate();
  }

  // Preserve the structure and truncate content within tags if necessary.
  const prefix = systemPrompt.slice(0, blockStart);
  const blockContent = systemPrompt.slice(blockStart, selectionStart);
  const selectionContent = systemPrompt.slice(selectionStart);

  // Reserve 6 chars for the two possible '...' markers. If the prefix is
  // so long that the sections get (almost) no budget, negative slice
  // bounds would corrupt the output — truncate flat instead.
  const availableChars = maxChars - prefix.length - 6;

  if (availableChars < 6) return simpleTruncate();

  // Integer half so slice() receives whole-number bounds.
  const halfAvailable = Math.floor(availableChars / 2);

  const truncatedBlock =
    blockContent.length > halfAvailable
      ? blockContent.slice(0, halfAvailable - 3) + '...'
      : blockContent;

  // The selection gets whatever the (possibly shorter) block left over.
  const selectionBudget = availableChars - truncatedBlock.length;

  const truncatedSelection =
    selectionContent.length > selectionBudget
      ? selectionContent.slice(0, Math.max(0, selectionBudget - 3)) + '...'
      : selectionContent;

  return prefix + truncatedBlock + truncatedSelection;
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import * as React from 'react';
import { faker } from '@faker-js/faker';
import { AIChatPlugin, useEditorChat } from '@udecode/plate-ai/react';
import {
getAncestorNode,
Expand Down Expand Up @@ -40,19 +39,9 @@ export function AIMenu() {
const chat = useChat({
id: 'editor',
// API to be implemented
api: '/api/ai',
// Mock the API response. Remove it when you implement the route /api/ai
fetch: async () => {
await new Promise((resolve) => setTimeout(resolve, 400));

const stream = fakeStreamText();

return new Response(stream, {
headers: {
Connection: 'keep-alive',
'Content-Type': 'text/plain',
},
});
api: '/api/ai/command',
onError: (error) => {
throw error;
},
});

Expand Down Expand Up @@ -176,42 +165,3 @@ export function AIMenu() {
</Popover>
);
}

// Used for testing. Remove it after implementing useChat api.
const fakeStreamText = ({
chunkCount = 10,
streamProtocol = 'data',
}: {
chunkCount?: number;
streamProtocol?: 'data' | 'text';
} = {}) => {
const chunks = Array.from({ length: chunkCount }, () => ({
delay: faker.number.int({ max: 150, min: 50 }),
texts: faker.lorem.words({ max: 3, min: 1 }) + ' ',
}));
const encoder = new TextEncoder();

return new ReadableStream({
async start(controller) {
for (const chunk of chunks) {
await new Promise((resolve) => setTimeout(resolve, chunk.delay));

if (streamProtocol === 'text') {
controller.enqueue(encoder.encode(chunk.texts));
} else {
controller.enqueue(
encoder.encode(`0:${JSON.stringify(chunk.texts)}\n`)
);
}
}

if (streamProtocol === 'data') {
controller.enqueue(
`d:{"finishReason":"stop","usage":{"promptTokens":0,"completionTokens":${chunks.length}}}\n`
);
}

controller.close();
},
});
};
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,7 @@ export function InsertDropdownMenu(props: DropdownMenuProps) {
{nestedItems.map(
({ icon: Icon, label: itemLabel, value: type }) => (
<DropdownMenuItem
key={itemLabel}
className="min-w-[180px]"
onSelect={async () => {
switch (type) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { faker } from '@faker-js/faker';
import { CopilotPlugin } from '@udecode/plate-ai/react';
import { getAncestorNode } from '@udecode/plate-common';
import { serializeMdNodes, stripMarkdown } from '@udecode/plate-markdown';
Expand Down Expand Up @@ -26,16 +25,6 @@ export const copilotPlugins = [
- CRITICAL: Avoid starting a new block. Do not use block formatting like >, #, 1., 2., -, etc. The suggestion should continue in the same block as the context.
- If no context is provided or you can't generate a continuation, return "0" without explanation.`,
},
// Mock the API response. Remove it when you implement the route /api/ai/copilot
fetch: async () => {
const text = await new Promise<string>((resolve) =>
setTimeout(() => resolve(faker.lorem.sentence()), 100)
);

return new Response(JSON.stringify({ text }), {
headers: { 'Content-Type': 'application/json' },
});
},
onFinish: (_, completion) => {
if (completion === '0') return;

Expand All @@ -44,6 +33,9 @@ export const copilotPlugins = [
text: stripMarkdown(completion),
});
},
onError: (error) => {
throw error;
},
},
debounceDelay: 500,
getPrompt: ({ editor }) => {
Expand Down

0 comments on commit 8c82e9e

Please sign in to comment.