diff --git a/templates/plate-playground-template/.env.example b/templates/plate-playground-template/.env.example
new file mode 100644
index 0000000000..9847a1df18
--- /dev/null
+++ b/templates/plate-playground-template/.env.example
@@ -0,0 +1 @@
+OPENAI_API_KEY=
\ No newline at end of file
diff --git a/templates/plate-playground-template/README.md b/templates/plate-playground-template/README.md
index e0597fbaea..bd6bcbe489 100644
--- a/templates/plate-playground-template/README.md
+++ b/templates/plate-playground-template/README.md
@@ -1,21 +1,18 @@
-# Playground Template
+## Environment Variables
+Copy the example env file:
-A comprehensive playground template for building rich-text editors with [Plate](https://platejs.org/) and Next.js 14.
+```
+cp ./.env.example ./.env.local
+```
-## Usage
+- `OPENAI_API_KEY` – Your OpenAI API key (obtain one [here](https://platform.openai.com/account/api-keys))
-```bash
-pnpm install
-pnpm dev
-```
-## Features
+## Running the App
-- Next.js 14 App Directory
-- [Plate](https://platejs.org/) Editor
-- [shadcn/ui](https://ui.shadcn.com/)
-- Radix UI Primitives
-- Tailwind CSS
-- Icons from [Lucide](https://lucide.dev)
-- Dark mode with `next-themes`
-- Tailwind CSS class sorting, merging and linting.
+To run the app locally, you can run the following commands:
+
+```
+pnpm i
+pnpm dev
+```
diff --git a/templates/plate-playground-template/package.json b/templates/plate-playground-template/package.json
index 4546a171cd..14efd4234d 100644
--- a/templates/plate-playground-template/package.json
+++ b/templates/plate-playground-template/package.json
@@ -27,7 +27,7 @@
     "@radix-ui/react-context-menu": "^2.2.1",
     "@radix-ui/react-icons": "^1.3.0",
     "ai": "^3.4.10",
-    "@faker-js/faker": "^9.0.2",
+    "@ai-sdk/openai": "^0.0.67",
     "@udecode/cn": "^39.0.0",
     "@udecode/plate-ai": "^39.2.10",
     "@udecode/plate-slash-command": "^39.0.0",
diff --git a/templates/plate-playground-template/pnpm-lock.yaml b/templates/plate-playground-template/pnpm-lock.yaml
index 83a60a4701..c1b9f43356 100644
--- a/templates/plate-playground-template/pnpm-lock.yaml
+++ b/templates/plate-playground-template/pnpm-lock.yaml
@@ -8,12 +8,12 @@ importers:
 
   .:
     dependencies:
+      '@ai-sdk/openai':
+        specifier: ^0.0.67
+        version: 0.0.67(zod@3.23.8)
       '@ariakit/react':
         specifier: 0.4.11
        version: 0.4.11(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@faker-js/faker':
-        specifier: ^9.0.2
-        version: 9.0.3
       '@radix-ui/react-avatar':
         specifier: ^1.1.1
         version: 1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.11)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -318,6 +318,12 @@ importers:
 
 packages:
 
+  '@ai-sdk/openai@0.0.67':
+    resolution: {integrity: sha512-LOvbQaKXuNdhlZ+Asinc4DGdh4v32wKTzFB8FIKvVYjPuXwMWAsK4ZjrS6sxwB37AjmyGLPTMVBUtwcHZdpk+A==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
   '@ai-sdk/provider-utils@1.0.20':
     resolution: {integrity: sha512-ngg/RGpnA00eNOWEtXHenpX1MsM2QshQh4QJFjUfwcqHpM5kTfG7je7Rc3HcEDP+OkRVv2GF+X4fC1Vfcnl8Ow==}
     engines: {node: '>=18'}
@@ -508,10 +514,6 @@ packages:
       react: ^17.0.2 || ^18.2.0
       react-dom: ^17.0.2 || ^18.2.0
 
-  '@faker-js/faker@9.0.3':
-    resolution: {integrity: sha512-lWrrK4QNlFSU+13PL9jMbMKLJYXDFu3tQfayBsMXX7KL/GiQeqfB1CzHkqD5UHBUtPAuPo6XwGbMFNdVMZObRA==}
-    engines: {node: '>=18.0.0', npm: '>=9.0.0'}
-
   '@floating-ui/core@1.6.8':
     resolution: {integrity: sha512-7XJ9cPU+yI2QeLS+FCSlqNFZJq8arvswefkZrYI1yQBbftw6FyrZOxYSh+9S7z7TpeWlRt9zJ5IhM1WIL334jA==}
 
@@ -4079,6 +4081,12 @@ packages:
 
 snapshots:
 
+  '@ai-sdk/openai@0.0.67(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 0.0.24
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
+      zod: 3.23.8
+
   '@ai-sdk/provider-utils@1.0.20(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider': 0.0.24
@@ -4311,8 +4319,6 @@ snapshots:
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)
 
-  '@faker-js/faker@9.0.3': {}
-
   '@floating-ui/core@1.6.8':
     dependencies:
       '@floating-ui/utils': 0.2.8
diff --git a/templates/plate-playground-template/src/app/api/ai/command/route.ts b/templates/plate-playground-template/src/app/api/ai/command/route.ts
new file mode 100644
index 0000000000..38ffd36fbb
--- /dev/null
+++ b/templates/plate-playground-template/src/app/api/ai/command/route.ts
@@ -0,0 +1,21 @@
+import { openai } from '@ai-sdk/openai';
+import { convertToCoreMessages, streamText } from 'ai';
+
+import { limitTotalCharacters } from '../utils/limitTotalCharacters';
+import { truncateSystemPrompt } from '../utils/truncateSystemPrompt';
+
+import type { NextRequest } from 'next/server';
+
+export async function POST(req: NextRequest) {
+  const { messages, system } = await req.json();
+  const limitedMessages = limitTotalCharacters(messages, 8000);
+
+  const result = await streamText({
+    maxTokens: 2048,
+    messages: convertToCoreMessages(limitedMessages),
+    model: openai('gpt-4o-mini'),
+    system: system ? truncateSystemPrompt(system, 12_000) : undefined,
+  });
+
+  return result.toDataStreamResponse();
+}
diff --git a/templates/plate-playground-template/src/app/api/ai/copilot/route.ts b/templates/plate-playground-template/src/app/api/ai/copilot/route.ts
new file mode 100644
index 0000000000..47cc795610
--- /dev/null
+++ b/templates/plate-playground-template/src/app/api/ai/copilot/route.ts
@@ -0,0 +1,32 @@
+import { openai } from '@ai-sdk/openai';
+import { generateText } from 'ai';
+
+import type { NextRequest } from 'next/server';
+
+export async function POST(req: NextRequest) {
+  const { prompt, system } = await req.json();
+
+  try {
+    const result = await generateText({
+      abortSignal: req.signal,
+      maxTokens: 50,
+      model: openai('gpt-4o-mini'),
+      prompt: prompt,
+      system,
+      temperature: 0.7,
+    });
+
+    return new Response(JSON.stringify(result), {
+      headers: { 'Content-Type': 'application/json' },
+    });
+  } catch (error: any) {
+    if (error.name === 'AbortError') {
+      return new Response(null, { status: 408 });
+    }
+
+    return new Response(JSON.stringify({ error: error.message }), {
+      status: 500,
+      headers: { 'Content-Type': 'application/json' },
+    });
+  }
+}
diff --git a/templates/plate-playground-template/src/app/api/ai/utils/limitTotalCharacters.ts b/templates/plate-playground-template/src/app/api/ai/utils/limitTotalCharacters.ts
new file mode 100644
index 0000000000..4672af5571
--- /dev/null
+++ b/templates/plate-playground-template/src/app/api/ai/utils/limitTotalCharacters.ts
@@ -0,0 +1,20 @@
+import { Message } from 'ai';
+
+export function limitTotalCharacters(
+  messages: Message[],
+  maxTotalChars: number
+) {
+  let totalChars = 0;
+  const limitedMessages: Message[] = [];
+
+  for (let i = messages.length - 1; i >= 0; i--) {
+    const msgChars = messages[i].content.length;
+
+    if (totalChars + msgChars > maxTotalChars) break;
+
+    totalChars += msgChars;
+    limitedMessages.unshift(messages[i]);
+  }
+
+  return limitedMessages;
+}
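For reviewers, a quick sketch of what `limitTotalCharacters` does: it walks the chat history from newest to oldest and keeps only the most recent messages whose combined length fits the character budget, preserving their original order. The messages and import path below are made up for illustration:

```ts
import type { Message } from 'ai';

import { limitTotalCharacters } from './limitTotalCharacters';

// Hypothetical history: 6,000 + 3,000 + 4,000 characters.
const history: Message[] = [
  { id: '1', content: 'a'.repeat(6000), role: 'user' },
  { id: '2', content: 'b'.repeat(3000), role: 'assistant' },
  { id: '3', content: 'c'.repeat(4000), role: 'user' },
];

// The two newest messages (7,000 chars) fit the 8,000-char budget used by the
// command route; adding the oldest message would exceed it, so it is dropped.
const limited = limitTotalCharacters(history, 8000);
console.log(limited.map((m) => m.id)); // ['2', '3']
```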
diff --git a/templates/plate-playground-template/src/app/api/ai/utils/truncateSystemPrompt.ts b/templates/plate-playground-template/src/app/api/ai/utils/truncateSystemPrompt.ts
new file mode 100644
index 0000000000..2a96bfc62e
--- /dev/null
+++ b/templates/plate-playground-template/src/app/api/ai/utils/truncateSystemPrompt.ts
@@ -0,0 +1,33 @@
+export function truncateSystemPrompt(systemPrompt: string, maxChars: number) {
+  if (systemPrompt.length <= maxChars) return systemPrompt;
+
+  // Find the position of the <Block> and <Selection> tags
+  const blockStart = systemPrompt.indexOf('<Block>');
+  const selectionStart = systemPrompt.indexOf('<Selection>');
+
+  if (blockStart === -1 || selectionStart === -1) {
+    // If tags are not found, simple truncation
+    return systemPrompt.slice(0, maxChars - 3) + '...';
+  }
+
+  // Preserve the structure and truncate content within tags if necessary
+  const prefix = systemPrompt.slice(0, blockStart);
+  const blockContent = systemPrompt.slice(blockStart, selectionStart);
+  const selectionContent = systemPrompt.slice(selectionStart);
+
+  const availableChars = maxChars - prefix.length - 6; // 6 for '...' in both block and selection
+  const halfAvailable = availableChars / 2;
+
+  const truncatedBlock =
+    blockContent.length > halfAvailable
+      ? blockContent.slice(0, halfAvailable - 3) + '...'
+      : blockContent;
+
+  const truncatedSelection =
+    selectionContent.length > availableChars - truncatedBlock.length
+      ? selectionContent.slice(0, availableChars - truncatedBlock.length - 3) +
+        '...'
+      : selectionContent;
+
+  return prefix + truncatedBlock + truncatedSelection;
+}
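A rough sketch of the behaviour this helper aims for, assuming the system prompt wraps the editor context in `<Block>` and `<Selection>` markers as the comments in the helper suggest; the prompts below are placeholders:

```ts
import { truncateSystemPrompt } from './truncateSystemPrompt';

// Prompts under the limit pass through untouched.
truncateSystemPrompt('You are a helpful writing assistant.', 100);
// => 'You are a helpful writing assistant.'

// Over-long prompts without the expected markers fall back to a plain cut.
truncateSystemPrompt('x'.repeat(50), 20);
// => 17 x's followed by '...'

// Over-long prompts with markers keep the instruction prefix intact and trim
// the <Block> / <Selection> sections to fit the remaining budget.
const prompt = `Rules...\n<Block>${'b'.repeat(15_000)}</Block>\n<Selection>sel</Selection>`;
truncateSystemPrompt(prompt, 12_000).length; // <= 12_000, still starts with 'Rules...'
```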
diff --git a/templates/plate-playground-template/src/components/plate-ui/ai-menu.tsx b/templates/plate-playground-template/src/components/plate-ui/ai-menu.tsx
index aee9ff7d22..a0494c0e94 100644
--- a/templates/plate-playground-template/src/components/plate-ui/ai-menu.tsx
+++ b/templates/plate-playground-template/src/components/plate-ui/ai-menu.tsx
@@ -1,5 +1,4 @@
 import * as React from 'react';
-import { faker } from '@faker-js/faker';
 import { AIChatPlugin, useEditorChat } from '@udecode/plate-ai/react';
 import {
   getAncestorNode,
@@ -40,19 +39,9 @@ export function AIMenu() {
   const chat = useChat({
     id: 'editor',
     // API to be implemented
-    api: '/api/ai',
-    // Mock the API response. Remove it when you implement the route /api/ai
-    fetch: async () => {
-      await new Promise((resolve) => setTimeout(resolve, 400));
-
-      const stream = fakeStreamText();
-
-      return new Response(stream, {
-        headers: {
-          Connection: 'keep-alive',
-          'Content-Type': 'text/plain',
-        },
-      });
+    api: '/api/ai/command',
+    onError: (error) => {
+      throw error;
     },
   });
@@ -176,42 +165,3 @@ export function AIMenu() {
   );
 }
-
-// Used for testing. Remove it after implementing useChat api.
-const fakeStreamText = ({
-  chunkCount = 10,
-  streamProtocol = 'data',
-}: {
-  chunkCount?: number;
-  streamProtocol?: 'data' | 'text';
-} = {}) => {
-  const chunks = Array.from({ length: chunkCount }, () => ({
-    delay: faker.number.int({ max: 150, min: 50 }),
-    texts: faker.lorem.words({ max: 3, min: 1 }) + ' ',
-  }));
-  const encoder = new TextEncoder();
-
-  return new ReadableStream({
-    async start(controller) {
-      for (const chunk of chunks) {
-        await new Promise((resolve) => setTimeout(resolve, chunk.delay));
-
-        if (streamProtocol === 'text') {
-          controller.enqueue(encoder.encode(chunk.texts));
-        } else {
-          controller.enqueue(
-            encoder.encode(`0:${JSON.stringify(chunk.texts)}\n`)
-          );
-        }
-      }
-
-      if (streamProtocol === 'data') {
-        controller.enqueue(
-          `d:{"finishReason":"stop","usage":{"promptTokens":0,"completionTokens":${chunks.length}}}\n`
-        );
-      }
-
-      controller.close();
-    },
-  });
-};
diff --git a/templates/plate-playground-template/src/components/plate-ui/insert-dropdown-menu.tsx b/templates/plate-playground-template/src/components/plate-ui/insert-dropdown-menu.tsx
index 9d367c88ce..e7d3a4ac23 100644
--- a/templates/plate-playground-template/src/components/plate-ui/insert-dropdown-menu.tsx
+++ b/templates/plate-playground-template/src/components/plate-ui/insert-dropdown-menu.tsx
@@ -190,6 +190,7 @@ export function InsertDropdownMenu(props: DropdownMenuProps) {
               {nestedItems.map(
                 ({ icon: Icon, label: itemLabel, value: type }) => (
                   <DropdownMenuItem
+                    key={type}
                     onSelect={async () => {
                       switch (type) {
diff --git a/templates/plate-playground-template/src/components/plugins/copilot-plugins.tsx b/templates/plate-playground-template/src/components/plugins/copilot-plugins.tsx
index 3885f056f7..be63e4c32f 100644
--- a/templates/plate-playground-template/src/components/plugins/copilot-plugins.tsx
+++ b/templates/plate-playground-template/src/components/plugins/copilot-plugins.tsx
@@ -1,4 +1,3 @@
-import { faker } from '@faker-js/faker';
 import { CopilotPlugin } from '@udecode/plate-ai/react';
 import { getAncestorNode } from '@udecode/plate-common';
 import { serializeMdNodes, stripMarkdown } from '@udecode/plate-markdown';
@@ -26,16 +25,6 @@ export const copilotPlugins = [
         - CRITICAL: Avoid starting a new block. Do not use block formatting like >, #, 1., 2., -, etc. The suggestion should continue in the same block as the context.
         - If no context is provided or you can't generate a continuation, return "0" without explanation.`,
          },
-          // Mock the API response. Remove it when you implement the route /api/ai/copilot
-          fetch: async () => {
-            const text = await new Promise((resolve) =>
-              setTimeout(() => resolve(faker.lorem.sentence()), 100)
-            );
-
-            return new Response(JSON.stringify({ text }), {
-              headers: { 'Content-Type': 'application/json' },
-            });
-          },
          onFinish: (_, completion) => {
            if (completion === '0') return;
@@ -44,6 +33,9 @@ export const copilotPlugins = [
              text: stripMarkdown(completion),
            });
          },
+          onError: (error) => {
+            throw error;
+          },
        },
        debounceDelay: 500,
        getPrompt: ({ editor }) => {
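As a quick way to exercise the two new routes once `OPENAI_API_KEY` is set in `.env.local`, something like the following could be run from client-side code. The request bodies are illustrative; the paths match the routes added above:

```ts
// Copilot: one-shot completion; the route returns the generateText result as JSON.
const copilotRes = await fetch('/api/ai/copilot', {
  body: JSON.stringify({
    prompt: 'The quick brown fox',
    system: 'Complete the sentence in a few words.',
  }),
  headers: { 'Content-Type': 'application/json' },
  method: 'POST',
});
const { text } = await copilotRes.json();

// Command: streaming chat completion in the AI SDK data-stream format,
// normally consumed by useChat({ api: '/api/ai/command' }) as in ai-menu.tsx.
const commandRes = await fetch('/api/ai/command', {
  body: JSON.stringify({
    messages: [{ content: 'Say hello to the Plate editor.', role: 'user' }],
  }),
  headers: { 'Content-Type': 'application/json' },
  method: 'POST',
});
console.log(await commandRes.text());
```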