
Commit

feat
zbeyens committed Nov 4, 2024
1 parent a6fa8f4 commit f777836
Showing 22 changed files with 587 additions and 91 deletions.
2 changes: 1 addition & 1 deletion apps/www/public/r/styles/default/ai-menu.json
@@ -27,7 +27,7 @@
},
"files": [
{
"content": "'use client';\n\nimport * as React from 'react';\n\nimport { faker } from '@faker-js/faker';\nimport { AIChatPlugin, useEditorChat } from '@udecode/plate-ai/react';\nimport {\n type TElement,\n type TNodeEntry,\n getAncestorNode,\n getBlocks,\n isElementEmpty,\n isHotkey,\n isSelectionAtBlockEnd,\n} from '@udecode/plate-common';\nimport {\n type PlateEditor,\n toDOMNode,\n useEditorPlugin,\n useHotkeys,\n} from '@udecode/plate-common/react';\nimport {\n BlockSelectionPlugin,\n useIsSelecting,\n} from '@udecode/plate-selection/react';\nimport { useChat } from 'ai/react';\nimport { Loader2Icon } from 'lucide-react';\n\nimport { AIChatEditor } from './ai-chat-editor';\nimport { AIMenuItems } from './ai-menu-items';\nimport { Command, CommandList, InputCommand } from './command';\nimport { Popover, PopoverAnchor, PopoverContent } from './popover';\n\nexport function AIMenu() {\n const { api, editor, useOption } = useEditorPlugin(AIChatPlugin);\n const open = useOption('open');\n const mode = useOption('mode');\n const isSelecting = useIsSelecting();\n\n const aiEditorRef = React.useRef<PlateEditor | null>(null);\n const [value, setValue] = React.useState('');\n\n const chat = useChat({\n id: 'editor',\n // API to be implemented\n api: '/api/ai',\n // Mock the API response. Remove it when you implement the route /api/ai\n fetch: async () => {\n await new Promise((resolve) => setTimeout(resolve, 400));\n\n const stream = fakeStreamText();\n\n return new Response(stream, {\n headers: {\n Connection: 'keep-alive',\n 'Content-Type': 'text/plain',\n },\n });\n },\n });\n\n const { input, isLoading, messages, setInput } = chat;\n const [anchorElement, setAnchorElement] = React.useState<HTMLElement | null>(\n null\n );\n\n const setOpen = (open: boolean) => {\n if (open) {\n api.aiChat.show();\n } else {\n api.aiChat.hide();\n }\n };\n\n const show = (anchorElement: HTMLElement) => {\n setAnchorElement(anchorElement);\n setOpen(true);\n };\n\n useEditorChat({\n chat,\n onOpenBlockSelection: (blocks: TNodeEntry[]) => {\n show(toDOMNode(editor, blocks.at(-1)![0])!);\n },\n onOpenChange: (open) => {\n if (!open) {\n setAnchorElement(null);\n setInput('');\n }\n },\n onOpenCursor: () => {\n const ancestor = getAncestorNode(editor)?.[0] as TElement;\n\n if (!isSelectionAtBlockEnd(editor) && !isElementEmpty(editor, ancestor)) {\n editor\n .getApi(BlockSelectionPlugin)\n .blockSelection.addSelectedRow(ancestor.id as string);\n }\n\n show(toDOMNode(editor, ancestor)!);\n },\n onOpenSelection: () => {\n show(toDOMNode(editor, getBlocks(editor).at(-1)![0])!);\n },\n });\n\n useHotkeys(\n 'meta+j',\n () => {\n api.aiChat.show();\n },\n { enableOnContentEditable: true, enableOnFormTags: true }\n );\n\n return (\n <Popover open={open} onOpenChange={setOpen} modal={false}>\n <PopoverAnchor virtualRef={{ current: anchorElement }} />\n\n <PopoverContent\n className=\"border-none bg-transparent p-0 shadow-none\"\n style={{\n width: anchorElement?.offsetWidth,\n }}\n onEscapeKeyDown={(e) => {\n e.preventDefault();\n\n if (isLoading) {\n api.aiChat.stop();\n } else {\n api.aiChat.hide();\n }\n }}\n align=\"center\"\n avoidCollisions={false}\n side=\"bottom\"\n >\n <Command\n className=\"w-full rounded-lg border shadow-md\"\n value={value}\n onValueChange={setValue}\n >\n {mode === 'chat' && isSelecting && messages.length > 0 && (\n <AIChatEditor aiEditorRef={aiEditorRef} />\n )}\n\n {isLoading ? 
(\n <div className=\"flex grow select-none items-center gap-2 p-2 text-sm text-muted-foreground\">\n <Loader2Icon className=\"size-4 animate-spin\" />\n {messages.length > 1 ? 'Editing...' : 'Thinking...'}\n </div>\n ) : (\n <InputCommand\n variant=\"ghost\"\n className=\"rounded-none border-b border-solid border-border [&_svg]:hidden\"\n value={input}\n onKeyDown={(e) => {\n if (isHotkey('backspace')(e) && input.length === 0) {\n e.preventDefault();\n api.aiChat.hide();\n }\n if (isHotkey('enter')(e) && !e.shiftKey && !value) {\n e.preventDefault();\n void api.aiChat.submit();\n }\n }}\n onValueChange={setInput}\n placeholder=\"Ask AI anything...\"\n autoFocus\n />\n )}\n\n {!isLoading && (\n <CommandList>\n <AIMenuItems aiEditorRef={aiEditorRef} setValue={setValue} />\n </CommandList>\n )}\n </Command>\n </PopoverContent>\n </Popover>\n );\n}\n\n// Used for testing. Remove it after implementing useChat api.\nconst fakeStreamText = ({\n chunkCount = 10,\n streamProtocol = 'data',\n}: {\n chunkCount?: number;\n streamProtocol?: 'data' | 'text';\n} = {}) => {\n const chunks = Array.from({ length: chunkCount }, () => ({\n delay: faker.number.int({ max: 150, min: 50 }),\n texts: faker.lorem.words({ max: 3, min: 1 }) + ' ',\n }));\n const encoder = new TextEncoder();\n\n return new ReadableStream({\n async start(controller) {\n for (const chunk of chunks) {\n await new Promise((resolve) => setTimeout(resolve, chunk.delay));\n\n if (streamProtocol === 'text') {\n controller.enqueue(encoder.encode(chunk.texts));\n } else {\n controller.enqueue(\n encoder.encode(`0:${JSON.stringify(chunk.texts)}\\n`)\n );\n }\n }\n\n if (streamProtocol === 'data') {\n controller.enqueue(\n `d:{\"finishReason\":\"stop\",\"usage\":{\"promptTokens\":0,\"completionTokens\":${chunks.length}}}\\n`\n );\n }\n\n controller.close();\n },\n });\n};\n",
"content": "'use client';\n\nimport * as React from 'react';\n\nimport { AIChatPlugin, useEditorChat } from '@udecode/plate-ai/react';\nimport {\n type TElement,\n type TNodeEntry,\n getAncestorNode,\n getBlocks,\n isElementEmpty,\n isHotkey,\n isSelectionAtBlockEnd,\n} from '@udecode/plate-common';\nimport {\n type PlateEditor,\n toDOMNode,\n useEditorPlugin,\n useHotkeys,\n} from '@udecode/plate-common/react';\nimport {\n BlockSelectionPlugin,\n useIsSelecting,\n} from '@udecode/plate-selection/react';\nimport { Loader2Icon } from 'lucide-react';\n\nimport { useChat } from '@/components/editor/use-chat';\n\nimport { AIChatEditor } from './ai-chat-editor';\nimport { AIMenuItems } from './ai-menu-items';\nimport { Command, CommandList, InputCommand } from './command';\nimport { Popover, PopoverAnchor, PopoverContent } from './popover';\n\nexport function AIMenu() {\n const { api, editor, useOption } = useEditorPlugin(AIChatPlugin);\n const open = useOption('open');\n const mode = useOption('mode');\n const isSelecting = useIsSelecting();\n\n const aiEditorRef = React.useRef<PlateEditor | null>(null);\n const [value, setValue] = React.useState('');\n\n const chat = useChat();\n\n const { input, isLoading, messages, setInput } = chat;\n const [anchorElement, setAnchorElement] = React.useState<HTMLElement | null>(\n null\n );\n\n const setOpen = (open: boolean) => {\n if (open) {\n api.aiChat.show();\n } else {\n api.aiChat.hide();\n }\n };\n\n const show = (anchorElement: HTMLElement) => {\n setAnchorElement(anchorElement);\n setOpen(true);\n };\n\n useEditorChat({\n chat,\n onOpenBlockSelection: (blocks: TNodeEntry[]) => {\n show(toDOMNode(editor, blocks.at(-1)![0])!);\n },\n onOpenChange: (open) => {\n if (!open) {\n setAnchorElement(null);\n setInput('');\n }\n },\n onOpenCursor: () => {\n const ancestor = getAncestorNode(editor)?.[0] as TElement;\n\n if (!isSelectionAtBlockEnd(editor) && !isElementEmpty(editor, ancestor)) {\n editor\n .getApi(BlockSelectionPlugin)\n .blockSelection.addSelectedRow(ancestor.id as string);\n }\n\n show(toDOMNode(editor, ancestor)!);\n },\n onOpenSelection: () => {\n show(toDOMNode(editor, getBlocks(editor).at(-1)![0])!);\n },\n });\n\n useHotkeys(\n 'meta+j',\n () => {\n api.aiChat.show();\n },\n { enableOnContentEditable: true, enableOnFormTags: true }\n );\n\n return (\n <Popover open={open} onOpenChange={setOpen} modal={false}>\n <PopoverAnchor virtualRef={{ current: anchorElement }} />\n\n <PopoverContent\n className=\"border-none bg-transparent p-0 shadow-none\"\n style={{\n width: anchorElement?.offsetWidth,\n }}\n onEscapeKeyDown={(e) => {\n e.preventDefault();\n\n if (isLoading) {\n api.aiChat.stop();\n } else {\n api.aiChat.hide();\n }\n }}\n align=\"center\"\n avoidCollisions={false}\n side=\"bottom\"\n >\n <Command\n className=\"w-full rounded-lg border shadow-md\"\n value={value}\n onValueChange={setValue}\n >\n {mode === 'chat' && isSelecting && messages.length > 0 && (\n <AIChatEditor aiEditorRef={aiEditorRef} />\n )}\n\n {isLoading ? (\n <div className=\"flex grow select-none items-center gap-2 p-2 text-sm text-muted-foreground\">\n <Loader2Icon className=\"size-4 animate-spin\" />\n {messages.length > 1 ? 'Editing...' 
: 'Thinking...'}\n </div>\n ) : (\n <InputCommand\n variant=\"ghost\"\n className=\"rounded-none border-b border-solid border-border [&_svg]:hidden\"\n value={input}\n onKeyDown={(e) => {\n if (isHotkey('backspace')(e) && input.length === 0) {\n e.preventDefault();\n api.aiChat.hide();\n }\n if (isHotkey('enter')(e) && !e.shiftKey && !value) {\n e.preventDefault();\n void api.aiChat.submit();\n }\n }}\n onValueChange={setInput}\n placeholder=\"Ask AI anything...\"\n autoFocus\n />\n )}\n\n {!isLoading && (\n <CommandList>\n <AIMenuItems aiEditorRef={aiEditorRef} setValue={setValue} />\n </CommandList>\n )}\n </Command>\n </PopoverContent>\n </Popover>\n );\n}\n",
"path": "plate-ui/ai-menu.tsx",
"target": "components/plate-ui/ai-menu.tsx",
"type": "registry:ui"
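Note: the new ai-menu.tsx above drops the inline useChat configuration (the mock fetch and fakeStreamText helper) in favor of a shared hook imported from @/components/editor/use-chat, which is not shown in this excerpt. A minimal sketch of what that hook could look like, assuming the removed mock simply moves behind it (the file contents, fallback text, and fetch wrapper here are illustrative, not taken from this commit):

'use client';

import { useChat as useBaseChat } from 'ai/react';

// Shared chat hook consumed by ai-menu.tsx. The /api/ai/command route is the one
// added in api-ai.json below; the fake-stream fallback is only a stand-in until
// an OpenAI API key is configured.
export const useChat = () => {
  return useBaseChat({
    id: 'editor',
    api: '/api/ai/command',
    fetch: async (input, init) => {
      const res = await fetch(input, init);

      if (res.ok) return res;

      // Fall back to a single data-protocol chunk so the menu still streams something.
      await new Promise((resolve) => setTimeout(resolve, 400));

      return new Response(`0:${JSON.stringify('Fake streamed response. ')}\n`, {
        headers: { Connection: 'keep-alive', 'Content-Type': 'text/plain' },
      });
    },
  });
};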
25 changes: 25 additions & 0 deletions apps/www/public/r/styles/default/api-ai.json
@@ -0,0 +1,25 @@
{
"dependencies": [
"@ai-sdk/openai",
"ai"
],
"files": [
{
"content": "import type { NextRequest } from 'next/server';\n\nimport { createOpenAI } from '@ai-sdk/openai';\nimport { convertToCoreMessages, streamText } from 'ai';\nimport { NextResponse } from 'next/server';\n\nexport async function POST(req: NextRequest) {\n const {\n apiKey: key,\n messages,\n model = 'gpt-4o-mini',\n system,\n } = await req.json();\n\n const apiKey = key || process.env.OPENAI_API_KEY;\n\n if (!apiKey) {\n return NextResponse.json(\n { error: 'Missing OpenAI API key.' },\n { status: 401 }\n );\n }\n\n const openai = createOpenAI({ apiKey });\n\n try {\n const result = await streamText({\n maxTokens: 2048,\n messages: convertToCoreMessages(messages),\n model: openai(model),\n system: system,\n });\n\n return result.toDataStreamResponse();\n } catch {\n return NextResponse.json(\n { error: 'Failed to process AI request' },\n { status: 500 }\n );\n }\n}\n",
"path": "components/api/ai/command/route.ts",
"target": "app/api/ai/command/route.ts",
"type": "registry:page"
},
{
"content": "import type { NextRequest } from 'next/server';\n\nimport { createOpenAI } from '@ai-sdk/openai';\nimport { generateText } from 'ai';\nimport { NextResponse } from 'next/server';\n\nexport async function POST(req: NextRequest) {\n const {\n apiKey: key,\n model = 'gpt-4o-mini',\n prompt,\n system,\n } = await req.json();\n\n const apiKey = key || process.env.OPENAI_API_KEY;\n\n if (!apiKey) {\n return NextResponse.json(\n { error: 'Missing OpenAI API key.' },\n { status: 401 }\n );\n }\n\n const openai = createOpenAI({ apiKey });\n\n try {\n const result = await generateText({\n abortSignal: req.signal,\n maxTokens: 50,\n model: openai(model),\n prompt: prompt,\n system,\n temperature: 0.7,\n });\n\n return NextResponse.json(result);\n } catch (error: any) {\n if (error.name === 'AbortError') {\n return NextResponse.json(null, { status: 408 });\n }\n\n return NextResponse.json(\n { error: 'Failed to process AI request' },\n { status: 500 }\n );\n }\n}\n",
"path": "components/api/ai/copilot/route.ts",
"target": "app/api/ai/copilot/route.ts",
"type": "registry:page"
}
],
"name": "api-ai",
"registryDependencies": [
"use-chat-playground"
],
"type": "registry:component"
}
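For reference, the command route above expects a JSON body containing messages plus optional apiKey, model, and system fields, and streams its answer back in the AI SDK data protocol that useChat consumes; the copilot route takes a single prompt and returns plain JSON from generateText. An illustrative client call against the command route (the message content and system prompt are placeholders):

// Illustrative request against the command route added above; the body fields
// mirror what route.ts destructures. apiKey is optional and falls back to
// process.env.OPENAI_API_KEY on the server.
const res = await fetch('/api/ai/command', {
  body: JSON.stringify({
    messages: [{ content: 'Improve this sentence.', role: 'user' }],
    model: 'gpt-4o-mini',
    system: 'You are an advanced AI writing assistant.',
  }),
  headers: { 'Content-Type': 'application/json' },
  method: 'POST',
});
// The streamed body produced by result.toDataStreamResponse() is what
// useChat reads chunk by chunk on the client.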
5 changes: 3 additions & 2 deletions apps/www/public/r/styles/default/copilot-plugins.json
@@ -1,11 +1,12 @@
{
"dependencies": [
"@udecode/plate-ai",
"@udecode/plate-markdown"
"@udecode/plate-markdown",
"@faker-js/faker"
],
"files": [
{
"content": "import type { TElement } from '@udecode/plate-common';\n\nimport { CopilotPlugin } from '@udecode/plate-ai/react';\nimport { getAncestorNode } from '@udecode/plate-common';\nimport { serializeMdNodes, stripMarkdown } from '@udecode/plate-markdown';\n\nimport { GhostText } from '@/components/plate-ui/ghost-text';\n\nexport const copilotPlugins = [\n CopilotPlugin.configure(({ api }) => ({\n options: {\n completeOptions: {\n api: '/api/ai/copilot',\n body: {\n system: `You are an advanced AI writing assistant, similar to VSCode Copilot but for general text. Your task is to predict and generate the next part of the text based on the given context.\n \n Rules:\n - Continue the text naturally up to the next punctuation mark (., ,, ;, :, ?, or !).\n - Maintain style and tone. Don't repeat given text.\n - For unclear context, provide the most likely continuation.\n - Handle code snippets, lists, or structured text if needed.\n - Don't include \"\"\" in your response.\n - CRITICAL: Always end with a punctuation mark.\n - CRITICAL: Avoid starting a new block. Do not use block formatting like >, #, 1., 2., -, etc. The suggestion should continue in the same block as the context.\n - If no context is provided or you can't generate a continuation, return \"0\" without explanation.`,\n },\n onError: (error) => {\n let text = '';\n\n text = error.message.includes('API key')\n ? 'Set your OpenAI API key for real AI suggestions'\n : 'Try with a valid OpenAI API key for real AI suggestions';\n\n api.copilot.setBlockSuggestion({\n text: stripMarkdown(text),\n });\n },\n onFinish: (_, completion) => {\n if (completion === '0') return;\n\n api.copilot.setBlockSuggestion({\n //stripMarkdownBlocks in plus GhostText\n text: stripMarkdown(completion),\n });\n },\n },\n debounceDelay: 500,\n getPrompt: ({ editor }) => {\n const contextEntry = getAncestorNode(editor);\n\n if (!contextEntry) return '';\n\n const prompt = serializeMdNodes([contextEntry[0] as TElement]);\n\n return `Continue the text up to the next punctuation mark:\n \"\"\"\n ${prompt}\n \"\"\"`;\n },\n renderGhostText: GhostText,\n },\n })),\n] as const;\n",
"content": "import type { TElement } from '@udecode/plate-common';\n\nimport { faker } from '@faker-js/faker';\nimport { CopilotPlugin } from '@udecode/plate-ai/react';\nimport { getAncestorNode } from '@udecode/plate-common';\nimport { serializeMdNodes, stripMarkdown } from '@udecode/plate-markdown';\n\nimport { GhostText } from '@/components/plate-ui/ghost-text';\n\nexport const copilotPlugins = [\n CopilotPlugin.configure(({ api }) => ({\n options: {\n completeOptions: {\n api: '/api/ai/copilot',\n body: {\n system: `You are an advanced AI writing assistant, similar to VSCode Copilot but for general text. Your task is to predict and generate the next part of the text based on the given context.\n \n Rules:\n - Continue the text naturally up to the next punctuation mark (., ,, ;, :, ?, or !).\n - Maintain style and tone. Don't repeat given text.\n - For unclear context, provide the most likely continuation.\n - Handle code snippets, lists, or structured text if needed.\n - Don't include \"\"\" in your response.\n - CRITICAL: Always end with a punctuation mark.\n - CRITICAL: Avoid starting a new block. Do not use block formatting like >, #, 1., 2., -, etc. The suggestion should continue in the same block as the context.\n - If no context is provided or you can't generate a continuation, return \"0\" without explanation.`,\n },\n onError: () => {\n api.copilot.setBlockSuggestion({\n text: stripMarkdown(faker.lorem.sentence()),\n });\n },\n onFinish: (_, completion) => {\n if (completion === '0') return;\n\n api.copilot.setBlockSuggestion({\n //stripMarkdownBlocks in plus GhostText\n text: stripMarkdown(completion),\n });\n },\n },\n debounceDelay: 500,\n getPrompt: ({ editor }) => {\n const contextEntry = getAncestorNode(editor);\n\n if (!contextEntry) return '';\n\n const prompt = serializeMdNodes([contextEntry[0] as TElement]);\n\n return `Continue the text up to the next punctuation mark:\n \"\"\"\n ${prompt}\n \"\"\"`;\n },\n renderGhostText: GhostText,\n },\n })),\n] as const;\n",
"path": "components/editor/plugins/copilot-plugins.tsx",
"target": "components/editor/plugins/copilot-plugins.tsx",
"type": "registry:component"
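The updated copilot-plugins.tsx keeps the same system prompt, debounce, and GhostText rendering, but its onError handler now inserts a faker-generated sentence instead of an API-key hint, which is why @faker-js/faker joins the dependencies. A hedged sketch of wiring these plugins into an editor, assuming Plate's usePlateEditor hook (the surrounding hook and the other plugins are not part of this diff):

import { usePlateEditor } from '@udecode/plate-common/react';

import { copilotPlugins } from '@/components/editor/plugins/copilot-plugins';

// Spread the copilot plugins alongside the rest of the editor configuration;
// GhostText renders the suggestion set by api.copilot.setBlockSuggestion.
export const useMyEditor = () =>
  usePlateEditor({
    plugins: [
      // ...other plugins
      ...copilotPlugins,
    ],
  });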
