From f6d3cb4de57e2c9d9241c6e37f0976bbb1742cd0 Mon Sep 17 00:00:00 2001 From: Srujan Gurram Date: Fri, 25 Oct 2024 22:14:58 +0530 Subject: [PATCH 1/8] Update chat model selection and settings --- .../Settings/Sections/ChatSettings.tsx | 63 ++++--------------- .../Sidebar/chat/ChangeChatModel.tsx | 18 +++--- src/components/Sidebar/chat/index.tsx | 10 +-- src/config/settings/index.ts | 31 +++++---- src/hooks/useChatCompletion.ts | 46 ++++---------- src/hooks/useChatModels.ts | 54 ++++++++-------- src/pages/content/sidebar.tsx | 14 +---- 7 files changed, 85 insertions(+), 151 deletions(-) diff --git a/src/components/Settings/Sections/ChatSettings.tsx b/src/components/Settings/Sections/ChatSettings.tsx index 988d2784..e6ac54d6 100644 --- a/src/components/Settings/Sections/ChatSettings.tsx +++ b/src/components/Settings/Sections/ChatSettings.tsx @@ -7,13 +7,12 @@ import { capitalizeText } from '../../../lib/capitalizeText' import { validateApiKey } from '../../../lib/validApiKey' import FieldWrapper from '../Elements/FieldWrapper' import SectionHeading from '../Elements/SectionHeading' -import { type AvailableModels, Mode } from '../../../config/settings' -import { getReadableModelName } from '../../../lib/getReadableModelName' +import { Mode } from '../../../config/settings' const ChatSettings = () => { const [settings, setSettings] = useSettings() const [showPassword, setShowPassword] = useState(false) - const { availableModels, fetchLocalModels } = useChatModels() + const { models, setActiveChatModel } = useChatModels() const OpenAiApiKeyInputRef = React.useRef(null) const OpenAiBaseUrlInputRef = React.useRef(null) @@ -120,69 +119,29 @@ const ChatSettings = () => { Update - {' '} - {/* ========================= - Model Setting - ===========================*/} - - { - setSettings({ - ...settings, - chat: { - ...chatSettings, - showLocalModels: value, - }, - }) - fetchLocalModels() - }} - className="cdx-w-[42px] cdx-h-[25px] cdx-bg-neutral-500 cdx-rounded-full 
cdx-relative data-[state=checked]:cdx-bg-blue-500 cdx-outline-none cdx-cursor-default" - > - - - {chatSettings.showLocalModels && ( -
- 🚧 NOTE: You must run this command for this to work: - - OLLAMA_ORIGINS= - {window.location.origin} ollama start - -
- )} - {/* ========================= - Mode Setting - ===========================*/} { - const { availableModels, activeChatModel, setActiveChatModel } = - useChatModels() + const { models, activeChatModel, setActiveChatModel } = useChatModels() return (
diff --git a/src/components/Sidebar/chat/index.tsx b/src/components/Sidebar/chat/index.tsx index 079c387a..a37a8ae9 100644 --- a/src/components/Sidebar/chat/index.tsx +++ b/src/components/Sidebar/chat/index.tsx @@ -3,7 +3,7 @@ import ChatList from './ChatList' import { SidebarInput } from './ChatInput' import { useChatCompletion } from '../../../hooks/useChatCompletion' import { SYSTEM_PROMPT } from '../../../config/prompts' -import { AvailableModels, type Settings } from '../../../config/settings' +import type { Settings } from '../../../config/settings' interface ChatProps { settings: Settings @@ -19,7 +19,7 @@ const Chat = ({ settings }: ChatProps) => { removeMessagePair, error, } = useChatCompletion({ - model: settings.chat.model, + model: settings.chat.model!, apiKey: settings.chat.openAIKey!, mode: settings.chat.mode, systemPrompt: SYSTEM_PROMPT, @@ -58,11 +58,7 @@ const Chat = ({ settings }: ChatProps) => { clearMessages={clearMessages} cancelRequest={cancelRequest} isWebpageContextOn={settings.general.webpageContext} - isVisionModel={ - settings.chat.model === AvailableModels.GPT_4_TURBO || - settings.chat.model === AvailableModels.GPT_4O || - settings.chat.model === AvailableModels.GPT_4O_MINI - } + isVisionModel={settings.chat.model?.capabilities.vision || false} /> ) diff --git a/src/config/settings/index.ts b/src/config/settings/index.ts index 8f428a32..ccbcc4dc 100644 --- a/src/config/settings/index.ts +++ b/src/config/settings/index.ts @@ -6,14 +6,6 @@ export enum ThemeOptions { SYSTEM = 'system', } -export enum AvailableModels { - GPT_4O = 'gpt-4o', - GPT_4_TURBO = 'gpt-4-turbo', - GPT_4 = 'gpt-4', - GPT_3_5_TURBO = 'gpt-3.5-turbo', - GPT_4O_MINI = 'gpt-4o-mini', -} - export enum Mode { HIGHLY_PRECISE = 0, PRECISE = 0.5, @@ -21,6 +13,23 @@ export enum Mode { CREATIVE = 1.5, } +export type ModelCapabilities = { + completion_chat: boolean + function_calling: boolean + vision: boolean + fine_tuning: boolean + completion_fim: boolean +} + +export type 
ModelInfo = { + id: string + name: string + description: string + capabilities: ModelCapabilities + max_context_length: number + owned_by: string +} + export type Settings = { quickMenu: { enabled: boolean @@ -29,9 +38,8 @@ export type Settings = { } chat: { openAIKey: string | null - model: AvailableModels + model: ModelInfo | null mode: Mode - showLocalModels: boolean openAiBaseUrl: string | null } general: { @@ -48,9 +56,8 @@ export const defaultSettings: Settings = { }, chat: { openAIKey: null, - model: AvailableModels.GPT_4O_MINI, + model: null, mode: Mode.BALANCED, - showLocalModels: false, openAiBaseUrl: null, }, general: { diff --git a/src/hooks/useChatCompletion.ts b/src/hooks/useChatCompletion.ts index f717e973..d6afcdfd 100644 --- a/src/hooks/useChatCompletion.ts +++ b/src/hooks/useChatCompletion.ts @@ -1,35 +1,24 @@ import endent from 'endent' import { ChatOpenAI } from '@langchain/openai' -import { Ollama } from '@langchain/community/llms/ollama' import { AIMessage, HumanMessage, SystemMessage, } from '@langchain/core/messages' import { useMemo, useState } from 'react' -import { AvailableModels, type Mode } from '../config/settings' +import type { Mode, ModelInfo } from '../config/settings' import { getMatchedContent } from '../lib/getMatchedContent' import { ChatRole, useCurrentChat } from './useCurrentChat' import type { MessageDraft } from './useMessageDraft' interface UseChatCompletionProps { - model: AvailableModels + model: ModelInfo apiKey: string mode: Mode systemPrompt: string baseURL: string } -/** - * This hook is responsible for managing the chat completion - * functionality by using the useCurrentChat hook - * - * It adds functions for - * - submitting a query to the chat - * - cancelling a query - * - * And returns them along with useful state from useCurrentChat hook - */ let controller: AbortController export const useChatCompletion = ({ @@ -51,20 +40,16 @@ export const useChatCompletion = ({ const [error, setError] = useState(null) 
const llm = useMemo(() => { - const isOpenAIModel = Object.values(AvailableModels).includes(model) - if (isOpenAIModel) { - return new ChatOpenAI({ - streaming: true, - openAIApiKey: apiKey, - modelName: model, - configuration: { - baseURL: baseURL, - }, - temperature: Number(mode), - maxTokens: 4_096, - }) - } - return new Ollama({ model: model.replace('ollama-', '') }) + return new ChatOpenAI({ + streaming: true, + openAIApiKey: apiKey, + modelName: model.id, + configuration: { + baseURL: baseURL, + }, + temperature: Number(mode), + maxTokens: model.max_context_length, + }) }, [apiKey, model, mode, baseURL]) const previousMessages = messages.map((msg) => { @@ -90,11 +75,6 @@ export const useChatCompletion = ({ setGenerating(true) try { - /** - * If context is provided, we need to use the LLM to get the relevant documents - * and then run the LLM on those documents. We use in memory vector store to - * get the relevant documents - */ let matchedContext: string | undefined if (context) { matchedContext = await getMatchedContent( @@ -119,7 +99,7 @@ export const useChatCompletion = ({ ...previousMessages, new HumanMessage({ content: - message.files.length > 0 + message.files.length > 0 && model.capabilities.vision ? 
[ { type: 'text', text: expandedQuery }, ...(message.files.length > 0 diff --git a/src/hooks/useChatModels.ts b/src/hooks/useChatModels.ts index d2feefec..c2e13b39 100644 --- a/src/hooks/useChatModels.ts +++ b/src/hooks/useChatModels.ts @@ -1,52 +1,56 @@ import { useCallback, useEffect, useState } from 'react' import { useSettings } from './useSettings' import axios from 'axios' -import { AvailableModels } from '../config/settings' +import type { ModelInfo } from '../config/settings' export const useChatModels = () => { const [settings, setSettings] = useSettings() - const [dynamicModels, setDynamicModels] = useState([]) + const [models, setModels] = useState([]) const chatSettings = settings.chat const activeChatModel = chatSettings.model - const fetchLocalModels = useCallback(async () => { - if (chatSettings.showLocalModels) { - const { - data: { models }, - } = await axios<{ models: { name: string }[] }>( - 'http://localhost:11434/api/tags', - ) - if (models) { - setDynamicModels(models.map((m) => m.name)) + const fetchAvailableModels = useCallback(async () => { + if (chatSettings.openAIKey) { + try { + const baseUrl = + chatSettings.openAiBaseUrl || 'https://api.openai.com/v1' + const { data } = await axios.get(`${baseUrl}/models`, { + headers: { + Authorization: `Bearer ${chatSettings.openAIKey}`, + }, + }) + + // Filter for chat-capable models + const chatModels = data.data.filter( + (model: ModelInfo) => model.capabilities?.completion_chat === true, + ) + + setModels(chatModels) + } catch (error) { + console.log('Failed to fetch models:', error) + setModels([]) } - } else { - setDynamicModels([]) } - }, [chatSettings.showLocalModels]) + }, [chatSettings.openAIKey, chatSettings.openAiBaseUrl]) useEffect(() => { - fetchLocalModels() - }, [fetchLocalModels]) - - const availableModels = [ - ...Object.entries(AvailableModels), - ...dynamicModels.map((m) => [m, m]), - ] + fetchAvailableModels() + }, [fetchAvailableModels]) - const setActiveChatModel = (model: 
AvailableModels) => { + const setActiveChatModel = (model: ModelInfo) => { setSettings({ ...settings, chat: { ...chatSettings, - model: model, + model, }, }) } return { - availableModels, + models, activeChatModel, setActiveChatModel, - fetchLocalModels, + fetchAvailableModels, } } diff --git a/src/pages/content/sidebar.tsx b/src/pages/content/sidebar.tsx index f9fb123a..a976a79a 100644 --- a/src/pages/content/sidebar.tsx +++ b/src/pages/content/sidebar.tsx @@ -1,4 +1,4 @@ -import { AvailableModels, type Settings } from '../../config/settings' +import type { Settings } from '../../config/settings' import { getScreenshotImage } from '../../lib/getScreenshotImage' import { contentScriptLog } from '../../logs' @@ -35,18 +35,6 @@ chrome.runtime.onMessage.addListener((msg) => { } }) -/** - * Convert local data `modal` typo to `model` - */ -chrome.storage.sync.get(['SETTINGS'], (result) => { - const chatSettings = (result.SETTINGS as Settings)?.chat - if ('modal' in chatSettings) { - chatSettings.model = AvailableModels.GPT_4_TURBO - delete chatSettings.modal - } - chrome.storage.sync.set({ SETTINGS: result.SETTINGS }) -}) - /** * SIDEBAR <-> CONTENT SCRIPT * Event listener for messages from the sidebar. 
From 866a388a063a9a117668e9260ab8bf53c6684cfc Mon Sep 17 00:00:00 2001 From: Srujan Gurram <52039218+Royal-lobster@users.noreply.github.com> Date: Fri, 25 Oct 2024 22:20:14 +0530 Subject: [PATCH 2/8] Create eight-melons-cheer.md --- .changeset/eight-melons-cheer.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .changeset/eight-melons-cheer.md diff --git a/.changeset/eight-melons-cheer.md b/.changeset/eight-melons-cheer.md new file mode 100644 index 00000000..cade47f1 --- /dev/null +++ b/.changeset/eight-melons-cheer.md @@ -0,0 +1,6 @@ +--- +"syncia": patch +--- + +- Dynamic model names fetched from /models +- Deprecates usage of Ollama models directly; we can now use them via an OpenAI-compatible endpoint From ddeed75ced57f33f4b49baddde0d85566009d5c6 Mon Sep 17 00:00:00 2001 From: Srujan Gurram Date: Fri, 25 Oct 2024 22:28:13 +0530 Subject: [PATCH 3/8] Add state for model selection in Sidebar Auth component --- src/components/Sidebar/auth/index.tsx | 37 +++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/src/components/Sidebar/auth/index.tsx b/src/components/Sidebar/auth/index.tsx index dd67cc26..68a34464 100644 --- a/src/components/Sidebar/auth/index.tsx +++ b/src/components/Sidebar/auth/index.tsx @@ -1,11 +1,15 @@ -import React, { useEffect } from 'react' +import React, { useEffect, useState } from 'react' import { useSettings } from '../../../hooks/useSettings' import { validateApiKey } from '../../../lib/validApiKey' +import { useChatModels } from '../../../hooks/useChatModels' +import { ModelInfo } from '../../../config/settings' const Auth = () => { const [, setSettings] = useSettings() + const { models, setActiveChatModel, fetchAvailableModels } = useChatModels() const [error, setError] = React.useState(null) const [showAdvanced, setShowAdvanced] = React.useState(false) + const [showModelSelect, setShowModelSelect] = useState(false) useEffect(() => { if (error) { setTimeout(() => { setError(null) }, 3000) } }, [error]) @@ -27,10 +31,12 @@ const Auth = () => { ...prev, 
chat: { ...prev.chat, - openAIKey: key as string, + openAIKey: key, openAiBaseUrl: openAiBaseUrl, }, })) + await fetchAvailableModels() + setShowModelSelect(true) } else { setError('Invalid API key. Please try with a valid one.') } @@ -102,6 +108,33 @@ const Auth = () => {
)} + {showModelSelect && models.length > 0 && ( +
+ + +
+ )} + {error && (
{error}
)} From 55a453d60af8fee1199971c4ba1616eb141cb0c4 Mon Sep 17 00:00:00 2001 From: Srujan Gurram Date: Sat, 26 Oct 2024 20:21:57 +0530 Subject: [PATCH 4/8] Refactor chat settings model selection logic --- .../Settings/Sections/ChatSettings.tsx | 11 +++------- src/components/Sidebar/auth/index.tsx | 16 ++++---------- .../Sidebar/chat/ChangeChatModel.tsx | 9 +++----- src/components/Sidebar/chat/index.tsx | 2 +- src/components/Sidebar/index.tsx | 6 +++++- src/config/settings/index.ts | 19 +---------------- src/hooks/useChatCompletion.ts | 9 ++++---- src/hooks/useChatModels.ts | 21 ++++++++++--------- 8 files changed, 32 insertions(+), 61 deletions(-) diff --git a/src/components/Settings/Sections/ChatSettings.tsx b/src/components/Settings/Sections/ChatSettings.tsx index e6ac54d6..63739125 100644 --- a/src/components/Settings/Sections/ChatSettings.tsx +++ b/src/components/Settings/Sections/ChatSettings.tsx @@ -126,18 +126,13 @@ const ChatSettings = () => { row={true} > diff --git a/src/components/Sidebar/auth/index.tsx b/src/components/Sidebar/auth/index.tsx index 68a34464..83dc1b17 100644 --- a/src/components/Sidebar/auth/index.tsx +++ b/src/components/Sidebar/auth/index.tsx @@ -1,8 +1,7 @@ import React, { useEffect, useState } from 'react' +import { useChatModels } from '../../../hooks/useChatModels' import { useSettings } from '../../../hooks/useSettings' import { validateApiKey } from '../../../lib/validApiKey' -import { useChatModels } from '../../../hooks/useChatModels' -import { ModelInfo } from '../../../config/settings' const Auth = () => { const [, setSettings] = useSettings() @@ -13,9 +12,7 @@ const Auth = () => { useEffect(() => { if (error) { - setTimeout(() => { - setError(null) - }, 3000) + setTimeout(() => setError(null), 3000) } }, [error]) @@ -118,17 +115,12 @@ const Auth = () => { diff --git a/src/components/Sidebar/chat/ChangeChatModel.tsx b/src/components/Sidebar/chat/ChangeChatModel.tsx index e650dcac..b5a41ebf 100644 --- 
a/src/components/Sidebar/chat/ChangeChatModel.tsx +++ b/src/components/Sidebar/chat/ChangeChatModel.tsx @@ -7,18 +7,15 @@ const ChangeChatModel = () => {
diff --git a/src/components/Sidebar/chat/index.tsx b/src/components/Sidebar/chat/index.tsx index a37a8ae9..3ba65487 100644 --- a/src/components/Sidebar/chat/index.tsx +++ b/src/components/Sidebar/chat/index.tsx @@ -58,7 +58,7 @@ const Chat = ({ settings }: ChatProps) => { clearMessages={clearMessages} cancelRequest={cancelRequest} isWebpageContextOn={settings.general.webpageContext} - isVisionModel={settings.chat.model?.capabilities.vision || false} + isVisionModel={settings.chat.model?.includes('vision') || false} /> ) diff --git a/src/components/Sidebar/index.tsx b/src/components/Sidebar/index.tsx index 61889847..08108c20 100644 --- a/src/components/Sidebar/index.tsx +++ b/src/components/Sidebar/index.tsx @@ -10,7 +10,11 @@ function Sidebar() { return (
- {settings.chat.openAIKey ? : } + {settings.chat.openAIKey && settings.chat.model ? ( + + ) : ( + + )}
) } diff --git a/src/config/settings/index.ts b/src/config/settings/index.ts index ccbcc4dc..0066646c 100644 --- a/src/config/settings/index.ts +++ b/src/config/settings/index.ts @@ -13,23 +13,6 @@ export enum Mode { CREATIVE = 1.5, } -export type ModelCapabilities = { - completion_chat: boolean - function_calling: boolean - vision: boolean - fine_tuning: boolean - completion_fim: boolean -} - -export type ModelInfo = { - id: string - name: string - description: string - capabilities: ModelCapabilities - max_context_length: number - owned_by: string -} - export type Settings = { quickMenu: { enabled: boolean @@ -38,7 +21,7 @@ export type Settings = { } chat: { openAIKey: string | null - model: ModelInfo | null + model: string | null mode: Mode openAiBaseUrl: string | null } diff --git a/src/hooks/useChatCompletion.ts b/src/hooks/useChatCompletion.ts index d6afcdfd..791ba157 100644 --- a/src/hooks/useChatCompletion.ts +++ b/src/hooks/useChatCompletion.ts @@ -6,13 +6,13 @@ import { SystemMessage, } from '@langchain/core/messages' import { useMemo, useState } from 'react' -import type { Mode, ModelInfo } from '../config/settings' +import type { Mode } from '../config/settings' import { getMatchedContent } from '../lib/getMatchedContent' import { ChatRole, useCurrentChat } from './useCurrentChat' import type { MessageDraft } from './useMessageDraft' interface UseChatCompletionProps { - model: ModelInfo + model: string apiKey: string mode: Mode systemPrompt: string @@ -43,12 +43,11 @@ export const useChatCompletion = ({ return new ChatOpenAI({ streaming: true, openAIApiKey: apiKey, - modelName: model.id, + modelName: model, configuration: { baseURL: baseURL, }, temperature: Number(mode), - maxTokens: model.max_context_length, }) }, [apiKey, model, mode, baseURL]) @@ -99,7 +98,7 @@ export const useChatCompletion = ({ ...previousMessages, new HumanMessage({ content: - message.files.length > 0 && model.capabilities.vision + message.files.length > 0 ? 
[ { type: 'text', text: expandedQuery }, ...(message.files.length > 0 diff --git a/src/hooks/useChatModels.ts b/src/hooks/useChatModels.ts index c2e13b39..83ed5387 100644 --- a/src/hooks/useChatModels.ts +++ b/src/hooks/useChatModels.ts @@ -1,11 +1,17 @@ import { useCallback, useEffect, useState } from 'react' import { useSettings } from './useSettings' import axios from 'axios' -import type { ModelInfo } from '../config/settings' + +type OpenAIModel = { + id: string + object: string + created: number + owned_by: string +} export const useChatModels = () => { const [settings, setSettings] = useSettings() - const [models, setModels] = useState([]) + const [models, setModels] = useState([]) const chatSettings = settings.chat const activeChatModel = chatSettings.model @@ -20,12 +26,7 @@ export const useChatModels = () => { }, }) - // Filter for chat-capable models - const chatModels = data.data.filter( - (model: ModelInfo) => model.capabilities?.completion_chat === true, - ) - - setModels(chatModels) + setModels(data.data) } catch (error) { console.log('Failed to fetch models:', error) setModels([]) @@ -37,12 +38,12 @@ export const useChatModels = () => { fetchAvailableModels() }, [fetchAvailableModels]) - const setActiveChatModel = (model: ModelInfo) => { + const setActiveChatModel = (modelId: string) => { setSettings({ ...settings, chat: { ...chatSettings, - model, + model: modelId, }, }) } From d35237ba83f3c4d241f8b27c232336df8cc6faff Mon Sep 17 00:00:00 2001 From: Srujan Gurram Date: Sat, 26 Oct 2024 20:22:57 +0530 Subject: [PATCH 5/8] Update ChangeChatModel select width to max 100px --- src/components/Sidebar/chat/ChangeChatModel.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/components/Sidebar/chat/ChangeChatModel.tsx b/src/components/Sidebar/chat/ChangeChatModel.tsx index b5a41ebf..94975a94 100644 --- a/src/components/Sidebar/chat/ChangeChatModel.tsx +++ b/src/components/Sidebar/chat/ChangeChatModel.tsx @@ -8,7 +8,7 @@ const 
ChangeChatModel = () => {
- - - {showAdvanced && ( -
- - -
- )} +
+ +
- {showModelSelect && models.length > 0 && ( -
- - setActiveChatModel(e.target.value)} + disabled={!models.length} + className="cdx-p-2 cdx-w-full cdx-rounded-md cdx-border dark:cdx-border-neutral-600 cdx-border-neutral-200 dark:cdx-bg-neutral-800/90 cdx-bg-neutral-200/90 disabled:cdx-opacity-50" + > + {isLoadingModels ? ( + + ) : models.length ? ( + models.map((model) => ( - ))} - -
- )} + )) + ) : ( + + )} + + {error && (
{error}
)} -
- (Note: we only store your key locally. We do not send it anywhere. You +
+ Note: we only store your key locally. We do not send it anywhere. You can check the{' '} { > source code {' '} - and inspect network tab to verify this.) + and inspect network tab to verify this.
) From 8df3cc0b103c2df64c3785e3db63276f7d0cde41 Mon Sep 17 00:00:00 2001 From: Srujan Gurram Date: Sat, 26 Oct 2024 20:57:55 +0530 Subject: [PATCH 8/8] Update chat settings form input handling logic --- src/components/Settings/Sections/ChatSettings.tsx | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/components/Settings/Sections/ChatSettings.tsx b/src/components/Settings/Sections/ChatSettings.tsx index 63739125..1e272ab0 100644 --- a/src/components/Settings/Sections/ChatSettings.tsx +++ b/src/components/Settings/Sections/ChatSettings.tsx @@ -1,13 +1,12 @@ -import * as Switch from '@radix-ui/react-switch' import React, { useState } from 'react' import { AiOutlineEye, AiOutlineEyeInvisible } from 'react-icons/ai' -import { useSettings } from '../../../hooks/useSettings' +import { Mode } from '../../../config/settings' import { useChatModels } from '../../../hooks/useChatModels' +import { useSettings } from '../../../hooks/useSettings' import { capitalizeText } from '../../../lib/capitalizeText' import { validateApiKey } from '../../../lib/validApiKey' import FieldWrapper from '../Elements/FieldWrapper' import SectionHeading from '../Elements/SectionHeading' -import { Mode } from '../../../config/settings' const ChatSettings = () => { const [settings, setSettings] = useSettings() @@ -22,10 +21,8 @@ const ChatSettings = () => { event: React.FormEvent, ) => { event.preventDefault() - const target = event.target as HTMLFormElement - - const apiKeyValue = target.openAiApiKey.value - const baseurlValue = target.openAiBaseUrl.value + const apiKeyValue = OpenAiApiKeyInputRef.current?.value || '' + const baseurlValue = OpenAiBaseUrlInputRef.current?.value || '' if (OpenAiApiKeyInputRef.current) { const isOpenAiKeyValid: boolean = await validateApiKey(