Skip to content

Commit

Permalink
Feat/ollama (#367)
Browse files Browse the repository at this point in the history
* feat: ollama support

* fix warnings and batch summarize

* non null llm config

* update bun lock
  • Loading branch information
thewh1teagle authored Nov 11, 2024
1 parent f084761 commit eedfbfd
Show file tree
Hide file tree
Showing 12 changed files with 266 additions and 129 deletions.
Binary file modified bun.lockb
Binary file not shown.
Binary file modified desktop/bun.lockb
Binary file not shown.
3 changes: 3 additions & 0 deletions desktop/src-tauri/capabilities/main.json
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,9 @@
"allow": [
{
"url": "https://*.anthropic.*"
},
{
"url": "http://localhost:11434"
}
]
},
Expand Down
4 changes: 3 additions & 1 deletion desktop/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,13 @@ export default function App() {
return (
// Handle errors before first render
<ErrorBoundary FallbackComponent={BoundaryFallback}>
<div>
<Toaster position="bottom-right" />
</div>
<ErrorModalProvider>
<UpdaterProvider>
<PreferenceProvider>
<ToastProvider>
<Toaster />
<ErrorModalWithContext />
<UpdateProgress />
<FilesProvider>
Expand Down
159 changes: 96 additions & 63 deletions desktop/src/components/Params.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -80,27 +80,12 @@ export default function ModelOptions({ options, setOptions }: ParamsProps) {
handleProgressEvents()
}, [])

async function validateApiKey() {
if (preference.llmOptions.apiKey) {
const valid = await llm.apiKeyValid(preference.llmOptions.apiKey)
console.log('api key valid', valid)
if (!valid) {
await dialog.message(t('common.invalid-llm-api-key'), {
kind: 'error',
})
}
return valid
} else {
return false
}
}

async function validateLlmPrompt() {
let valid = true
if (!preference.llmOptions.prompt) {
if (!preference.llmConfig?.prompt) {
valid = false
} else {
valid = await llm.promptValid(preference.llmOptions.prompt)
valid = preference.llmConfig.prompt.includes('%s')
}
if (!valid) {
await dialog.message(t('common.invalid-llm-prompt'), {
Expand All @@ -110,27 +95,11 @@ export default function ModelOptions({ options, setOptions }: ParamsProps) {
return valid
}

async function onEnableLlm(e: ChangeEvent<HTMLInputElement>) {
if (!preference.llmOptions.apiKey) {
await dialog.message(t('common.invalid-llm-api-key'), {
kind: 'error',
})
}
if (e.target.checked) {
let valid = await validateLlmPrompt()
const llmConfig = preference.llmConfig
const setLlmConfig = preference.setLlmConfig

if (!valid) {
return
}
valid = await validateApiKey()
console.log('valid => ', valid)
if (!valid) {
return
}
preference.setLlmOptions({ ...preference.llmOptions, enabled: true })
} else {
preference.setLlmOptions({ ...preference.llmOptions, enabled: false })
}
async function onEnableLlm(_e: ChangeEvent<HTMLInputElement>) {
preference.setLlmConfig({ ...llmConfig, enabled: !llmConfig?.enabled })
}

return (
Expand Down Expand Up @@ -194,30 +163,90 @@ export default function ModelOptions({ options, setOptions }: ParamsProps) {
<InfoTooltip text={t('common.info-llm-summarize')} />
{t('common.process-with-llm')}
</span>
<input type="checkbox" className="toggle toggle-primary" checked={preference.llmOptions.enabled} onChange={(e) => onEnableLlm(e)} />
<input type="checkbox" className="toggle toggle-primary" checked={preference.llmConfig?.enabled} onChange={(e) => onEnableLlm(e)} />
</label>
</div>

<label className="form-control w-full">
<div className="label">
<span className="label-text flex items-center gap-1">
<InfoTooltip text={t('common.info-llm-api-key')} />
{t('common.llm-api-key')}
<div onClick={() => shellOpen(config.llmApiKeyUrl)} className="link link-primary">
{t('common.find-here')}
</div>
</span>
<span className="label-text flex items-center gap-1">{t('common.llm-platform')}</span>
</div>
<input
value={preference.llmOptions.apiKey}
onChange={(e) => preference.setLlmOptions({ ...preference.llmOptions, apiKey: e.target.value })}
onBlur={validateApiKey}
className="input input-bordered opacity-50 text-sm"
placeholder="Paste here your API key"
type="text"
/>
<select
value={llmConfig?.platform}
onChange={(e) => {
const newPlatform = e.target.value
if (newPlatform === 'ollama') {
const defaultConfig = llm.defaultOllamaConfig()
setLlmConfig({
...defaultConfig,
ollamaBaseUrl: llmConfig.ollamaBaseUrl,
claudeApiKey: llmConfig.claudeApiKey,
enabled: llmConfig?.enabled ?? false,
})
} else if (newPlatform === 'claude') {
const defaultConfig = llm.defaultClaudeConfig()
setLlmConfig({
...defaultConfig,
ollamaBaseUrl: llmConfig.ollamaBaseUrl,
claudeApiKey: llmConfig.claudeApiKey,
enabled: llmConfig?.enabled ?? false,
})
}
}}
className="select select-bordered capitalize">
{['claude', 'ollama'].map((name) => (
<option key={name} value={name}>
{name}
</option>
))}
</select>
</label>

{llmConfig?.platform === 'claude' && (
<label className="form-control w-full">
<div className="label">
<span className="label-text flex items-center gap-1">
<InfoTooltip text={t('common.info-llm-api-key')} />
{t('common.llm-api-key')}
<div onClick={() => shellOpen(config.llmApiKeyUrl)} className="link link-primary">
{t('common.find-here')}
</div>
</span>
</div>

<input
value={llmConfig?.claudeApiKey}
onChange={(e) => setLlmConfig({ ...preference.llmConfig, claudeApiKey: e.target.value })}
className="input input-bordered opacity-50 text-sm"
placeholder="Paste here your API key"
type="text"
/>
</label>
)}

{llmConfig?.platform === 'ollama' && (
<>
<label className="form-control w-full">
<div className="label">
<span className="label-text flex items-center gap-1">{t('common.ollama-base-url')}</span>
</div>
<input
value={llmConfig?.ollamaBaseUrl}
onChange={(e) => setLlmConfig({ ...preference.llmConfig, ollamaBaseUrl: e.target.value })}
className="input input-bordered opacity-50 text-sm"></input>
</label>
<label className="form-control w-full">
<div className="label">
<span className="label-text flex items-center gap-1">{t('common.llm-model')}</span>
</div>
<input
value={llmConfig?.model}
onChange={(e) => setLlmConfig({ ...preference.llmConfig, model: e.target.value })}
className="input input-bordered opacity-50 text-sm"></input>
</label>
</>
)}

<label className="form-control w-full">
<div className="label">
<span className="label-text flex items-center gap-1">
Expand All @@ -226,8 +255,8 @@ export default function ModelOptions({ options, setOptions }: ParamsProps) {
</span>
</div>
<textarea
value={preference.llmOptions.prompt}
onChange={(e) => preference.setLlmOptions({ ...preference.llmOptions, prompt: e.target.value })}
value={llmConfig?.prompt}
onChange={(e) => setLlmConfig({ ...preference.llmConfig, prompt: e.target.value })}
onBlur={validateLlmPrompt}
className="textarea textarea-bordered w-full"></textarea>
</label>
Expand All @@ -240,20 +269,24 @@ export default function ModelOptions({ options, setOptions }: ParamsProps) {
</span>
</div>
<input
onChange={(e) => preference.setLlmOptions({ ...preference.llmOptions, maxTokens: parseInt(e.target.value) })}
value={preference.llmOptions.maxTokens}
onChange={(e) => setLlmConfig({ ...llmConfig, maxTokens: parseInt(e.target.value) ?? 1 })}
value={llmConfig?.maxTokens}
className="input input-bordered"
type="number"
/>
</label>

<div onClick={() => shellOpen(config.llmLimitsUrl)} className="link link-primary mt-2">
{t('common.set-monthly-spend-limit')}
</div>
{llmConfig?.platform === 'claude' && (
<>
<div onClick={() => shellOpen(config.llmLimitsUrl)} className="link link-primary mt-2">
{t('common.set-monthly-spend-limit')}
</div>

<div onClick={() => shellOpen(config.llmCostUrl)} className="link link-primary mt-2">
{t('common.llm-current-cost')}
</div>
<div onClick={() => shellOpen(config.llmCostUrl)} className="link link-primary mt-2">
{t('common.llm-current-cost')}
</div>
</>
)}

<div className="label mt-10">
<span className="label-text text-2xl font-bold">{t('common.model-options')}</span>
Expand Down
46 changes: 0 additions & 46 deletions desktop/src/lib/llm.ts

This file was deleted.

48 changes: 48 additions & 0 deletions desktop/src/lib/llm/claude.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import { fetch } from '@tauri-apps/plugin-http'
import { Llm, LlmConfig } from './index'

/**
 * Builds the default Claude configuration: disabled, Sonnet 3.5 model,
 * a generic summarization prompt, and empty credentials/URLs.
 *
 * NOTE(review): the name is misspelled ("deafult") but is imported under an
 * alias in ./index — renaming it here would break that import.
 */
export function deafultConfig(): LlmConfig {
	const summarizePrompt = `Please summarize the following transcription: \n\n"""\n%s\n"""\n`
	return {
		platform: 'claude',
		enabled: false,
		prompt: summarizePrompt,
		model: 'claude-3-5-sonnet-20240620',
		maxTokens: 8192,
		claudeApiKey: '',
		ollamaBaseUrl: '',
	}
}

/**
 * Claude backend for the Llm interface, talking to the Anthropic
 * Messages API over the Tauri HTTP plugin's fetch.
 */
export class Claude implements Llm {
	private config: LlmConfig

	constructor(config: LlmConfig) {
		this.config = config
	}

	/**
	 * Sends a single-turn user prompt and resolves with the model's text reply.
	 * @param prompt fully rendered prompt text to send as the user message
	 * @returns the first text content block of the response
	 * @throws Error on a non-2xx HTTP status or when the response carries no text content
	 */
	async ask(prompt: string): Promise<string> {
		const body = JSON.stringify({
			model: this.config.model,
			max_tokens: this.config.maxTokens,
			messages: [{ role: 'user', content: prompt }],
		})
		const headers = {
			'X-API-Key': this.config.claudeApiKey,
			'anthropic-version': '2023-06-01',
			'Content-Type': 'application/json',
		}
		const response = await fetch('https://api.anthropic.com/v1/messages', {
			method: 'POST',
			headers,
			body,
		})

		if (!response.ok) {
			// Log only the request body for debugging — never the headers,
			// which contain the user's API key.
			console.error(`request details: `, body)
			throw new Error(`Error: ${response.status} - ${response.statusText}`)
		}

		const data = await response.json()
		// Guard the content array: an empty or missing `content` would otherwise
		// throw a bare TypeError (`.text` of undefined) or silently resolve to
		// undefined despite the declared Promise<string>.
		const text = data?.content?.[0]?.text
		if (typeof text !== 'string') {
			throw new Error('Unexpected Anthropic API response: no text content')
		}
		return text
	}
}
22 changes: 22 additions & 0 deletions desktop/src/lib/llm/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { Claude, deafultConfig as defaultClaudeConfig } from './claude'
import { Ollama, defaultConfig as defaultOllamaConfig } from './ollama'

// Minimal contract every LLM backend (Claude, Ollama) implements.
export interface Llm {
	// Sends a single prompt and resolves with the model's text reply.
	ask(prompt: string): Promise<string>
}

// User-facing LLM settings persisted in preferences. One flat shape is
// shared by both platforms; the comments below group fields by the backend
// that primarily reads them.
export interface LlmConfig {
	platform: 'ollama' | 'claude'
	enabled: boolean
	// Prompt template; must contain '%s' (validated in Params.tsx),
	// presumably substituted with the transcription text — TODO confirm.
	prompt: string

	// Claude
	claudeApiKey: string
	// NOTE(review): despite the grouping, `model` is also edited for the
	// Ollama platform in Params.tsx, so it is effectively shared.
	model: string
	maxTokens?: number

	// Ollama
	ollamaBaseUrl: string
}

export { Ollama, Claude, defaultClaudeConfig, defaultOllamaConfig }
Loading

0 comments on commit eedfbfd

Please sign in to comment.