Skip to content

Commit

Permalink
feat(config): add support for GROQ API key and models
Browse files Browse the repository at this point in the history
feat(engine): add GroqAi engine implementation for GROQ AI provider
  • Loading branch information
ozeliurs committed Aug 17, 2024
1 parent 70c64d4 commit 32ab800
Show file tree
Hide file tree
Showing 3 changed files with 166 additions and 4 deletions.
38 changes: 34 additions & 4 deletions src/commands/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ export enum CONFIG_KEYS {
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
OCO_GEMINI_API_KEY = 'OCO_GEMINI_API_KEY',
OCO_GROQ_API_KEY = 'OCO_GROQ_API_KEY',
OCO_GEMINI_BASE_PATH = 'OCO_GEMINI_BASE_PATH',
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
Expand Down Expand Up @@ -84,6 +85,16 @@ export const MODEL_LIST = {
'gemini-1.0-pro',
'gemini-pro-vision',
'text-embedding-004'
],

groq: [
'llama3-70b-8192', // Meta Llama 3 70B (default one, no daily token limit and 14 400 reqs/day)
'llama3-8b-8192', // Meta Llama 3 8B
'llama-guard-3-8b', // Llama Guard 3 8B
'llama-3.1-8b-instant', // Llama 3.1 8B (Preview)
'llama-3.1-70b-versatile', // Llama 3.1 70B (Preview)
'gemma-7b-it', // Gemma 7B
'gemma2-9b-it', // Gemma 2 9B
]
};

Expand All @@ -95,6 +106,8 @@ const getDefaultModel = (provider: string | undefined): string => {
return MODEL_LIST.anthropic[0];
case 'gemini':
return MODEL_LIST.gemini[0];
case 'groq':
return MODEL_LIST.groq[0];
default:
return MODEL_LIST.openai[0];
}
Expand Down Expand Up @@ -182,6 +195,19 @@ export const configValidators = {
return value;
},

/**
 * Validates OCO_GROQ_API_KEY. Passes when a Groq key is given, or when an
 * OpenAI key is present, or when the provider ('ollama'/'test') needs no key.
 * NOTE(review): the fallback to OCO_OPENAI_API_KEY mirrors the original TODO —
 * confirm the Groq engine actually works without its own key before relying
 * on it.
 */
[CONFIG_KEYS.OCO_GROQ_API_KEY](value: any, config: any = {}) {
  validateConfig(
    'GROQ_API_KEY',
    value ||
      config.OCO_OPENAI_API_KEY ||
      config.OCO_AI_PROVIDER === 'ollama' ||
      config.OCO_AI_PROVIDER === 'test',
    'You need to provide an OpenAI/GROQ API key'
  );

  return value;
},

[CONFIG_KEYS.OCO_DESCRIPTION](value: any) {
validateConfig(
CONFIG_KEYS.OCO_DESCRIPTION,
Expand Down Expand Up @@ -264,15 +290,18 @@ export const configValidators = {
[
...MODEL_LIST.openai,
...MODEL_LIST.anthropic,
...MODEL_LIST.gemini
...MODEL_LIST.gemini,
...MODEL_LIST.groq
].includes(value) ||
config.OCO_AI_PROVIDER == 'ollama' ||
config.OCO_AI_PROVIDER == 'azure' ||
config.OCO_AI_PROVIDER == 'groq' ||
config.OCO_AI_PROVIDER == 'test',
`${value} is not supported yet, use:\n\n ${[
...MODEL_LIST.openai,
...MODEL_LIST.anthropic,
...MODEL_LIST.gemini
...MODEL_LIST.gemini,
...MODEL_LIST.groq
].join('\n')}`
);
return value;
Expand Down Expand Up @@ -308,9 +337,9 @@ export const configValidators = {
/**
 * Validates OCO_AI_PROVIDER against the supported provider names.
 * Any value starting with 'ollama' is also accepted (e.g. 'ollama/<model>').
 */
[CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
  validateConfig(
    CONFIG_KEYS.OCO_AI_PROVIDER,
    ['', 'openai', 'anthropic', 'gemini', 'azure', 'groq', 'test'].includes(
      value
    ) || value.startsWith('ollama'),
    `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'groq' or 'openai' (default)`
  );
  return value;
},
Expand Down Expand Up @@ -375,6 +404,7 @@ export const getConfig = ({
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
OCO_GROQ_API_KEY: process.env.OCO_GROQ_API_KEY,
OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT
? Number(process.env.OCO_TOKENS_MAX_INPUT)
: undefined,
Expand Down
129 changes: 129 additions & 0 deletions src/engine/groq.Ai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
import axios from 'axios';
import chalk from 'chalk';
import { execa } from 'execa';

import {
ChatCompletionRequestMessage,
Configuration as OpenAiApiConfiguration,
OpenAIApi
} from 'openai';

import { intro, outro } from '@clack/prompts';

import {
CONFIG_MODES,
DEFAULT_TOKEN_LIMITS,
getConfig
} from '../commands/config';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine } from './Engine';
import { MODEL_LIST } from '../commands/config';

const config = getConfig();

// Token budgets for a single request; fall back to the package defaults.
const MAX_TOKENS_OUTPUT =
  config?.OCO_TOKENS_MAX_OUTPUT ||
  DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
const MAX_TOKENS_INPUT =
  config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;

// Groq exposes an OpenAI-compatible endpoint, so the OpenAI client is reused
// with this base path.
let basePath = 'https://api.groq.com/openai/v1';
let apiKey = config?.OCO_GROQ_API_KEY;

const [command, mode] = process.argv.slice(2);

const provider = config?.OCO_AI_PROVIDER;

// Fail fast when the Groq provider is selected but no API key is configured
// (skipped while the user is running `oco config set ...`).
if (
  provider === 'groq' &&
  !apiKey &&
  command !== 'config' &&
  mode !== CONFIG_MODES.set
) {
  intro('opencommit');

  outro(
    'OCO_GROQ_API_KEY is not set, please run `oco config set OCO_GROQ_API_KEY=<your token>`.'
  );
  outro(
    'For help look into README https://github.com/di-sukharev/opencommit#setup'
  );

  process.exit(1);
}

// Default model: use the first Groq model when the Groq provider is active —
// the generic 'gpt-3.5-turbo' fallback would always fail the check below and
// exit — otherwise keep the historical OpenAI default.
const MODEL =
  config?.OCO_MODEL ||
  (provider === 'groq' ? MODEL_LIST.groq[0] : 'gpt-3.5-turbo');
if (
  provider === 'groq' &&
  !MODEL_LIST.groq.includes(MODEL) &&
  command !== 'config' &&
  mode !== CONFIG_MODES.set
) {
  outro(
    `${chalk.red('✖')} Unsupported model ${MODEL} for Groq AI. Supported models are: ${MODEL_LIST.groq.join(
      ', '
    )}`
  );

  process.exit(1);
}

export class GroqAi implements AiEngine {

private groqAiApiConfiguration = new OpenAiApiConfiguration({
apiKey: apiKey
});
private groqAI!: OpenAIApi;

constructor() {
if (basePath) {
this.groqAiApiConfiguration.basePath = basePath;
}
this.groqAI = new OpenAIApi(this.groqAiApiConfiguration);
}

public generateCommitMessage = async (
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> => {
const params = {
model: MODEL,
messages,
temperature: 0,
top_p: 0.1,
max_tokens: MAX_TOKENS_OUTPUT
};
try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content as string) + 4)
.reduce((a, b) => a + b, 0);

if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
}

const { data } = await this.groqAI.createChatCompletion(params);

const message = data.choices[0].message;

return message?.content;
} catch (error) {
outro(`${chalk.red('✖')} ${JSON.stringify(params)}`);

const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);

if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
error.response?.status === 401
) {
const groqAiError = error.response.data.error;

if (groqAiError?.message) outro(groqAiError.message);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
);
}

throw err;
}
};

}
3 changes: 3 additions & 0 deletions src/utils/engine.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { OllamaAi } from '../engine/ollama';
import { AnthropicAi } from '../engine/anthropic'
import { TestAi } from '../engine/testAi';
import { Azure } from '../engine/azure';
import { GroqAi } from '../engine/groq.Ai';

export function getEngine(): AiEngine {
const config = getConfig();
Expand All @@ -25,6 +26,8 @@ export function getEngine(): AiEngine {
return new Gemini();
} else if (provider == 'azure') {
return new Azure();
} else if (provider == 'groq') {
return new GroqAi();
}

//open ai gpt by default
Expand Down

0 comments on commit 32ab800

Please sign in to comment.