feat: Add Novita support to Python-TGPT library
fix: Update README.md to include Novita details

patch: Bump version number to 0.7.5

fix: Add Novita provider to async_providers.py

feat: Implement Novita provider in console.py

fix: Add UnsupportedModelError exception handling
Simatwa committed Oct 30, 2024
1 parent 5aff2c6 commit 3337a95
Showing 11 changed files with 147 additions and 6 deletions.
6 changes: 5 additions & 1 deletion Makefile
@@ -1,5 +1,5 @@
# Define targets
.PHONY: install install-minimal test build build-deb build-minimal-deb clean
.PHONY: install install-minimal test test_tgpt build build-deb build-minimal-deb clean

# Define variables
PYTHON := python3
@@ -27,6 +27,10 @@ install-minimal: clean
test:
$(PYTHON) -m unittest discover -s tests -p 'test_*.py' -f -v

# Target to run tgpt providers test
test_tgpt:
$(PYTHON) -m unittest discover -s tests -p 'test_*_tgpt.py' -f -v

# Target to create an executable using PyInstaller
build: install
$(PI) install --upgrade pyinstaller
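For anyone who prefers to drive the same discovery from Python rather than `make`, the snippet below is an illustrative equivalent of the new `test_tgpt` target; the `tests/` directory and the `test_*_tgpt.py` naming come from the Makefile above, everything else is standard `unittest`.

```python
# Illustrative Python equivalent of `make test_tgpt`.
import unittest

# Discover provider tests matching the pattern used by the Makefile target.
suite = unittest.defaultTestLoader.discover("tests", pattern="test_*_tgpt.py")

# failfast/verbosity mirror the -f and -v flags passed to `python -m unittest`.
unittest.TextTestRunner(verbosity=2, failfast=True).run(suite)
```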
17 changes: 16 additions & 1 deletion docs/README.md
@@ -88,7 +88,7 @@ These are simply the hosts of the LLMs, which include:
14. [Groq](https://console.groq.com/playground) *(API Key required)*
15. [Perplexity](https://www.perplexity.ai)
16. [YepChat](https://yep.com)

17. [Novita](https://novita.ai) *(API key required)*

<details>

@@ -349,6 +349,21 @@ print(bot.chat("<Your-prompt>"))

</details>

<details>

<summary>
Novita

</summary>

```python
import pytgpt.novita as novita
bot = novita.NOVITA("<NOVITA-API-KEY>")
print(bot.chat("<Your-prompt>"))
```

</details>

### Asynchronous

**Version 0.7.0** introduces an asynchronous implementation for almost all providers, except a few such as *perplexity* and *gemini*, which rely on other libraries that lack such an implementation.
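As a quick illustration of that asynchronous interface with the newly added provider, a Novita call might look like the sketch below. It assumes `AsyncNOVITA` accepts the API key as its first argument and exposes an awaitable `chat` method, mirroring the synchronous example above.

```python
import asyncio
import pytgpt.novita as novita

async def main():
    bot = novita.AsyncNOVITA("<NOVITA-API-KEY>")
    # chat() is awaited here; the prompt placeholder matches the sync example.
    print(await bot.chat("<Your-prompt>"))

asyncio.run(main())
```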
2 changes: 1 addition & 1 deletion setup.py
@@ -56,7 +56,7 @@

setup(
name="python-tgpt",
version="0.7.4",
version="0.7.5",
license="MIT",
author="Smartwa",
maintainer="Smartwa",
1 change: 1 addition & 0 deletions src/pytgpt/__init__.py
@@ -28,6 +28,7 @@
"groq",
"perplexity",
"yepchat",
"novita",
]

gpt4free_providers = [
2 changes: 2 additions & 0 deletions src/pytgpt/async_providers.py
@@ -7,6 +7,7 @@
from pytgpt.koboldai import AsyncKOBOLDAI
from pytgpt.groq import AsyncGROQ
from pytgpt.blackboxai import AsyncBLACKBOXAI
from pytgpt.novita import AsyncNOVITA
from pytgpt.gpt4free import AsyncGPT4FREE

mapper: dict[str, object] = {
@@ -20,6 +21,7 @@
"leo": AsyncLEO,
"groq": AsyncGROQ,
"openai": AsyncOPENAI,
"novita": AsyncNOVITA,
}

tgpt_mapper: dict[str, object] = {
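For context on how this registration is consumed, a minimal lookup against the mapper might look like the sketch below; the dictionary access follows the mapping shown above, while the constructor call assumes the API key is the first argument, as in the README examples.

```python
# Illustrative lookup in the provider-name -> async-class mapping shown above.
from pytgpt.async_providers import mapper

provider_cls = mapper["novita"]         # resolves to AsyncNOVITA
bot = provider_cls("<NOVITA-API-KEY>")  # assumed constructor signature (API key first)
```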
24 changes: 24 additions & 0 deletions src/pytgpt/console.py
@@ -724,6 +724,30 @@ def __init__(
quiet=quiet,
)

elif provider == "novita":
assert auth, (
"Novita's API-key is required. " "Use the flag `--key` or `-k`"
)
from pytgpt.novita import main

self.bot = main.NOVITA(
api_key=auth,
is_conversation=disable_conversation,
max_tokens=max_tokens,
temperature=temperature,
presence_penalty=top_p,
frequency_penalty=top_k,
top_p=top_p,
model=getOr(model, main.model),
timeout=timeout,
intro=intro,
filepath=filepath,
update_file=update_file,
proxies=proxies,
history_offset=history_offset,
act=awesome_prompt,
)

else:
raise NotImplementedError(
f"The provider `{provider}` is not yet implemented."
4 changes: 3 additions & 1 deletion src/pytgpt/exceptions.py
@@ -1,4 +1,6 @@
class FailedToGenerateResponseError(Exception):
"""Provider failed to fetch response"""

pass

class UnsupportedModelError(Exception):
"""Model passed is not supported by the provider"""
9 changes: 9 additions & 0 deletions src/pytgpt/novita/__init__.py
@@ -0,0 +1,9 @@
from pytgpt.novita.main import NOVITA
from pytgpt.novita.main import AsyncNOVITA
from pytgpt.novita.main import available_models
from pytgpt.openai.main import session


__info__ = "Interact with NOVITA's models. API key is required."

__all__ = ["NOVITA", "AsyncNOVITA", "available_models", "session"]
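Since `available_models` is re-exported here, the supported Novita model identifiers can be inspected without touching `main.py`; a trivial example:

```python
from pytgpt.novita import available_models

# Print every Novita model id shipped with this release.
for name in available_models:
    print(name)
```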
58 changes: 58 additions & 0 deletions src/pytgpt/novita/main.py
@@ -0,0 +1,58 @@
from pytgpt.openai import OPENAI, AsyncOPENAI
from pytgpt.exceptions import UnsupportedModelError

model = "meta-llama/llama-3.1-8b-instruct"

available_models = [
"meta-llama/llama-3.1-8b-instruct",
"meta-llama/llama-3.1-70b-instruct",
"meta-llama/llama-3.1-405b-instruct",
"meta-llama/llama-3-8b-instruct",
"meta-llama/llama-3-70b-instruct",
"gryphe/mythomax-l2-13b",
"google/gemma-2-9b-it",
"mistralai/mistral-nemo",
"microsoft/wizardlm-2-8x22b",
"mistralai/mistral-7b-instruct",
"microsoft/wizardlm-2-7b",
"openchat/openchat-7b",
"nousresearch/hermes-2-pro-llama-3-8b",
"sao10k/l3-70b-euryale-v2.1",
"cognitivecomputations/dolphin-mixtral-8x22b",
"jondurbin/airoboros-l2-70b",
"lzlv_70b",
"nousresearch/nous-hermes-llama2-13b",
"teknium/openhermes-2.5-mistral-7b",
"sophosympatheia/midnight-rose-70b",
"meta-llama/llama-3.1-8b-instruct-bf16",
"qwen/qwen-2.5-72b-instruct",
"sao10k/l31-70b-euryale-v2.2",
"qwen/qwen-2-7b-instruct",
"qwen/qwen-2-72b-instruct",
]


class NOVITA(OPENAI):
    """Novita AI provider"""

    def __init__(self, *args, **kwargs):
        # Fall back to the default model, then validate whichever model was requested.
        kwargs.setdefault("model", model)
        if kwargs["model"] not in available_models:
            raise UnsupportedModelError(
                f"Model '{kwargs['model']}' is not yet supported. Choose from {available_models}"
            )
        super().__init__(*args, **kwargs)
        self.chat_endpoint = "https://api.novita.ai/v3/openai/chat/completions"


class AsyncNOVITA(AsyncOPENAI):
    """Async Novita AI provider"""

    def __init__(self, *args, **kwargs):
        # Fall back to the default model, then validate whichever model was requested.
        kwargs.setdefault("model", model)
        if kwargs["model"] not in available_models:
            raise UnsupportedModelError(
                f"Model '{kwargs['model']}' is not yet supported. Choose from {available_models}"
            )
        super().__init__(*args, **kwargs)
        self.chat_endpoint = "https://api.novita.ai/v3/openai/chat/completions"
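To show how the model check above behaves, here is a short sketch: it picks a non-default entry from `available_models` and demonstrates that an unknown identifier raises the new `UnsupportedModelError`. The API key placeholder is, as elsewhere, an assumption you replace with a real key.

```python
from pytgpt.novita import NOVITA
from pytgpt.exceptions import UnsupportedModelError

# Explicitly select a supported, non-default model.
bot = NOVITA("<NOVITA-API-KEY>", model="qwen/qwen-2.5-72b-instruct")

# An unrecognised model id is rejected before any request is made.
try:
    NOVITA("<NOVITA-API-KEY>", model="not/a-real-model")
except UnsupportedModelError as error:
    print(error)
```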
24 changes: 24 additions & 0 deletions tests/test_novita.py
@@ -0,0 +1,24 @@
import unittest
import tests.base as base
from os import getenv
from pytgpt.novita import NOVITA
from pytgpt.novita import AsyncNOVITA

API_KEY = getenv("NOVITA_API_KEY")


class TestNovita(base.llmBase):
def setUp(self):
self.bot = NOVITA(API_KEY)
self.prompt = base.prompt


class TestAsyncNovita(base.AsyncProviderBase):

def setUp(self):
self.bot = AsyncNOVITA(API_KEY)
self.prompt = base.prompt


if __name__ == "__main__":
unittest.main()
6 changes: 4 additions & 2 deletions tests/test_openai.py
@@ -4,17 +4,19 @@
from pytgpt.openai import OPENAI
from pytgpt.openai import AsyncOPENAI

API_KEY = getenv("OPENAI_API_KEY")


class TestOpenai(base.llmBase):
def setUp(self):
self.bot = OPENAI(getenv("OPENAI_API_KEY"))
self.bot = OPENAI(API_KEY)
self.prompt = base.prompt


class TestAsyncOpenai(base.AsyncProviderBase):

def setUp(self):
self.bot = AsyncOPENAI()
self.bot = AsyncOPENAI(API_KEY)
self.prompt = base.prompt

