diff --git a/Makefile b/Makefile
index 5f67cc7..f62cc13 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
# Define targets
-.PHONY: install install-minimal test build build-deb build-minimal-deb clean
+.PHONY: install install-minimal test test_tgpt build build-deb build-minimal-deb clean
# Define variables
PYTHON := python3
@@ -27,6 +27,10 @@ install-minimal: clean
test:
$(PYTHON) -m unittest discover -s tests -p 'test_*.py' -f -v
+# Target to run tgpt providers test
+test_tgpt:
+ $(PYTHON) -m unittest discover -s tests -p 'test_*_tgpt.py' -f -v
+
# Target to create an executable using PyInstaller
build: install
$(PI) install --upgrade pyinstaller
diff --git a/docs/README.md b/docs/README.md
index 5d5a0da..3bcafef 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -88,7 +88,7 @@ These are simply the hosts of the LLMs, which include:
14. [Groq](https://console.groq.com/playground) *(API Key required)*
15. [Perplexity](https://www.perplexity.ai)
16. [YepChat](https://yep.com)
-
+17. [Novita](https://novita.ai) *(API key required)*
@@ -349,6 +349,21 @@ print(bot.chat(""))
+
+
+
+Novita
+
+
+
+```python
+import pytgpt.novita as novita
+bot = novita.NOVITA("")
+print(bot.chat(""))
+```
+
+
+
### Asynchronous
**Version 0.7.0** introduces asynchronous implementation to almost all providers except a few such as *perplexity & gemini*, which relies on other libraries which lacks such implementation.
diff --git a/setup.py b/setup.py
index 3a5ddb2..bcc355c 100644
--- a/setup.py
+++ b/setup.py
@@ -56,7 +56,7 @@
setup(
name="python-tgpt",
- version="0.7.4",
+ version="0.7.5",
license="MIT",
author="Smartwa",
maintainer="Smartwa",
diff --git a/src/pytgpt/__init__.py b/src/pytgpt/__init__.py
index 3e58148..0f405d0 100644
--- a/src/pytgpt/__init__.py
+++ b/src/pytgpt/__init__.py
@@ -28,6 +28,7 @@
"groq",
"perplexity",
"yepchat",
+ "novita",
]
gpt4free_providers = [
diff --git a/src/pytgpt/async_providers.py b/src/pytgpt/async_providers.py
index 75112b1..a2d1ff1 100644
--- a/src/pytgpt/async_providers.py
+++ b/src/pytgpt/async_providers.py
@@ -7,6 +7,7 @@
from pytgpt.koboldai import AsyncKOBOLDAI
from pytgpt.groq import AsyncGROQ
from pytgpt.blackboxai import AsyncBLACKBOXAI
+from pytgpt.novita import AsyncNOVITA
from pytgpt.gpt4free import AsyncGPT4FREE
mapper: dict[str, object] = {
@@ -20,6 +21,7 @@
"leo": AsyncLEO,
"groq": AsyncGROQ,
"openai": AsyncOPENAI,
+ "novita": AsyncNOVITA,
}
tgpt_mapper: dict[str, object] = {
diff --git a/src/pytgpt/console.py b/src/pytgpt/console.py
index 268a198..ed61abb 100644
--- a/src/pytgpt/console.py
+++ b/src/pytgpt/console.py
@@ -724,6 +724,30 @@ def __init__(
quiet=quiet,
)
+ elif provider == "novita":
+ assert auth, (
+ "Novita's API-key is required. " "Use the flag `--key` or `-k`"
+ )
+ from pytgpt.novita import main
+
+ self.bot = main.NOVITA(
+ api_key=auth,
+ is_conversation=disable_conversation,
+ max_tokens=max_tokens,
+ temperature=temperature,
+ presence_penalty=top_p,
+ frequency_penalty=top_k,
+ top_p=top_p,
+ model=getOr(model, main.model),
+ timeout=timeout,
+ intro=intro,
+ filepath=filepath,
+ update_file=update_file,
+ proxies=proxies,
+ history_offset=history_offset,
+ act=awesome_prompt,
+ )
+
else:
raise NotImplementedError(
f"The provider `{provider}` is not yet implemented."
diff --git a/src/pytgpt/exceptions.py b/src/pytgpt/exceptions.py
index 2652413..a0a56c9 100644
--- a/src/pytgpt/exceptions.py
+++ b/src/pytgpt/exceptions.py
@@ -1,4 +1,6 @@
class FailedToGenerateResponseError(Exception):
"""Provider failed to fetch response"""
- pass
+
+class UnsupportedModelError(Exception):
+ """Model passed is not supported by the provider"""
diff --git a/src/pytgpt/novita/__init__.py b/src/pytgpt/novita/__init__.py
new file mode 100644
index 0000000..a3affdc
--- /dev/null
+++ b/src/pytgpt/novita/__init__.py
@@ -0,0 +1,9 @@
+from pytgpt.novita.main import NOVITA
+from pytgpt.novita.main import AsyncNOVITA
+from pytgpt.novita.main import available_models
+from pytgpt.openai.main import session
+
+
+__info__ = "Interact with NOVITA's model. " "API key is required"
+
+__all__ = ["NOVITA", "AsyncNOVITA", "available_models", "session"]
diff --git a/src/pytgpt/novita/main.py b/src/pytgpt/novita/main.py
new file mode 100644
index 0000000..26d6270
--- /dev/null
+++ b/src/pytgpt/novita/main.py
@@ -0,0 +1,58 @@
+from pytgpt.openai import OPENAI, AsyncOPENAI
+from pytgpt.exceptions import UnsupportedModelError
+
+model = "meta-llama/llama-3.1-8b-instruct"
+
+available_models = [
+ "meta-llama/llama-3.1-8b-instruct",
+ "meta-llama/llama-3.1-70b-instruct",
+ "meta-llama/llama-3.1-405b-instruct",
+ "meta-llama/llama-3-8b-instruct",
+ "meta-llama/llama-3-70b-instruct",
+ "gryphe/mythomax-l2-13b",
+ "google/gemma-2-9b-it",
+ "mistralai/mistral-nemo",
+ "microsoft/wizardlm-2-8x22b",
+ "mistralai/mistral-7b-instruct",
+ "microsoft/wizardlm-2-7b",
+ "openchat/openchat-7b",
+ "nousresearch/hermes-2-pro-llama-3-8b",
+ "sao10k/l3-70b-euryale-v2.1",
+ "cognitivecomputations/dolphin-mixtral-8x22b",
+ "jondurbin/airoboros-l2-70b",
+ "lzlv_70b",
+ "nousresearch/nous-hermes-llama2-13b",
+ "teknium/openhermes-2.5-mistral-7b",
+ "sophosympatheia/midnight-rose-70b",
+ "meta-llama/llama-3.1-8b-instruct-bf16",
+ "qwen/qwen-2.5-72b-instruct",
+ "sao10k/l31-70b-euryale-v2.2",
+ "qwen/qwen-2-7b-instruct",
+ "qwen/qwen-2-72b-instruct",
+]
+
+
+class NOVITA(OPENAI):
+ """Novita AI provider"""
+
+ def __init__(self, *args, **kwargs):
+ kwargs.setdefault("model", model)
+        if kwargs["model"] not in available_models:
+            raise UnsupportedModelError(
+                f"Model '{kwargs['model']}' is not yet supported. Choose from {available_models}"
+ )
+ super().__init__(*args, **kwargs)
+ self.chat_endpoint = "https://api.novita.ai/v3/openai/chat/completions"
+
+
+class AsyncNOVITA(AsyncOPENAI):
+ """Async Novita AI provider"""
+
+ def __init__(self, *args, **kwargs):
+ kwargs.setdefault("model", model)
+        if kwargs["model"] not in available_models:
+            raise UnsupportedModelError(
+                f"Model '{kwargs['model']}' is not yet supported. Choose from {available_models}"
+ )
+ super().__init__(*args, **kwargs)
+ self.chat_endpoint = "https://api.novita.ai/v3/openai/chat/completions"
diff --git a/tests/test_novita.py b/tests/test_novita.py
new file mode 100644
index 0000000..7988c46
--- /dev/null
+++ b/tests/test_novita.py
@@ -0,0 +1,24 @@
+import unittest
+import tests.base as base
+from os import getenv
+from pytgpt.novita import NOVITA
+from pytgpt.novita import AsyncNOVITA
+
+API_KEY = getenv("NOVITA_API_KEY")
+
+
+class TestNovita(base.llmBase):
+ def setUp(self):
+ self.bot = NOVITA(API_KEY)
+ self.prompt = base.prompt
+
+
+class TestAsyncNovita(base.AsyncProviderBase):
+
+ def setUp(self):
+ self.bot = AsyncNOVITA(API_KEY)
+ self.prompt = base.prompt
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_openai.py b/tests/test_openai.py
index d61c7aa..c5e062c 100644
--- a/tests/test_openai.py
+++ b/tests/test_openai.py
@@ -4,17 +4,19 @@
from pytgpt.openai import OPENAI
from pytgpt.openai import AsyncOPENAI
+API_KEY = getenv("OPENAI_API_KEY")
+
class TestOpenai(base.llmBase):
def setUp(self):
- self.bot = OPENAI(getenv("OPENAI_API_KEY"))
+ self.bot = OPENAI(API_KEY)
self.prompt = base.prompt
class TestAsyncOpenai(base.AsyncProviderBase):
def setUp(self):
- self.bot = AsyncOPENAI()
+ self.bot = AsyncOPENAI(API_KEY)
self.prompt = base.prompt