Skip to content

Commit

Permalink
fix: Remove yepchat and opengpt providers
Browse files Browse the repository at this point in the history
fix: Remove provider promo in Blackboxai's responses
refactor: Format code with black.
fix: Other minor bugs
  • Loading branch information
Simatwa committed Nov 10, 2024
1 parent 5c361e4 commit fd63987
Show file tree
Hide file tree
Showing 15 changed files with 94 additions and 1,150 deletions.
21 changes: 1 addition & 20 deletions docs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,6 @@ The name *python-tgpt* draws inspiration from its parent project [tgpt](https://
These are simply the hosts of the LLMs; they include:

- [Koboldai](https://koboldai-koboldcpp-tiefighter.hf.space)
- [OpenGPTs](https://opengpts-example-vz4y4ooboq-uc.a.run.app/)
- [OpenAI](https://chat.openai.com) *(API key required)*
- [Phind](https://www.phind.com)
- [Blackboxai](https://www.blackbox.ai)
Expand Down Expand Up @@ -299,24 +298,6 @@ print(bot.chat("<Your-prompt>"))

</details>


<details>

<summary>
Opengpt

</summary>

```python
import pytgpt.opengpt as opengpt
bot = opengpt.OPENGPT()
print(bot.chat("<Your-prompt>"))
```

</details>

<details>

<summary>
phind

Expand Down Expand Up @@ -349,7 +330,7 @@ print(bot.chat("<Your-prompt>"))

**Version 0.7.0** introduces asynchronous implementation to almost all providers except a few such as *perplexity*, which relies on other libraries that lack such an implementation.

To make it easier, you just have to prefix `Async` to the common synchronous class name. For instance `OPENGPT` will be accessed as `AsyncOPENGPT`:
To make it easier, you just have to prefix `Async` to the common synchronous class name. For instance `PHIND` will be accessed as `AsyncPHIND`:

#### Streaming whole AI response.

Expand Down
2 changes: 0 additions & 2 deletions src/pytgpt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
tgpt_providers = [
"auto",
"openai",
"opengpt",
"koboldai",
"phind",
"blackboxai",
Expand All @@ -22,7 +21,6 @@
"poe",
"groq",
"perplexity",
"yepchat",
"novita",
]

Expand Down
6 changes: 0 additions & 6 deletions src/pytgpt/async_providers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
from pytgpt.phind import AsyncPHIND
from pytgpt.yepchat import AsyncYEPCHAT
from pytgpt.opengpt import AsyncOPENGPT
from pytgpt.openai import AsyncOPENAI
from pytgpt.koboldai import AsyncKOBOLDAI
from pytgpt.groq import AsyncGROQ
Expand All @@ -10,20 +8,16 @@

mapper: dict[str, object] = {
"phind": AsyncPHIND,
"opengpt": AsyncOPENGPT,
"koboldai": AsyncKOBOLDAI,
"blackboxai": AsyncBLACKBOXAI,
"gpt4free": AsyncGPT4FREE,
"yepchat": AsyncYEPCHAT,
"groq": AsyncGROQ,
"openai": AsyncOPENAI,
"novita": AsyncNOVITA,
}

tgpt_mapper: dict[str, object] = {
"phind": AsyncPHIND,
"opengpt": AsyncOPENGPT,
"koboldai": AsyncKOBOLDAI,
"blackboxai": AsyncBLACKBOXAI,
"yepchat": AsyncYEPCHAT,
}
11 changes: 2 additions & 9 deletions src/pytgpt/auto/main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from pytgpt.base import Provider, AsyncProvider
from pytgpt.opengpt import OPENGPT, AsyncOPENGPT
from pytgpt.koboldai import KOBOLDAI, AsyncKOBOLDAI
from pytgpt.phind import PHIND, AsyncPHIND
from pytgpt.blackboxai import BLACKBOXAI, AsyncBLACKBOXAI
Expand All @@ -16,12 +15,9 @@
import logging


provider_map: dict[
str, Union[OPENGPT, KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE]
] = {
provider_map: dict[str, Union[KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE]] = {
"phind": PHIND,
"perplexity": PERPLEXITY,
"opengpt": OPENGPT,
"koboldai": KOBOLDAI,
"blackboxai": BLACKBOXAI,
"gpt4free": GPT4FREE,
Expand Down Expand Up @@ -56,9 +52,7 @@ def __init__(
act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
exclude(list[str], optional): List of providers to be excluded. Defaults to [].
"""
self.provider: Union[
OPENGPT, KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE
] = None
self.provider: Union[KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE] = None
self.provider_name: str = None
self.is_conversation = is_conversation
self.max_tokens = max_tokens
Expand Down Expand Up @@ -263,7 +257,6 @@ def __init__(
exclude(list[str], optional): List of providers to be excluded. Defaults to [].
"""
self.provider: Union[
AsyncOPENGPT,
AsyncKOBOLDAI,
AsyncPHIND,
AsyncBLACKBOXAI,
Expand Down
24 changes: 20 additions & 4 deletions src/pytgpt/blackboxai/main.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import re
import json
import httpx
import requests
Expand All @@ -12,6 +13,10 @@

default_model = None

provider_promo_text = (
r"Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai"
)


class BLACKBOXAI(Provider):
def __init__(
Expand Down Expand Up @@ -153,8 +158,13 @@ def for_stream():
try:
if bool(value):
streaming_text += value + ("\n" if stream else "")

resp = dict(text=streaming_text)
resp = dict(
text=(
re.sub(provider_promo_text, "", streaming_text)
if provider_promo_text in streaming_text
else streaming_text
).strip()
)
self.last_response.update(resp)
yield value if raw else resp
except json.decoder.JSONDecodeError:
Expand Down Expand Up @@ -352,7 +362,13 @@ async def for_stream():
try:
if bool(value):
streaming_text += value + ("\n" if stream else "")
resp = dict(text=streaming_text)
resp = dict(
text=(
re.sub(provider_promo_text, "", streaming_text)
if provider_promo_text in streaming_text
else streaming_text
).strip()
)
self.last_response.update(resp)
yield value if raw else resp
except json.decoder.JSONDecodeError:
Expand Down Expand Up @@ -421,7 +437,7 @@ async def get_message(self, response: dict) -> str:
bot = BLACKBOXAI()

def main():
resp = bot.ask("hello")
resp = bot.ask("hello", True)
for value in resp:
print(value)

Expand Down
98 changes: 40 additions & 58 deletions src/pytgpt/console.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

from typing import Iterable

#pytgpt
# pytgpt

from pytgpt.utils import Optimizers
from pytgpt.utils import default_path
Expand Down Expand Up @@ -344,6 +344,7 @@ def main(*args, **kwargs):

return decorator


class CustomCompleter(Completer):
"""Suggests query based on user prompts"""

Expand Down Expand Up @@ -371,14 +372,15 @@ def get_completions(self, document: Document, complete_event):
)
return completions
for count, suggestion in enumerate(
suggest_query(word, timeout=2, die_silently=True),
start=1):
suggest_query(word, timeout=2, die_silently=True), start=1
):
completions.append(Completion(suggestion, start_position=-len(word)))
if count >= self.suggestions_limit:
break
return completions
return []


class Main(cmd.Cmd):
intro = (
"Welcome to AI Chat in terminal. "
Expand Down Expand Up @@ -506,21 +508,6 @@ def __init__(
act=awesome_prompt,
)

elif provider == "opengpt":
from pytgpt.opengpt import OPENGPT

self.bot = OPENGPT(
is_conversation=disable_conversation,
max_tokens=max_tokens,
timeout=timeout,
intro=intro,
filepath=filepath,
update_file=update_file,
proxies=proxies,
history_offset=history_offset,
act=awesome_prompt,
)

elif provider == "koboldai":
from pytgpt.koboldai import KOBOLDAI

Expand Down Expand Up @@ -571,26 +558,6 @@ def __init__(
act=awesome_prompt,
)

elif provider == "yepchat":
from pytgpt.yepchat import main as yepchat

self.bot = yepchat.YEPCHAT(
is_conversation=disable_conversation,
max_tokens=max_tokens,
temperature=temperature,
presence_penalty=top_p,
frequency_penalty=top_k,
top_p=top_p,
model=getOr(model, yepchat.model),
timeout=timeout,
intro=intro,
filepath=filepath,
update_file=update_file,
proxies=proxies,
history_offset=history_offset,
act=awesome_prompt,
)

elif provider == "gpt4all":
assert auth, (
"Path to LLM (.gguf or .bin) file is required. "
Expand Down Expand Up @@ -743,20 +710,35 @@ def __init__(
self.path_to_last_response_audio = None
if not non_interactive:
self.completer_session = PromptSession(
"",
completer=ThreadedCompleter(
CustomCompleter(
self,
suggestions_limit,
[
"cd", "copy_this", "h", "last_response", "rawdog",
"settings", "with_copied",
"clear", "exec", "help", "load", "reread", "shell",
"code", "exit", "history", "new_intro", "reset", "sys",
],
)
),
)
"",
completer=ThreadedCompleter(
CustomCompleter(
self,
suggestions_limit,
[
"cd",
"copy_this",
"h",
"last_response",
"rawdog",
"settings",
"with_copied",
"clear",
"exec",
"help",
"load",
"reread",
"shell",
"code",
"exit",
"history",
"new_intro",
"reset",
"sys",
],
)
),
)
self.__init_time = time.time()
self.__start_time = time.time()
self.__end_time = time.time()
Expand Down Expand Up @@ -787,7 +769,7 @@ def find_range(start, end, hms: bool = False):
f"~[`{Fore.LIGHTWHITE_EX}🕒{Fore.BLUE}{current_time}-`"
f"{Fore.LIGHTWHITE_EX}💻{Fore.RED}{find_range(self.__init_time, time.time(), True)}-`"
f"{Fore.LIGHTWHITE_EX}{Fore.YELLOW}{find_range(self.__start_time, self.__end_time)}s]`"
# f"\n╰─>"
# f"\n╰─>"
)
whitelist = ["[", "]", "~", "-", "(", ")"]
for character in whitelist:
Expand All @@ -800,8 +782,9 @@ def find_range(start, end, hms: bool = False):
f"~[🕒{current_time}"
f"-💻{find_range(self.__init_time, time.time(), True)}"
f"-⚡{find_range(self.__start_time, self.__end_time)}s]"
#"\n╰─>"
# "\n╰─>"
)

def cmdloop(self, intro=None):
"""Repeatedly issue a prompt, accept input, parse an initial prefix
off the received input, and dispatch to action methods, passing them
Expand Down Expand Up @@ -864,7 +847,6 @@ def cmdloop(self, intro=None):
except ImportError:
pass


def output_bond(
self,
title: str,
Expand Down Expand Up @@ -1470,7 +1452,7 @@ class ChatInteractive:
),
)
@click.option(
'-sl',
"-sl",
"--suggestions-limit",
type=click.INT,
help="Prompt suggestions limit - 0 to disable suggestion",
Expand Down Expand Up @@ -1625,7 +1607,7 @@ def interactive(
internal_exec=internal_exec,
confirm_script=confirm_script,
interpreter=interpreter,
suggestions_limit=suggestions_limit
suggestions_limit=suggestions_limit,
)
busy_bar.spin_index = busy_bar_index
bot.code_theme = code_theme
Expand Down Expand Up @@ -1925,7 +1907,7 @@ def generate(
internal_exec=internal_exec,
confirm_script=confirm_script,
interpreter=interpreter,
non_interactive=True
non_interactive=True,
)
prompt = prompt if prompt else ""
copied_placeholder = "{{copied}}"
Expand Down
4 changes: 0 additions & 4 deletions src/pytgpt/opengpt/__init__.py

This file was deleted.

Loading

0 comments on commit fd63987

Please sign in to comment.