-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
versão com múltiplos agentes e comandos
- Loading branch information
Showing
13 changed files
with
207 additions
and
72 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
app/models/ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,4 @@ | ||
__pycache__/ | ||
.vscode/ | ||
.env | ||
|
||
app/models/* |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
# OpenAI-backed conversational agent: an LLMChain combining a persistent
# system prompt, buffered chat history, and token streaming to stdout.
import os

#LLM
from langchain import OpenAI
from langchain.chat_models import ChatOpenAI
from langchain.schema import SystemMessage
from langchain.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder
#Memory
from langchain.memory import ConversationBufferMemory
from langchain.chains import LLMChain

#CallBack
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

#Prompts
from .prompts import SYS_PROMPT

# Read once at import time; ChatOpenAI also falls back to this env var itself.
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')

# Define the LLM.
# BUG FIX: the original constructed ChatOpenAI twice — the second
# (streaming) assignment silently discarded the first one's
# model_name="gpt-3.5-turbo", so the chain ran on the library's default
# model. Build it once with both the model name and streaming callbacks.
llm = ChatOpenAI(
    model_name="gpt-3.5-turbo",
    streaming=True,
    callbacks=[StreamingStdOutCallbackHandler()],
)

prompt = ChatPromptTemplate.from_messages([
    SystemMessage(content=SYS_PROMPT),  # The persistent system prompt
    MessagesPlaceholder(variable_name="chat_history"),  # Where the memory will be stored.
    HumanMessagePromptTemplate.from_template("{human_input}"),  # Where the human input will injected
])

# return_messages=True because MessagesPlaceholder expects a list of
# message objects, not a flattened string.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

chat_llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    memory=memory,
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
# Local Llama-2 (llama.cpp) conversational agent, mirroring the OpenAI
# agent: system prompt + buffered chat history + stdout token streaming.
import os

#LLM
from langchain.llms import LlamaCpp
from langchain.schema import SystemMessage
from langchain.prompts import PromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder

#Prompt and Memory
from agent.prompts import SYS_PROMPT
from langchain.memory import ConversationBufferMemory

#Chain
from langchain.chains import LLMChain

#Callback for streaming
from langchain.callbacks.manager import CallbackManager
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

from utils import download_model

# FIX: the sibling OpenAI agent reads 'OPENAI_API_KEY'; 'OPENAI_APIKEY'
# here was a typo. (The value is unused by LlamaCpp, kept for consistency.)
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')

# GGUF model file, resolved (and downloaded on first run) by utils.download_model.
LLM_MODEL = "llama-2-7b-chat.Q4_K_M.gguf"
MODEL_PATH = download_model("TheBloke/Llama-2-7b-Chat-GGUF", LLM_MODEL)

#Define o LLM
# Callbacks support token-wise streaming
callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])

llm = LlamaCpp(
    model_path=MODEL_PATH,
    temperature=0.75,
    max_tokens=512,
    top_p=1,
    callback_manager=callback_manager,
    verbose=True,  # Verbose is required to pass to the callback manager
    streaming=True,
    stop=["Human:"],
)

# BUG FIX: the memory exposes a "chat_history" variable, but the original
# template never referenced it, so past turns were never injected into the
# prompt. Include {chat_history} in the Llama-2 instruction template, and
# use return_messages=False because a plain PromptTemplate formats strings,
# not message objects.
prompt = PromptTemplate.from_template(
    "<s>[INST] <<SYS>>\n" + SYS_PROMPT + "\n<</SYS>>\n\n"
    "{chat_history}\n{human_input} [/INST]</s>"
)

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=False)

chat_llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    memory=memory,
    verbose=True
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
# commands/agent_command.py

def run(args, session, ACTIVE_AGENTS, loaded_agents):
    """Handle the agent command.

    Subcommands (first element of ``args``):
      - "list": return the available agent names.
      - "set <name>": switch the session's current agent if ``name`` is in
        ``loaded_agents``.
      - no args: report the session's current agent (defaults to the first
        entry of ``ACTIVE_AGENTS``).

    Returns a user-facing message string in every case.
    """
    if args:
        subcommand = args[0]
        if subcommand == "list":
            return f"Agente disponíveis: {', '.join(ACTIVE_AGENTS)}"
        elif subcommand == "set":
            # "set" with no name yields new_agent=None, which is rejected below.
            new_agent = args[1] if len(args) > 1 else None
            if new_agent in loaded_agents:
                session['current_agent'] = new_agent
                return f"Agente configurado para {new_agent}"
            else:
                return "Agente invalido."
        else:
            # BUG FIX: unknown subcommands previously fell through and
            # implicitly returned None; report them explicitly instead.
            return f"Subcomando desconhecido: {subcommand}"
    else:
        current_agent = session.get('current_agent', ACTIVE_AGENTS[0])
        return f"O agente atual é {current_agent}"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -26,4 +26,6 @@ flask | |
flask_socketio | ||
|
||
gunicorn | ||
eventlet | ||
eventlet | ||
|
||
huggingface-hub==0.17.3 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,22 @@ | ||
import os | ||
from huggingface_hub import hf_hub_download | ||
|
||
def download_model(repo, model_file):
    """Ensure *model_file* from Hugging Face repo *repo* exists locally.

    Files are kept under a "models" directory next to this module. Returns
    the local filesystem path of the model file.
    """
    current_path = os.path.dirname(os.path.abspath(__file__))
    model_directory = os.path.join(current_path, "models")

    # Create the models directory if it does not exist.
    # makedirs(exist_ok=True) is race-free, unlike exists()+mkdir().
    os.makedirs(model_directory, exist_ok=True)

    model_path = os.path.join(model_directory, model_file)

    if not os.path.exists(model_path):
        print(f"O modelo {repo}/{model_file} não existe. Baixando...")
        # BUG FIX: hf_hub_download(cache_dir=...) stores the file inside the
        # HF cache layout (models--owner--name/snapshots/<rev>/...), so the
        # joined model_path above never exists after a download and the old
        # code returned a path to a missing file (and re-entered this branch
        # on every call). Return the actual path reported by the hub; when
        # the file is already cached this resolves quickly without a network
        # download.
        model_path = hf_hub_download(
            repo_id=repo,
            filename=model_file,
            cache_dir=model_directory
        )

    return model_path
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.