Skip to content

Commit

Permalink
Added code to acquire lock before processing the prompt to avoid app…
Browse files Browse the repository at this point in the history
…lication crash when multiple prompt requests arrive at the same time
  • Loading branch information
KerenK-EXRM committed Feb 1, 2024
1 parent 1fca83a commit 32bdac4
Showing 1 changed file with 24 additions and 15 deletions.
39 changes: 24 additions & 15 deletions run_localGPT_API.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,12 @@

from constants import CHROMA_SETTINGS, EMBEDDING_MODEL_NAME, PERSIST_DIRECTORY, MODEL_ID, MODEL_BASENAME

# API queue addition
# Module-level lock used by prompt_route() to serialize access to the QA
# chain. Per the commit message, concurrent prompt requests previously
# crashed the application, so requests are processed one at a time.
from threading import Lock

request_lock = Lock()


if torch.backends.mps.is_available():
DEVICE_TYPE = "mps"
elif torch.cuda.is_available():
Expand Down Expand Up @@ -155,23 +161,26 @@ def run_ingest_route():
@app.route("/api/prompt_route", methods=["GET", "POST"])
def prompt_route():
global QA
global request_lock # Make sure to use the global lock instance
user_prompt = request.form.get("user_prompt")
if user_prompt:
# print(f'User Prompt: {user_prompt}')
# Get the answer from the chain
res = QA(user_prompt)
answer, docs = res["result"], res["source_documents"]

prompt_response_dict = {
"Prompt": user_prompt,
"Answer": answer,
}

prompt_response_dict["Sources"] = []
for document in docs:
prompt_response_dict["Sources"].append(
(os.path.basename(str(document.metadata["source"])), str(document.page_content))
)
# Acquire the lock before processing the prompt
with request_lock:
# print(f'User Prompt: {user_prompt}')
# Get the answer from the chain
res = QA(user_prompt)
answer, docs = res["result"], res["source_documents"]

prompt_response_dict = {
"Prompt": user_prompt,
"Answer": answer,
}

prompt_response_dict["Sources"] = []
for document in docs:
prompt_response_dict["Sources"].append(
(os.path.basename(str(document.metadata["source"])), str(document.page_content))
)

return jsonify(prompt_response_dict), 200
else:
Expand Down

0 comments on commit 32bdac4

Please sign in to comment.