Skip to content

Commit

Permalink
CLI: chat - save|load token count to|from JSON cache
Browse files Browse the repository at this point in the history
  • Loading branch information
deedy5 committed Jul 27, 2024
1 parent dacd756 commit a1460e9
Showing 1 changed file with 2 additions and 1 deletion.
3 changes: 2 additions & 1 deletion duckduckgo_search/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,7 @@ def chat(load, proxy, multiline, timeout, model):
cache = json_loads(f.read())
client._chat_vqd = cache.get("vqd", None)
client._chat_messages = cache.get("messages", [])
client._chat_tokens_count = cache.get("tokens", 0)

while True:
print(f"{'-'*78}\nYou[{model=} tokens={client._chat_tokens_count}]: ", end="")
Expand All @@ -176,7 +177,7 @@ def chat(load, proxy, multiline, timeout, model):
resp_answer = client.chat(keywords=user_input, model=model, timeout=timeout)
click.secho(f"AI: {resp_answer}", bg="black", fg="green")

cache = {"vqd": client._chat_vqd, "messages": client._chat_messages}
cache = {"vqd": client._chat_vqd, "tokens": client._chat_tokens_count, "messages": client._chat_messages}
_save_json(cache_file, cache)


Expand Down

0 comments on commit a1460e9

Please sign in to comment.