Skip to content

Commit 8f0b2d9

Browse files
committed
Add caching for chat history loading and improve error handling
1 parent 8a4f3a2 commit 8f0b2d9

File tree

1 file changed

+35
-34
lines changed

1 file changed

+35
-34
lines changed

clibot/chat.py

Lines changed: 35 additions & 34 deletions
Original file line number · Diff line number · Diff line change
@@ -7,6 +7,7 @@
77
from clibot.config import *
88
from clibot.colors import *
99
from openai import AsyncOpenAI
10+
from functools import lru_cache
1011

1112

1213
package_dir = os.path.dirname(os.path.abspath(__file__))
@@ -20,6 +21,7 @@
2021
/clear Clear the conversation history"""
2122

2223

24+
@lru_cache(maxsize=1)
2325
def load_chat_history():
2426
if os.path.exists(history_file):
2527
try:
@@ -55,18 +57,17 @@ async def chatbot(messages, messages_history, client):
5557

5658
try:
5759
loading.start()
58-
response = await client.chat.completions.create(
60+
stream = await client.chat.completions.create(
5961
model=AI_MODEL,
6062
messages=messages_history,
6163
temperature=TEMPERATURE,
6264
top_p=TOP_P,
6365
max_tokens=MAX_TOKENS,
6466
stream=True,
65-
# stop=None
6667
)
6768
loading.stop()
6869
respond = ""
69-
async for chunk in response:
70+
async for chunk in stream:
7071
if chunk.choices[0].delta.content:
7172
content = chunk.choices[0].delta.content
7273
print(content, end="", flush=True)
@@ -77,10 +78,9 @@ async def chatbot(messages, messages_history, client):
7778
print()
7879

7980
except KeyboardInterrupt:
80-
loading.stop()
81-
streaming_response(f"\n{LIGHT_RED}Exiting...{COLOR_RESET}")
82-
exit()
83-
81+
loading.stop()
82+
streaming_response(f"\n{LIGHT_RED}Exiting...{COLOR_RESET}")
83+
exit()
8484
except Exception as e:
8585
loading.stop()
8686
streaming_response(f"{LIGHT_RED}Error: {str(e)}{COLOR_RESET}\n")
@@ -90,31 +90,32 @@ async def chatbot(messages, messages_history, client):
9090
pass
9191

9292

93-
def start_chat():
94-
try:
95-
query = input(">>> ").strip() or "/help"
96-
if query == "/bye":
97-
exit()
98-
elif query == "/help":
99-
print(help_msg + "\n")
100-
elif query == "/clear":
101-
clear_chat_history()
102-
else:
103-
if AI_PROVIDER == "groq":
104-
client = AsyncOpenAI(api_key=API_KEY, base_url=GROQ_AI_ENDPOINT)
105-
elif AI_PROVIDER == "openai":
106-
client = AsyncOpenAI(api_key=API_KEY)
107-
elif AI_PROVIDER == "mistral":
108-
client = AsyncOpenAI(api_key=API_KEY, base_url=MISTRAL_AI_ENDPOINT)
109-
elif AI_PROVIDER == "ollama":
110-
client = AsyncOpenAI(api_key=API_KEY, base_url=OLLAMA_AI_ENDPOINT)
111-
asyncio.run(chatbot({"role": "user", "content": query}, load_chat_history(), client))
112-
except KeyboardInterrupt:
113-
print("\nExiting...")
114-
exit()
115-
except Exception as e:
116-
print(f"\n{LIGHT_RED}Error: {str(e)}{COLOR_RESET}")
117-
exit()
118-
except RuntimeError:
119-
pass
93+
async def start_chat():
94+
try:
95+
query = input(">>> ").strip() or "/help"
96+
if query == "/bye":
97+
exit()
98+
elif query == "/help":
99+
print(help_msg + "\n")
100+
elif query == "/clear":
101+
clear_chat_history()
102+
else:
103+
client = AsyncOpenAI(
104+
api_key=API_KEY,
105+
base_url={
106+
"groq": GROQ_AI_ENDPOINT,
107+
"openai": None,
108+
"mistral": MISTRAL_AI_ENDPOINT,
109+
"ollama": OLLAMA_AI_ENDPOINT
110+
}.get(AI_PROVIDER)
111+
)
112+
await chatbot({"role": "user", "content": query}, load_chat_history(), client)
113+
except KeyboardInterrupt:
114+
print("\nExiting...")
115+
exit()
116+
except Exception as e:
117+
print(f"\n{LIGHT_RED}Error: {str(e)}{COLOR_RESET}")
118+
exit()
119+
except RuntimeError:
120+
pass
120121

0 commit comments

Comments (0)