From 023e71d33c4b63e31600866560afddaa6fdc0e0e Mon Sep 17 00:00:00 2001 From: PatillaCode Date: Wed, 29 Nov 2023 23:39:40 +0100 Subject: [PATCH] streaming + clean up --- chat.py | 31 +++++++++++-------------------- test.py | 23 +++++++++++++++++++++++ 2 files changed, 34 insertions(+), 20 deletions(-) create mode 100644 test.py diff --git a/chat.py b/chat.py index 5d9eecf..a63f5ed 100644 --- a/chat.py +++ b/chat.py @@ -20,23 +20,18 @@ def start_chat(model, verbose=False): display_output("Starting chat...") system = {"role": "system", "content": f"DIRECTIVE_FOR_{model}"} - # message = {"role": "user", "content": ""} conversation = [system] - tokens = 0 console = Console() completion = client.chat.completions.create( model=model, messages=conversation, - # stream=True, ) - # for chunk in completion: - # print(chunk.choices[0].delta) while True: prompt = input("\n􀳾 > ") - if prompt == "": + if prompt.strip() == "": continue elif prompt == "exit": break @@ -47,21 +42,17 @@ completion = client.chat.completions.create( model=model, messages=conversation, - # stream=True, + stream=True, ) - # for chunk in completion: - # if chunk.choices[0].delta.content is not None: - # print(chunk.choices[0].delta.content, end="") - message = completion.choices[0].message - conversation.append({"role": "system", "content": message.content}) - tokens = completion.usage.total_tokens - - display_output("\n􀪬 > ", end="") - lines = split_message(message.content) - for line in lines: - display_output(f"{line}", color="yellow") - - display_output(f"\n\n􀪬 ({tokens})", color="magenta") + messages = [] + for chunk in completion: + if chunk.choices[0].delta.content is not None: + content = chunk.choices[0].delta.content + messages.append(content) + display_output(content, color="yellow", end="") + + full_message = "".join(messages) + conversation.append({"role": "assistant", "content": full_message}) except Exception as e: handle_error(e, verbose) diff --git 
a/test.py b/test.py new file mode 100644 index 0000000..cf788b8 --- /dev/null +++ b/test.py @@ -0,0 +1,23 @@ +from dotenv import load_dotenv +from icecream import ic # noqa: F401 +from openai import OpenAI + +load_dotenv() + +client = OpenAI() + +stream = client.chat.completions.create( + model="gpt-3.5-turbo", + messages=[ + { + "role": "user", + "content": "Say this is a test, write a 20 word text poem about it.", + } + ], + stream=True, +) +ic(stream) +for chunk in stream: + ic(chunk) + if chunk.choices[0].delta.content is not None: + print(chunk.choices[0].delta.content, end="")