-
Notifications
You must be signed in to change notification settings - Fork 0
/
chat.py
58 lines (46 loc) · 1.66 KB
/
chat.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
from dotenv import load_dotenv
from icecream import ic # noqa: F401
from openai import OpenAI
from rich.console import Console
from utils import display_output, handle_error
# Load environment variables (e.g. OPENAI_API_KEY) from a .env file so the
# OpenAI client below can authenticate.
load_dotenv()
# Module-level OpenAI client shared by all chat functions in this module.
client = OpenAI()
def start_chat(model, verbose=False):
    """
    Start an interactive, streaming chat session with the AI.

    Reads user prompts from stdin in a loop, streams the model's reply to
    the terminal as it arrives, and keeps the full conversation history so
    each turn has context. Type "exit" to quit; empty input is ignored.

    Args:
        model (str): The model identifier for the chat.
        verbose (bool, optional): Whether to display verbose error output.
            Defaults to False.
    """
    display_output("Starting chat...")
    # Seed the conversation with the system directive for this model.
    system = {"role": "system", "content": f"DIRECTIVE_FOR_{model}"}
    conversation = [system]
    console = Console()
    # NOTE: the original made an extra non-streaming completions.create()
    # call here and discarded the result — a wasted, billable API call.
    # It has been removed.
    while True:
        prompt = input("\n > ")
        if prompt.strip() == "":
            # Ignore blank input rather than sending an empty turn.
            continue
        elif prompt.strip() == "exit":
            break
        try:
            conversation.append({"role": "user", "content": prompt})
            # Show a spinner only until the stream is established; chunks
            # are then echoed live below.
            with console.status("[bold green]Generating..."):
                completion = client.chat.completions.create(
                    model=model,
                    messages=conversation,
                    stream=True,
                )
            # Accumulate streamed chunks while displaying them as they arrive.
            messages = []
            for chunk in completion:
                content = chunk.choices[0].delta.content
                if content is not None:
                    messages.append(content)
                    display_output(content, color="yellow", end="")
            full_message = "".join(messages)
            # BUG FIX: the assistant's reply must be recorded with the
            # "assistant" role (the original used "system"), otherwise the
            # model receives a corrupted history on every later turn.
            conversation.append({"role": "assistant", "content": full_message})
        except Exception as e:
            handle_error(e, verbose)