From bbfeb3cbbd02d26ce5ad1121334e49d86655cd5b Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Wed, 2 Aug 2023 15:41:32 -0700
Subject: [PATCH] adding support for palm, anthropic, llama2, cohere

---
 bot/openai_utils.py | 12 +++++++++---
 requirements.txt    |  3 ++-
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/bot/openai_utils.py b/bot/openai_utils.py
index 611faf4b8..dd074faab 100644
--- a/bot/openai_utils.py
+++ b/bot/openai_utils.py
@@ -1,5 +1,5 @@
 import config
-
+from litellm import completion
 import tiktoken
 import openai
 
@@ -48,6 +48,14 @@ async def send_message(self, message, dialog_messages=[], chat_mode="assistant")
                         **OPENAI_COMPLETION_OPTIONS
                     )
                     answer = r.choices[0].text
+                elif self.model in ["gpt-3.5-turbo-16k-0613", "command-nightly", "claude-2", "claude-instant-1", "chat-bison-001", "text-bison-001"] or "replicate" in self.model:
+                    messages = self._generate_prompt_messages(message, dialog_messages, chat_mode)
+                    r = completion(
+                        model=self.model,
+                        messages=messages,
+                        **OPENAI_COMPLETION_OPTIONS
+                    )
+                    answer = r.choices[0].message["content"]
                 else:
                     raise ValueError(f"Unknown model: {self.model}")
 
@@ -105,8 +113,6 @@ async def send_message_stream(self, message, dialog_messages=[], chat_mode="assi
                     n_first_dialog_messages_removed = n_dialog_messages_before - len(dialog_messages)
                     yield "not_finished", answer, (n_input_tokens, n_output_tokens), n_first_dialog_messages_removed
 
-            answer = self._postprocess_answer(answer)
-
         except openai.error.InvalidRequestError as e:  # too many tokens
             if len(dialog_messages) == 0:
                 raise e
diff --git a/requirements.txt b/requirements.txt
index 354063b17..51f35b245 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,5 @@ tiktoken>=0.3.0
 PyYAML==6.0
 pymongo==4.3.3
 python-dotenv==0.21.0
-pydub==0.25.1
\ No newline at end of file
+pydub==0.25.1
+litellm==0.1.218
\ No newline at end of file