From da08e2369db59fd12f71d516f1862829f8da8b0f Mon Sep 17 00:00:00 2001
From: Hiroshi Nishio
Date: Fri, 25 Oct 2024 19:21:35 -0700
Subject: [PATCH] Replace o1-preview with o1-mini as o1-mini codes better than
 o1-preview

---
 config.py               | 2 +-
 services/openai/chat.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/config.py b/config.py
index 98454419..c1ac635a 100644
--- a/config.py
+++ b/config.py
@@ -57,7 +57,7 @@ def get_env_var(name: str) -> str:
 OPENAI_MAX_TOOL_OUTPUTS_SIZE = 512 * 1024 # in bytes
 OPENAI_MAX_TOKENS = 4096
 OPENAI_MODEL_ID_O1_PREVIEW = "o1-preview" # https://platform.openai.com/docs/models/o1
-# OPENAI_MODEL_ID_O1_MINI = "o1-mini" # https://platform.openai.com/docs/models/o1
+OPENAI_MODEL_ID_O1_MINI = "o1-mini" # https://platform.openai.com/docs/models/o1
 OPENAI_MODEL_ID_GPT_4O = "gpt-4o" # https://platform.openai.com/docs/models/gpt-4o
 OPENAI_ORG_ID: str = get_env_var(name="OPENAI_ORG_ID")
 OPENAI_TEMPERATURE = 0.0
diff --git a/services/openai/chat.py b/services/openai/chat.py
index 9a6febd3..e3ce17d5 100644
--- a/services/openai/chat.py
+++ b/services/openai/chat.py
@@ -3,7 +3,7 @@
 from openai.types.chat import ChatCompletion

 # Local imports
-from config import OPENAI_MODEL_ID_O1_PREVIEW
+from config import OPENAI_MODEL_ID_O1_MINI
 from services.openai.init import create_openai_client
 from services.openai.truncate import truncate_message
 from utils.handle_exceptions import handle_exceptions
@@ -25,7 +25,7 @@ def chat_with_ai(system_input: str, user_input: str) -> str:
                 "content": truncated_msg if truncated_msg else user_input,
             },
         ],
-        model=OPENAI_MODEL_ID_O1_PREVIEW,
+        model=OPENAI_MODEL_ID_O1_MINI,
         n=1,
         # temperature=OPENAI_TEMPERATURE, # temperature should be 0 but it is not supported for 01-mini as of Oct 5 2024
     )
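
For context, below is a minimal sketch of what the patched call site in services/openai/chat.py might look like after this change, assembled only from the fragments visible in the hunks above; it is not the repository's actual file. create_openai_client and truncate_message are repo-specific helpers imported in the diff, so the standard openai SDK client stands in here, and the system prompt is sent as a user message on the assumption that o1-mini did not accept the "system" role at the time. Temperature is omitted for the same reason noted in the commented-out line in the diff.

# Sketch only: reconstructed from the diff context, not the repository's full chat.py.
from openai import OpenAI
from openai.types.chat import ChatCompletion

OPENAI_MODEL_ID_O1_MINI = "o1-mini"  # mirrors the constant enabled in config.py

def chat_with_ai(system_input: str, user_input: str) -> str:
    client = OpenAI()  # stand-in for the repo's create_openai_client()
    completion: ChatCompletion = client.chat.completions.create(
        messages=[
            # Assumption: the system prompt is passed with the "user" role
            # because o1-mini rejected "system" messages as of late 2024.
            {"role": "user", "content": system_input},
            {"role": "user", "content": user_input},
        ],
        model=OPENAI_MODEL_ID_O1_MINI,
        n=1,
        # temperature omitted: only the default value is supported for o1-mini,
        # per the commented-out OPENAI_TEMPERATURE line in the diff.
    )
    return completion.choices[0].message.content or ""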