diff --git a/backend/app/agent.py b/backend/app/agent.py
index 92f2640a..012a5425 100644
--- a/backend/app/agent.py
+++ b/backend/app/agent.py
@@ -58,7 +58,7 @@
 
 class AgentType(str, Enum):
     GPT_35_TURBO = "GPT 3.5 Turbo"
-    GPT_4 = "GPT 4"
+    GPT_4 = "GPT 4 Turbo"
     AZURE_OPENAI = "GPT 4 (Azure OpenAI)"
     CLAUDE2 = "Claude 2"
     BEDROCK_CLAUDE2 = "Claude 2 (Amazon Bedrock)"
@@ -169,7 +169,7 @@ def __init__(
 
 class LLMType(str, Enum):
     GPT_35_TURBO = "GPT 3.5 Turbo"
-    GPT_4 = "GPT 4"
+    GPT_4 = "GPT 4 Turbo"
     AZURE_OPENAI = "GPT 4 (Azure OpenAI)"
     CLAUDE2 = "Claude 2"
     BEDROCK_CLAUDE2 = "Claude 2 (Amazon Bedrock)"
diff --git a/backend/app/llms.py b/backend/app/llms.py
index bb58acb1..950c7411 100644
--- a/backend/app/llms.py
+++ b/backend/app/llms.py
@@ -24,20 +24,13 @@ def get_openai_llm(gpt_4: bool = False, azure: bool = False):
             logger.warn("Invalid proxy URL provided. Proceeding without proxy.")
 
     if not azure:
-        if gpt_4:
-            llm = ChatOpenAI(
-                http_client=http_client,
-                model="gpt-4-1106-preview",
-                temperature=0,
-                streaming=True,
-            )
-        else:
-            llm = ChatOpenAI(
-                http_client=http_client,
-                model="gpt-3.5-turbo-1106",
-                temperature=0,
-                streaming=True,
-            )
+        openai_model = "gpt-4-turbo-preview" if gpt_4 else "gpt-3.5-turbo"
+        llm = ChatOpenAI(
+            http_client=http_client,
+            model=openai_model,
+            temperature=0,
+            streaming=True,
+        )
     else:
         llm = AzureChatOpenAI(
             http_client=http_client,