Commit

Use o1 for chat (#1596)
beastoin authored Dec 27, 2024
2 parents 4dbdf63 + bda6751 commit 4e65dab
Showing 1 changed file with 2 additions and 1 deletion.
3 changes: 2 additions & 1 deletion backend/utils/llm.py
@@ -22,6 +22,7 @@
 from utils.prompts import extract_facts_prompt, extract_learnings_prompt
 
 llm_mini = ChatOpenAI(model='gpt-4o-mini')
+llm_large = ChatOpenAI(model='o1-preview')
 embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
 parser = PydanticOutputParser(pydantic_object=Structured)

@@ -623,7 +624,7 @@ def qa_rag(uid: str, question: str, context: str, plugin: Optional[Plugin] = Non
     Anwser:
     """.replace('    ', '').replace('\n\n\n', '\n\n').strip()
     # print('qa_rag prompt', prompt)
-    return ChatOpenAI(model='gpt-4o').invoke(prompt).content
+    return llm_large.invoke(prompt).content
 
 
 def qa_rag_v3(uid: str, question: str, context: str, plugin: Optional[Plugin] = None, cited: Optional[bool] = False, messages: List[Message] = [], tz: Optional[str] = "UTC") -> str:
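For context, here is a minimal, self-contained sketch (not code from the repository) of the calling pattern this commit moves qa_rag to: a module-level LangChain client created once and reused, then invoked with a fully rendered prompt string. The model names and the .invoke(prompt).content call mirror the diff; the helper name answer, the example prompt text, and the sample inputs are illustrative assumptions. Running it requires the langchain-openai package and an OPENAI_API_KEY in the environment.

from langchain_openai import ChatOpenAI

# Shared, module-level clients, mirroring the diff: the mini model stays on
# gpt-4o-mini, while chat answers now go through the o1-preview client.
llm_mini = ChatOpenAI(model='gpt-4o-mini')
llm_large = ChatOpenAI(model='o1-preview')


def answer(question: str, context: str) -> str:
    # qa_rag builds a much richer prompt in the real file; this stand-in
    # (an assumption for illustration) keeps only the overall shape.
    prompt = f"Context:\n{context}\n\nQuestion: {question}\nAnswer:"
    # .invoke() returns a message object; .content is the text, as in the changed line.
    return llm_large.invoke(prompt).content


if __name__ == '__main__':
    print(answer("What was decided about the chat model?",
                 "Notes: the team agreed to route chat answers through o1."))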