From d49ec7796914678dd69177a226f50e6256e1a773 Mon Sep 17 00:00:00 2001
From: nicolasgere
Date: Tue, 16 Jan 2024 16:14:55 -0800
Subject: [PATCH] [BUG] update openai api in example (#1641)

## Description of changes

- Update the OpenAI call to use the new API (#1640)
---
 examples/chat_with_your_documents/main.py          | 14 +++++++-------
 examples/chat_with_your_documents/requirements.txt |  2 +-
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/examples/chat_with_your_documents/main.py b/examples/chat_with_your_documents/main.py
index 58b85499d5d..dcc631beb78 100644
--- a/examples/chat_with_your_documents/main.py
+++ b/examples/chat_with_your_documents/main.py
@@ -1,12 +1,12 @@
 import argparse
 import os
 from typing import List, Dict
-
+from openai.types.chat import ChatCompletionMessageParam
 import openai
 import chromadb
 
 
-def build_prompt(query: str, context: List[str]) -> List[Dict[str, str]]:
+def build_prompt(query: str, context: List[str]) -> List[ChatCompletionMessageParam]:
     """
     Builds a prompt for the LLM. #
 
@@ -21,10 +21,10 @@ def build_prompt(query: str, context: List[str]) -> List[Dict[str, str]]:
     context (List[str]): The context of the query, returned by embedding search.
 
     Returns:
-    A prompt for the LLM (List[Dict[str, str]]).
+    A prompt for the LLM (List[ChatCompletionMessageParam]).
     """
 
-    system = {
+    system: ChatCompletionMessageParam = {
         "role": "system",
         "content": "I am going to ask you a question, which I would like you to answer"
         "based only on the provided context, and not any other information."
@@ -32,11 +32,11 @@ def build_prompt(query: str, context: List[str]) -> List[Dict[str, str]]:
         'say "I am not sure", then try to make a guess.'
         "Break your answer up into nicely readable paragraphs.",
     }
-    user = {
+    user: ChatCompletionMessageParam = {
         "role": "user",
         "content": f"The question is {query}. Here is all the context you have:"
         f'{(" ").join(context)}',
-    }
+    }
 
     return [system, user]
 
@@ -52,7 +52,7 @@ def get_chatGPT_response(query: str, context: List[str], model_name: str) -> str
     Returns:
     A response to the question.
     """
-    response = openai.ChatCompletion.create(
+    response = openai.chat.completions.create(
         model=model_name,
         messages=build_prompt(query, context),
     )
diff --git a/examples/chat_with_your_documents/requirements.txt b/examples/chat_with_your_documents/requirements.txt
index a7b995025e1..61a378d9ea4 100644
--- a/examples/chat_with_your_documents/requirements.txt
+++ b/examples/chat_with_your_documents/requirements.txt
@@ -1,3 +1,3 @@
 chromadb>=0.4.4
-openai
+openai>=1.7.2
 tqdm
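For reference, here is a minimal sketch of the `openai>=1.x` call pattern this patch migrates to. The model name, messages, and response handling below are illustrative placeholders rather than code from `main.py`, and the sketch assumes `OPENAI_API_KEY` is set in the environment.

```python
from typing import List

import openai
from openai.types.chat import ChatCompletionMessageParam

# Chat messages are plain dicts typed as ChatCompletionMessageParam,
# the same shape that build_prompt() returns in the example.
messages: List[ChatCompletionMessageParam] = [
    {"role": "system", "content": "Answer based only on the provided context."},
    {"role": "user", "content": "Example question goes here."},  # placeholder query
]

# In openai>=1.x, openai.ChatCompletion.create is replaced by
# openai.chat.completions.create, which returns a typed object.
response = openai.chat.completions.create(model="gpt-3.5-turbo", messages=messages)

# The generated text lives on choices[0].message.content instead of a dict lookup.
print(response.choices[0].message.content)
```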