diff --git a/requirements.txt b/requirements.txt
index 106c82f63e..b333c71a96 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -24,6 +24,7 @@ importlib-metadata==4.8.2
 jmespath==1.0.1
 joblib>=1.1.1
 kiwisolver==1.4.4
+litellm==0.1.516
 matplotlib==3.7.2
 multidict==6.0.4
 nltk==3.6.5
diff --git a/reverie/backend_server/persona/prompt_template/gpt_structure.py b/reverie/backend_server/persona/prompt_template/gpt_structure.py
index f9c4718949..f2cce11b55 100644
--- a/reverie/backend_server/persona/prompt_template/gpt_structure.py
+++ b/reverie/backend_server/persona/prompt_template/gpt_structure.py
@@ -8,6 +8,7 @@
 import random
 import openai
 import time
+import litellm
 
 from utils import *
 
@@ -19,7 +20,7 @@ def temp_sleep(seconds=0.1):
 def ChatGPT_single_request(prompt):
   temp_sleep()
 
-  completion = openai.ChatCompletion.create(
+  completion = litellm.completion(
     model="gpt-3.5-turbo",
     messages=[{"role": "user", "content": prompt}]
   )
@@ -45,7 +46,7 @@ def GPT4_request(prompt):
   temp_sleep()
 
   try:
-    completion = openai.ChatCompletion.create(
+    completion = litellm.completion(
     model="gpt-4",
     messages=[{"role": "user", "content": prompt}]
     )
@@ -70,7 +71,7 @@ def ChatGPT_request(prompt):
   """
   # temp_sleep()
   try:
-    completion = openai.ChatCompletion.create(
+    completion = litellm.completion(
     model="gpt-3.5-turbo",
     messages=[{"role": "user", "content": prompt}]
     )
diff --git a/reverie/backend_server/test.py b/reverie/backend_server/test.py
index 41ce26155b..41f2d3ced6 100644
--- a/reverie/backend_server/test.py
+++ b/reverie/backend_server/test.py
@@ -7,6 +7,7 @@
 import json
 import random
 import openai
+import litellm
 import time
 
 from utils import *
@@ -26,7 +27,7 @@ def ChatGPT_request(prompt):
   """
   # temp_sleep()
   try:
-    completion = openai.ChatCompletion.create(
+    completion = litellm.completion(
     model="gpt-3.5-turbo",
     messages=[{"role": "user", "content": prompt}]
     )
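
Note on the change: litellm.completion mirrors the openai.ChatCompletion.create interface (a model string plus a messages list) and returns an OpenAI-format response, which is why the surrounding code that reads completion["choices"][0]["message"]["content"] is left untouched by this patch. Below is a minimal, self-contained sketch of the call pattern being adopted; it assumes litellm is installed and OPENAI_API_KEY is set in the environment, and the prompt text is illustrative rather than taken from the repository.

    # Minimal sketch of the call pattern this diff switches to.
    # Assumes: `pip install litellm` and OPENAI_API_KEY exported in the environment.
    import litellm

    response = litellm.completion(
      model="gpt-3.5-turbo",
      messages=[{"role": "user", "content": "Say hello in one word."}]
    )

    # litellm returns an OpenAI-style response, so the repository's existing
    # parsing of completion["choices"][0]["message"]["content"] keeps working.
    print(response["choices"][0]["message"]["content"])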