
Commit

Merge pull request #44 from bptlab/33-newest-openai-package
update openAI package from 0.28.2 to 1.5.0
thangixd authored Jan 8, 2024
2 parents 0deb01c + b3856bb commit ebe7226
Showing 6 changed files with 22 additions and 35 deletions.
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,4 +1,4 @@
openai==0.28.1
openai==1.5.0
pm4py
django
pre-commit
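
The jump from openai 0.28.x to 1.5.0 crosses the SDK's 1.0 rewrite: module-level configuration (openai.api_key, openai.ChatCompletion.create) is replaced by an explicit client object, as the file diffs below show. A minimal before/after sketch of the pattern this commit adopts (model name, prompt, and placeholder key are illustrative, not taken from the repository):

# Sketch of the 1.x call style this PR migrates to; values are placeholders.
from openai import OpenAI

# 0.x style (removed throughout this PR):
#   import openai
#   openai.api_key = "sk-..."
#   openai.ChatCompletion.create(model=..., messages=...)

client = OpenAI(api_key="sk-placeholder")  # or rely on the OPENAI_API_KEY env var

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello."}],
    max_tokens=20,
    temperature=0.7,
)
print(response.choices[0].message.content)
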
18 changes: 9 additions & 9 deletions tracex/extraction/prototype/create_prompts.py
@@ -1,9 +1,11 @@
"""Module providing functions to produce prompts by using GPT generations."""
import openai

from openai import OpenAI
import utils as u

openai.api_key = u.oaik
client = OpenAI(api_key=u.oaik)




NEW_PROMPTS_CONTEXT = """
You are an expert prompt engineer for gpt-3.5-turbo. You are tasked with creating the best possible prompts for given tasks.
@@ -20,12 +22,10 @@
{"role": "user", "content": NEW_PROMPTS_PROMPT},
{"role": "assistant", "content": NEW_PROMPTS_ANSWER},
]
new_prompts = openai.ChatCompletion.create(
model=u.MODEL,
messages=messages,
max_tokens=u.MAX_TOKENS,
temperature=u.TEMPERATURE_CREATION,
)
new_prompts = client.chat.completions.create(model=u.MODEL,
messages=messages,
max_tokens=u.MAX_TOKENS,
temperature=u.TEMPERATURE_CREATION)
output = new_prompts.choices[0].message.content
with open(
(u.output_path / "new_prompts.txt"),
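
One detail worth noting for this file: in the 1.x SDK the return value of client.chat.completions.create is a typed (pydantic) object rather than the dict-like OpenAIObject of 0.x, so the attribute access new_prompts.choices[0].message.content kept above continues to work unchanged. A small illustrative sketch (the prompt text is made up, and the model_dump call is an extra convenience not used in this commit):

from openai import OpenAI

client = OpenAI()  # key read from the OPENAI_API_KEY environment variable

completion = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Suggest one short prompt idea."}],
    max_tokens=50,
)

# Attribute access, as in create_prompts.py above.
text = completion.choices[0].message.content

# 1.x responses are pydantic models, so they can also be dumped to a plain
# dict, e.g. for logging token usage.
print(text)
print(completion.model_dump()["usage"])
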
3 changes: 0 additions & 3 deletions tracex/extraction/prototype/input_handling.py
@@ -1,12 +1,9 @@
"""Module providing functions for converting text to CSV."""
import csv

import openai

from . import utils as u
from . import prompts as p

openai.api_key = u.oaik


def convert_text_to_csv(inp):
3 changes: 0 additions & 3 deletions tracex/extraction/prototype/input_inquiry.py
@@ -1,12 +1,9 @@
"""Module providing functions for the input inquiry of the prototype."""
import os

import openai

from . import utils as u
from . import prompts as p

openai.api_key = u.oaik


def greeting():
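
input_handling.py and input_inquiry.py only lose their now-unused openai import and module-level api_key assignment; their requests appear to go through the shared helper in utils.py (shown further below), so no per-module configuration is needed with the 1.x client. A hedged sketch of that layout, with hypothetical names standing in for the repository's modules:

from openai import OpenAI

# Shared client, configured once; the analogous object in this PR lives in
# tracex/extraction/prototype/utils.py.
client = OpenAI()  # key taken from OPENAI_API_KEY


def query_gpt_sketch(messages, model="gpt-3.5-turbo", max_tokens=256, temperature=0.5):
    """Stand-in for the repository's utils.query_gpt helper (defaults are assumed)."""
    response = client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
    )
    return response.choices[0].message.content


def convert_text_to_csv_sketch(text):
    """Hypothetical consumer module: no openai import or api_key needed here."""
    return query_gpt_sketch([{"role": "user", "content": f"Convert this to CSV rows: {text}"}])
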
17 changes: 5 additions & 12 deletions tracex/extraction/prototype/prompts.py
@@ -2,11 +2,10 @@
"""Module providing the needed prompts for the gpt_queries."""
import random

import openai

from openai import OpenAI
from . import utils as u

openai.api_key = u.oaik
client = OpenAI(api_key=u.oaik)


def create_patient_journey_context():
@@ -42,9 +41,7 @@ def get_sex():
def get_country():
"""Randomizing country."""
message = [{"role": "user", "content": "Please give me one european country."}]
country = openai.ChatCompletion.create(
model=u.MODEL, messages=message, max_tokens=50, temperature=0.2
)
country = client.chat.completions.create(model=u.MODEL, messages=message, max_tokens=50, temperature=0.2)
return country.choices[0].message.content


@@ -56,18 +53,14 @@ def get_date():
"content": "Please give me one date between 01/01/2020 and 01/09/2023.",
}
]
country = openai.ChatCompletion.create(
model=u.MODEL, messages=message, max_tokens=50, temperature=0.5
)
country = client.chat.completions.create(model=u.MODEL, messages=message, max_tokens=50, temperature=0.5)
return country.choices[0].message.content


def get_life_circumstances(sex):
"""Randomizing life circumstances."""
message = [{"role": "user", "content": life_circumstances_prompt(sex)}]
life_circumstances = openai.ChatCompletion.create(
model=u.MODEL, messages=message, max_tokens=100, temperature=1
)
life_circumstances = client.chat.completions.create(model=u.MODEL, messages=message, max_tokens=100, temperature=1)
return life_circumstances.choices[0].message.content


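
The randomization helpers in prompts.py keep their per-call max_tokens and temperature values; only the client instantiation and the call site change, from openai.ChatCompletion.create to client.chat.completions.create. A compact sketch of the same pattern, with an optional error-handling branch the commit itself does not add (the 1.x SDK raises exceptions such as openai.APIError from the top-level package):

import openai
from openai import OpenAI

client = OpenAI()  # key taken from OPENAI_API_KEY


def random_country(temperature=0.2):
    """Mirror of the get_country pattern above; the prompt text is copied from the diff."""
    messages = [{"role": "user", "content": "Please give me one european country."}]
    try:
        completion = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=messages,
            max_tokens=50,
            temperature=temperature,
        )
    except openai.APIError as err:  # not part of this commit, shown for illustration
        raise RuntimeError(f"country generation failed: {err}") from err
    return completion.choices[0].message.content


print(random_country())
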
14 changes: 7 additions & 7 deletions tracex/extraction/prototype/utils.py
@@ -4,7 +4,9 @@
from pathlib import Path
from django.conf import settings

import openai
from openai import OpenAI

client = OpenAI()

output_path = settings.BASE_DIR / Path(
"extraction/content/outputs/"
@@ -41,11 +43,9 @@ def get_decision(question):

def query_gpt(messages, temperature=TEMPERATURE_SUMMARIZING):
"""Queries the GPT engine."""
response = openai.ChatCompletion.create(
model=MODEL,
messages=messages,
max_tokens=MAX_TOKENS,
temperature=temperature,
)
response = client.chat.completions.create(model=MODEL,
messages=messages,
max_tokens=MAX_TOKENS,
temperature=temperature)
output = response.choices[0].message.content
return output
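
utils.py now constructs the client with a bare OpenAI() call, so the key is no longer wired through openai.api_key; the 1.x client falls back to the OPENAI_API_KEY environment variable (or an explicit api_key argument) instead. A short sketch of the two equivalent ways to hand the key to the client under that assumption (prompt and limits are illustrative):

import os
from openai import OpenAI

# The bare OpenAI() used in utils.py corresponds to the first form below:
# the client resolves its key from OPENAI_API_KEY at construction time.
client_from_env = OpenAI()                                      # reads OPENAI_API_KEY
client_explicit = OpenAI(api_key=os.environ["OPENAI_API_KEY"])  # same key, passed explicitly

response = client_from_env.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Reply with the word 'ready'."}],
    max_tokens=5,
    temperature=0,
)
print(response.choices[0].message.content)
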
