Commit

Update app-recipe.py to latest AOAI sdk
carlotta94c authored Nov 30, 2023
1 parent 1501539 commit 14c1579
Showing 1 changed file with 10 additions and 20 deletions.
06-text-generation-apps/app-recipe.py (30 changes: 10 additions & 20 deletions)
@@ -1,17 +1,18 @@
-import openai
+from openai import AzureOpenAI
 import os
 import dotenv
 
 # import dotenv
 dotenv.load_dotenv()
 
-openai.api_key = os.getenv("API_KEY")
-
-# enable below if you use Azure Open AI
-openai.api_type = 'azure'
-openai.api_version = '2023-05-15'
-openai.api_base = os.getenv("API_BASE")
+# configure Azure OpenAI service client
+client = AzureOpenAI(
+    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
+    api_key=os.environ["AZURE_OPENAI_KEY"],
+    api_version="2023-10-01-preview"
+)
 
+deployment = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 
 no_recipes = input("No of recipes (for example, 5): ")
 
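For context: this first hunk replaces the pre-1.0 module-level settings (openai.api_type, openai.api_base, openai.api_version, openai.api_key) with a single AzureOpenAI client object from the 1.x openai SDK. A minimal standalone sketch of the same setup, assuming the three environment variables referenced above are supplied via a local .env file:

import os

import dotenv
from openai import AzureOpenAI  # requires the openai 1.x SDK

# Load AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_KEY and AZURE_OPENAI_DEPLOYMENT
# from a local .env file into the process environment.
dotenv.load_dotenv()

# One client instance replaces the old api_type/api_base/api_version/api_key
# module-level configuration; using AzureOpenAI implies the Azure endpoint shape.
client = AzureOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    api_key=os.environ["AZURE_OPENAI_KEY"],
    api_version="2023-10-01-preview",
)

# The Azure deployment name is read once and passed as `model` on each request.
deployment = os.environ["AZURE_OPENAI_DEPLOYMENT"]
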
@@ -22,14 +23,7 @@
 # interpolate the number of recipes and the ingredients into the prompt
 prompt = f"Show me {no_recipes} recipes for a dish with the following ingredients: {ingredients}. Per recipe, list all the ingredients used, no {filter}: "
 
-
-# engine
-engine = os.getenv("ENGINE")
-
-# deployment_id
-deployment_name = os.getenv("DEPLOYMENT_NAME")
-
-completion = openai.Completion.create(engine=deployment_name, prompt=prompt, max_tokens=600, temperature=0.1)
+completion = client.completions.create(model=deployment, prompt=prompt, max_tokens=600, temperature=0.1)
 
 # print response
 print("Recipes:")
@@ -39,13 +33,9 @@
 prompt_shopping = "Produce a shopping list, and please don't include ingredients that I already have at home: "
 
 new_prompt = f"Given ingredients at home {ingredients} and these generated recipes: {old_prompt_result}, {prompt_shopping}"
-completion = openai.Completion.create(engine=deployment_name, prompt=new_prompt, max_tokens=600)
+completion = client.completions.create(model=deployment, prompt=new_prompt, max_tokens=600)
 
 # print response
 print("\n=====Shopping list ======= \n")
 print(completion.choices[0].text)
-
-# very unhappy _____.
-
-# Once upon a time there was a very unhappy mermaid.
-
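
Taken together, the remaining hunks swap the legacy openai.Completion.create(engine=deployment_name, ...) calls for client.completions.create(model=deployment, ...), and the generated text is still read from choices[0].text. A minimal end-to-end sketch of the migrated call pattern, assuming the same environment variables as above and an illustrative hard-coded prompt in place of the script's interactive input:

import os

import dotenv
from openai import AzureOpenAI  # requires the openai 1.x SDK

dotenv.load_dotenv()

client = AzureOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    api_key=os.environ["AZURE_OPENAI_KEY"],
    api_version="2023-10-01-preview",
)
deployment = os.environ["AZURE_OPENAI_DEPLOYMENT"]

# The deployment name is passed as `model`; the old `engine` keyword is gone.
completion = client.completions.create(
    model=deployment,
    prompt="Show me 2 recipes for a dish with the following ingredients: chicken, rice",
    max_tokens=600,
    temperature=0.1,
)

# The 1.x SDK returns a typed response object, but for the completions
# endpoint the generated text is still available at choices[0].text.
print(completion.choices[0].text)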