Release/promptflow/1.1.0 #1320

Merged: 7 commits, Nov 30, 2023
124 changes: 62 additions & 62 deletions examples/flows/chat/chat-with-pdf/chat_with_pdf/utils/oai.py
@@ -1,5 +1,6 @@
 from typing import List
 import openai
+from openai.version import VERSION as OPENAI_VERSION
 import os
 import tiktoken
 from jinja2 import Template
@@ -25,106 +26,105 @@ def extract_delay_from_rate_limit_error_msg(text):

 class OAI:
     def __init__(self):
-        if os.getenv("OPENAI_API_TYPE") is not None:
-            openai.api_type = os.getenv("OPENAI_API_TYPE")
-        if os.getenv("OPENAI_API_BASE") is not None:
-            openai.api_base = os.environ.get("OPENAI_API_BASE")
+        if OPENAI_VERSION.startswith("0."):
+            raise Exception(
+                "Please upgrade your OpenAI package to version >= 1.0.0 or "
+                "using the command: pip install --upgrade openai."
+            )
+        init_params = {}
+        api_type = os.environ.get("OPENAI_API_TYPE")
         if os.getenv("OPENAI_API_VERSION") is not None:
-            openai.api_version = os.environ.get("OPENAI_API_VERSION")
+            init_params["api_version"] = os.environ.get("OPENAI_API_VERSION")
         if os.getenv("OPENAI_ORG_ID") is not None:
-            openai.organization = os.environ.get("OPENAI_ORG_ID")
+            init_params["organization"] = os.environ.get("OPENAI_ORG_ID")
         if os.getenv("OPENAI_API_KEY") is None:
             raise ValueError("OPENAI_API_KEY is not set in environment variables")
+        if os.getenv("OPENAI_API_BASE") is not None:
+            if api_type == "azure":
+                init_params["azure_endpoint"] = os.environ.get("OPENAI_API_BASE")
+            else:
+                init_params["base_url"] = os.environ.get("OPENAI_API_BASE")

-        openai.api_key = os.environ.get("OPENAI_API_KEY")
+        init_params["api_key"] = os.environ.get("OPENAI_API_KEY")

         # A few sanity checks
-        if openai.api_type == "azure" and openai.api_base is None:
-            raise ValueError(
-                "OPENAI_API_BASE is not set in environment variables, this is required when api_type==azure"
-            )
-        if openai.api_type == "azure" and openai.api_version is None:
-            raise ValueError(
-                "OPENAI_API_VERSION is not set in environment variables, this is required when api_type==azure"
-            )
-        if openai.api_type == "azure" and openai.api_key.startswith("sk-"):
-            raise ValueError(
-                "OPENAI_API_KEY should not start with sk- when api_type==azure, are you using openai key by mistake?"
-            )
+        if api_type == "azure":
+            if init_params.get("azure_endpoint") is None:
+                raise ValueError(
+                    "OPENAI_API_BASE is not set in environment variables, this is required when api_type==azure"
+                )
+            if init_params.get("api_version") is None:
+                raise ValueError(
+                    "OPENAI_API_VERSION is not set in environment variables, this is required when api_type==azure"
+                )
+            if init_params["api_key"].startswith("sk-"):
+                raise ValueError(
+                    "OPENAI_API_KEY should not start with sk- when api_type==azure, "
+                    "are you using openai key by mistake?"
+                )
+            from openai import AzureOpenAI as Client
+        else:
+            from openai import OpenAI as Client
+        self.client = Client(**init_params)


 class OAIChat(OAI):
     @retry_and_handle_exceptions(
         exception_to_check=(
-            openai.error.RateLimitError,
-            openai.error.APIError,
+            openai.RateLimitError,
+            openai.APIStatusError,
+            openai.APIConnectionError,
             KeyError,
         ),
         max_retries=5,
         extract_delay_from_error_message=extract_delay_from_rate_limit_error_msg,
     )
     def generate(self, messages: list, **kwargs) -> List[float]:
-        if openai.api_type == "azure":
-            return openai.ChatCompletion.create(
-                engine=os.environ.get("CHAT_MODEL_DEPLOYMENT_NAME"),
-                messages=messages,
-                **kwargs,
-            )["choices"][0]["message"]["content"]
-        else:
-            return openai.ChatCompletion.create(
-                model=os.environ.get("CHAT_MODEL_DEPLOYMENT_NAME"),
-                messages=messages,
-                **kwargs,
-            )["choices"][0]["message"]["content"]
+        # chat api may return message with no content.
+        message = self.client.chat.completions.create(
+            model=os.environ.get("CHAT_MODEL_DEPLOYMENT_NAME"),
+            messages=messages,
+            **kwargs,
+        ).choices[0].message
+        return getattr(message, "content", "")

     @retry_and_handle_exceptions_for_generator(
         exception_to_check=(
-            openai.error.RateLimitError,
-            openai.error.APIError,
+            openai.RateLimitError,
+            openai.APIStatusError,
+            openai.APIConnectionError,
             KeyError,
         ),
         max_retries=5,
         extract_delay_from_error_message=extract_delay_from_rate_limit_error_msg,
     )
     def stream(self, messages: list, **kwargs):
-        if openai.api_type == "azure":
-            response = openai.ChatCompletion.create(
-                engine=os.environ.get("CHAT_MODEL_DEPLOYMENT_NAME"),
-                messages=messages,
-                stream=True,
-                **kwargs,
-            )
-        else:
-            response = openai.ChatCompletion.create(
-                model=os.environ.get("CHAT_MODEL_DEPLOYMENT_NAME"),
-                messages=messages,
-                stream=True,
-                **kwargs,
-            )
+        response = self.client.chat.completions.create(
+            model=os.environ.get("CHAT_MODEL_DEPLOYMENT_NAME"),
+            messages=messages,
+            stream=True,
+            **kwargs,
+        )

         for chunk in response:
-            if "choices" not in chunk or len(chunk["choices"]) == 0:
+            if not chunk.choices:
                 continue
-            delta = chunk["choices"][0]["delta"]
-            if "content" in delta:
-                yield delta["content"]
+            if chunk.choices[0].delta.content:
+                yield chunk.choices[0].delta.content
+            else:
+                yield ""


 class OAIEmbedding(OAI):
     @retry_and_handle_exceptions(
-        exception_to_check=openai.error.RateLimitError,
+        exception_to_check=openai.RateLimitError,
         max_retries=5,
         extract_delay_from_error_message=extract_delay_from_rate_limit_error_msg,
     )
     def generate(self, text: str) -> List[float]:
-        if openai.api_type == "azure":
-            return openai.Embedding.create(
-                input=text, engine=os.environ.get("EMBEDDING_MODEL_DEPLOYMENT_NAME")
-            )["data"][0]["embedding"]
-        else:
-            return openai.Embedding.create(
-                input=text, model=os.environ.get("EMBEDDING_MODEL_DEPLOYMENT_NAME")
-            )["data"][0]["embedding"]
+        return self.client.embeddings.create(
+            input=text, model=os.environ.get("EMBEDDING_MODEL_DEPLOYMENT_NAME")
+        ).data[0].embedding


 def count_token(text: str) -> int:
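For orientation, a minimal usage sketch of the migrated wrappers (not part of the diff; the import path and the environment values are illustrative assumptions):

    # Illustrative sketch: assumes OPENAI_API_KEY (plus OPENAI_API_TYPE, OPENAI_API_BASE
    # and OPENAI_API_VERSION for Azure), CHAT_MODEL_DEPLOYMENT_NAME and
    # EMBEDDING_MODEL_DEPLOYMENT_NAME are already set in the environment.
    from chat_with_pdf.utils.oai import OAIChat, OAIEmbedding

    chat = OAIChat()
    answer = chat.generate(messages=[{"role": "user", "content": "Summarize the PDF."}])
    print(answer)

    # stream() yields content deltas as they arrive.
    for token in chat.stream(messages=[{"role": "user", "content": "Hello"}]):
        print(token, end="")

    vector = OAIEmbedding().generate("text to embed")  # List[float]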
@@ -1,11 +1,11 @@
-from typing import Tuple, Union, Optional
+from typing import Tuple, Union, Optional, Type
 import functools
 import time
 import random


 def retry_and_handle_exceptions(
-    exception_to_check: Union[Exception, Tuple[Exception]],
+    exception_to_check: Union[Type[Exception], Tuple[Type[Exception], ...]],
     max_retries: int = 3,
     initial_delay: float = 1,
     exponential_base: float = 2,
@@ -48,7 +48,7 @@ def wrapper(*args, **kwargs):


 def retry_and_handle_exceptions_for_generator(
-    exception_to_check: Union[Exception, Tuple[Exception]],
+    exception_to_check: Union[Type[Exception], Tuple[Type[Exception], ...]],
     max_retries: int = 3,
     initial_delay: float = 1,
     exponential_base: float = 2,
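The annotation change is deliberate: these decorators receive exception classes (for example openai.RateLimitError), not instances, which is what Type[Exception] expresses. A short, self-contained sketch of the distinction (illustrative, not part of the diff):

    from typing import Tuple, Type, Union

    # `except` clauses (and these retry decorators) accept a class or a tuple of classes.
    ExceptionSpec = Union[Type[Exception], Tuple[Type[Exception], ...]]

    def swallow(exception_to_check: ExceptionSpec) -> None:
        try:
            raise TimeoutError("simulated transient failure")
        except exception_to_check:  # a class or tuple of classes is valid here
            pass

    swallow(TimeoutError)
    swallow((TimeoutError, ConnectionError))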
32 changes: 25 additions & 7 deletions examples/flows/standard/basic-with-connection/hello.py
@@ -1,5 +1,5 @@
 from typing import Union
-import openai
+from openai.version import VERSION as OPENAI_VERSION

 from promptflow import tool
 from promptflow.connections import CustomConnection, AzureOpenAIConnection
@@ -13,6 +13,28 @@ def to_bool(value) -> bool:
     return str(value).lower() == "true"


+def get_client(connection: Union[CustomConnection, AzureOpenAIConnection]):
+    if OPENAI_VERSION.startswith("0."):
+        raise Exception(
+            "Please upgrade your OpenAI package to version >= 1.0.0 or using the command: pip install --upgrade openai."
+        )
+    # connection can be extract as a dict object contains the configs and secrets
+    connection_dict = dict(connection)
+    api_key = connection_dict.get("api_key")
+    conn = dict(
+        api_key=api_key,
+    )
+    if api_key.startswith("sk-"):
+        from openai import OpenAI as Client
+    else:
+        from openai import AzureOpenAI as Client
+        conn.update(
+            azure_endpoint=connection_dict.get("api_base"),
+            api_version=connection_dict.get("api_version", "2023-07-01-preview"),
+        )
+    return Client(**conn)


 @tool
 def my_python_tool(
     prompt: str,
@@ -37,12 +59,10 @@ def my_python_tool(

     # TODO: remove below type conversion after client can pass json rather than string.
     echo = to_bool(echo)
-    # connection can be extract as a dict object contains the configs and secrets
-    connection_dict = dict(connection)

-    response = openai.Completion.create(
+    response = get_client(connection).completions.create(
         prompt=prompt,
-        engine=deployment_name,
+        model=deployment_name,
         # empty string suffix should be treated as None.
         suffix=suffix if suffix else None,
         max_tokens=int(max_tokens),
@@ -59,8 +79,6 @@ def my_python_tool(
         # Logit bias must be a dict if we passed it to openai api.
         logit_bias=logit_bias if logit_bias else {},
         user=user,
-        request_timeout=30,
-        **connection_dict,
     )

     # get first element because prompt is single.
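As a usage illustration (not part of the diff), a CustomConnection carrying the keys that get_client() reads could be wired in roughly like this; all field values are placeholders:

    from promptflow.connections import CustomConnection

    # Placeholder values; get_client() reads api_key, api_base and api_version from
    # dict(connection), which exposes the connection's configs and secrets as one dict.
    connection = CustomConnection(
        secrets={"api_key": "<azure-openai-key>"},
        configs={
            "api_base": "https://<resource>.openai.azure.com/",
            "api_version": "2023-07-01-preview",
        },
    )

    client = get_client(connection)
    completion = client.completions.create(
        model="<completion-deployment-name>",
        prompt="Say hello.",
        max_tokens=16,
    )
    print(completion.choices[0].text)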
35 changes: 23 additions & 12 deletions examples/flows/standard/basic/hello.py
@@ -1,5 +1,5 @@
 import os
-import openai
+from openai.version import VERSION as OPENAI_VERSION

 from dotenv import load_dotenv
 from promptflow import tool
@@ -13,6 +13,26 @@ def to_bool(value) -> bool:
     return str(value).lower() == "true"


+def get_client():
+    if OPENAI_VERSION.startswith("0."):
+        raise Exception(
+            "Please upgrade your OpenAI package to version >= 1.0.0 or using the command: pip install --upgrade openai."
+        )
+    api_key = os.environ["AZURE_OPENAI_API_KEY"]
+    conn = dict(
+        api_key=os.environ["AZURE_OPENAI_API_KEY"],
+    )
+    if api_key.startswith("sk-"):
+        from openai import OpenAI as Client
+    else:
+        from openai import AzureOpenAI as Client
+        conn.update(
+            azure_endpoint=os.environ["AZURE_OPENAI_API_BASE"],
+            api_version=os.environ.get("AZURE_OPENAI_API_VERSION", "2023-07-01-preview"),
+        )
+    return Client(**conn)


 @tool
 def my_python_tool(
     prompt: str,
@@ -40,19 +60,12 @@ def my_python_tool(
if "AZURE_OPENAI_API_KEY" not in os.environ:
raise Exception("Please specify environment variables: AZURE_OPENAI_API_KEY")

conn = dict(
api_key=os.environ["AZURE_OPENAI_API_KEY"],
api_base=os.environ["AZURE_OPENAI_API_BASE"],
api_type=os.environ.get("AZURE_OPENAI_API_TYPE", "azure"),
api_version=os.environ.get("AZURE_OPENAI_API_VERSION", "2023-07-01-preview"),
)

# TODO: remove below type conversion after client can pass json rather than string.
echo = to_bool(echo)

response = openai.Completion.create(
response = get_client().completions.create(
prompt=prompt,
engine=deployment_name,
model=deployment_name,
# empty string suffix should be treated as None.
suffix=suffix if suffix else None,
max_tokens=int(max_tokens),
@@ -69,8 +82,6 @@ def my_python_tool(
         # Logit bias must be a dict if we passed it to openai api.
         logit_bias=logit_bias if logit_bias else {},
         user=user,
-        request_timeout=30,
-        **conn,
     )

     # get first element because prompt is single.
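And a corresponding sketch for this environment-variable-driven variant (illustrative only; values are placeholders, not part of the diff):

    import os

    # get_client() above targets Azure OpenAI whenever the key does not start with "sk-".
    os.environ["AZURE_OPENAI_API_KEY"] = "<azure-openai-key>"
    os.environ["AZURE_OPENAI_API_BASE"] = "https://<resource>.openai.azure.com/"
    # AZURE_OPENAI_API_VERSION is optional and defaults to "2023-07-01-preview".

    client = get_client()
    response = client.completions.create(
        model="<completion-deployment-name>",
        prompt="Say hello.",
        max_tokens=16,
    )
    print(response.choices[0].text)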