Merge branch 'main' into pf-component-example
alainli0928 authored Mar 26, 2024
2 parents a708553 + 3929c6c commit 15ef5f7
Showing 23 changed files with 154 additions and 132 deletions.
15 changes: 0 additions & 15 deletions .github/workflows/promptflow-release-testing-matrix.yml
@@ -97,11 +97,6 @@ jobs:
gci ./promptflow -Recurse | % {if ($_.Name.Contains('.whl')) {python -m pip install "$($_.FullName)"}}
gci ./promptflow-tools -Recurse | % {if ($_.Name.Contains('.whl')) {python -m pip install $_.FullName}}
pip freeze
- name: Install tracing
shell: pwsh
working-directory: ${{ env.TRACING_PATH }}
run: |
poetry install --only main
- name: Run SDK CLI Test
shell: pwsh
working-directory: ${{ env.testWorkingDirectory }}
@@ -180,11 +175,6 @@ jobs:
gci ./promptflow -Recurse | % {if ($_.Name.Contains('.whl')) {python -m pip install "$($_.FullName)[azure]"}}
gci ./promptflow-tools -Recurse | % {if ($_.Name.Contains('.whl')) {python -m pip install "$($_.FullName)"}}
pip freeze
- name: Install tracing
shell: pwsh
working-directory: ${{ env.TRACING_PATH }}
run: |
poetry install --only main
- name: Run SDK CLI Azure Test
shell: pwsh
working-directory: ${{ env.testWorkingDirectory }}
@@ -242,11 +232,6 @@ jobs:
gci ./promptflow -Recurse | % {if ($_.Name.Contains('.whl')) {python -m pip install "$($_.FullName)[azure,executor-service]"}}
gci ./promptflow-tools -Recurse | % {if ($_.Name.Contains('.whl')) {python -m pip install $_.FullName}}
pip freeze
- name: Install tracing
shell: pwsh
working-directory: ${{ env.TRACING_PATH }}
run: |
poetry install --only main
- name: Run Executor Test
shell: pwsh
working-directory: ${{ github.workspace }}
1 change: 1 addition & 0 deletions .github/workflows/promptflow-sdk-cli-test.yml
@@ -5,6 +5,7 @@ on:
pull_request:
paths:
- src/promptflow/**
- src/promptflow-tracing/**
- scripts/building/**
- .github/workflows/promptflow-sdk-cli-test.yml
workflow_dispatch:
3 changes: 2 additions & 1 deletion .github/workflows/sdk-cli-azure-test-pull-request.yml
@@ -8,6 +8,7 @@ on:
paths:
- src/promptflow/**
- scripts/building/**
- src/promptflow-tracing/**
- .github/workflows/sdk-cli-azure-test-pull-request.yml


@@ -58,7 +59,7 @@ jobs:
# replay tests can cover more combinations
os: [ubuntu-latest]
pythonVersion: ['3.8', '3.9', '3.10', '3.11']

runs-on: ${{ matrix.os }}
steps:
- name: checkout
2 changes: 1 addition & 1 deletion examples/connections/connection.ipynb
@@ -46,7 +46,7 @@
"metadata": {},
"outputs": [],
"source": [
"from promptflow import PFClient\n",
"from promptflow.client import PFClient\n",
"\n",
"# client can help manage your runs and connections.\n",
"client = PFClient()"
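For reference, the notebook cell above in plain-script form (a minimal sketch; the connection-listing call at the end is an assumption about the client API, not part of this diff):

from promptflow.client import PFClient  # public import path, replacing `from promptflow import PFClient`

# client can help manage your runs and connections.
pf = PFClient()

# Assumed usage: enumerate the connections visible to the local client.
for conn in pf.connections.list():
    print(conn.name, conn.type)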
2 changes: 1 addition & 1 deletion examples/flows/standard/gen-docstring/main.py
@@ -2,7 +2,7 @@
from file import File
from diff import show_diff
from load_code_tool import load_code
from promptflow import PFClient
from promptflow.client import PFClient
from pathlib import Path


14 changes: 7 additions & 7 deletions examples/flows/standard/maths-to-code/math_test.ipynb
@@ -8,7 +8,7 @@
"source": [
"# setup pf client and execution path\n",
"\n",
"from promptflow import PFClient\n",
"from promptflow.client import PFClient\n",
"import json\n",
"import os\n",
"\n",
@@ -28,8 +28,8 @@
"source": [
"# start batch run of maths-to-code\n",
"base_run = pf.run(\n",
" flow = flow, \n",
" data = data, \n",
" flow = flow,\n",
" data = data,\n",
" column_mapping={\"math_question\": \"${data.question}\"},\n",
" display_name=\"maths_to_code_batch_run\",\n",
" stream=True\n",
@@ -277,8 +277,8 @@
"source": [
"# evaluate against the batch run and groundtruth data\n",
"eval_run = pf.run(\n",
" flow = eval_flow, \n",
" data = data, \n",
" flow = eval_flow,\n",
" data = data,\n",
" run = base_run,\n",
" column_mapping={\"groundtruth\": \"${data.answer}\", \"prediction\": \"${run.outputs.answer}\"},\n",
" display_name=\"maths_to_code_eval_run\",\n",
@@ -643,8 +643,8 @@
"# evaluation run against base run\n",
"\n",
"eval_run = pf.run(\n",
" flow = eval_flow, \n",
" data = data, \n",
" flow = eval_flow,\n",
" data = data,\n",
" run = base_run,\n",
" column_mapping={\"groundtruth\": \"${data.answer}\", \"prediction\": \"${run.outputs.answer}\"},\n",
" stream = True,\n",
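Stripped of diff markup, the two calls these notebook cells make look like this (flow and data paths are placeholders; the arguments mirror the cells above):

from promptflow.client import PFClient

pf = PFClient()

# Placeholder paths standing in for the notebook's flow/data variables.
flow = "../maths-to-code"
eval_flow = "../../evaluation/eval-accuracy"
data = "./test_data.jsonl"

# Batch run of maths-to-code over the dataset.
base_run = pf.run(
    flow=flow,
    data=data,
    column_mapping={"math_question": "${data.question}"},
    display_name="maths_to_code_batch_run",
    stream=True,
)

# Evaluation run that joins the batch run's outputs with the ground-truth data.
eval_run = pf.run(
    flow=eval_flow,
    data=data,
    run=base_run,
    column_mapping={"groundtruth": "${data.answer}", "prediction": "${run.outputs.answer}"},
    display_name="maths_to_code_eval_run",
    stream=True,
)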
@@ -3,7 +3,7 @@

from flask import Flask, jsonify, request

from promptflow import load_flow
from promptflow.client import load_flow
from promptflow.connections import AzureOpenAIConnection
from promptflow.entities import FlowContext
from promptflow.exceptions import SystemErrorException, UserErrorException
2 changes: 1 addition & 1 deletion examples/tutorials/get-started/flow-as-function.ipynb
@@ -36,7 +36,7 @@
"metadata": {},
"outputs": [],
"source": [
"from promptflow import load_flow\n",
"from promptflow.client import load_flow\n",
"\n",
"\n",
"flow_path = \"../../flows/standard/web-classification\"\n",
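The updated cell as a standalone script (the cell above shows only the import and the flow path; the input name used when calling the loaded flow is an assumption):

from promptflow.client import load_flow  # public import path, replacing `from promptflow import load_flow`

flow_path = "../../flows/standard/web-classification"
flow_func = load_flow(flow_path)

# Assumed input name for the web-classification flow; adjust to the flow's declared inputs.
flow_result = flow_func(url="https://www.example.com")
print(flow_result)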
6 changes: 3 additions & 3 deletions examples/tutorials/get-started/quickstart.ipynb
@@ -58,7 +58,7 @@
"outputs": [],
"source": [
"import json\n",
"from promptflow import PFClient\n",
"from promptflow.client import PFClient\n",
"from promptflow.connections import AzureOpenAIConnection, OpenAIConnection\n",
"\n",
"# client can help manage your runs and connections.\n",
@@ -174,7 +174,7 @@
"metadata": {},
"outputs": [],
"source": [
"from promptflow import load_flow\n",
"from promptflow.client import load_flow\n",
"\n",
"flow_func = load_flow(flow)\n",
"flow_result = flow_func(**flow_inputs)\n",
@@ -431,7 +431,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.13"
"version": "3.9.18"
},
"resources": "examples/requirements.txt, examples/flows/standard/web-classification, examples/flows/evaluation/eval-classification-accuracy"
},
4 changes: 2 additions & 2 deletions examples/tutorials/run-management/run-management.ipynb
@@ -58,7 +58,7 @@
"outputs": [],
"source": [
"import json\n",
"from promptflow import PFClient\n",
"from promptflow.client import PFClient\n",
"from promptflow.connections import AzureOpenAIConnection, OpenAIConnection\n",
"\n",
"# client can help manage your runs and connections.\n",
@@ -113,7 +113,7 @@
"metadata": {},
"outputs": [],
"source": [
"from promptflow._sdk._load_functions import load_run\n",
"from promptflow.client import load_run\n",
"\n",
"# load a run from YAML file\n",
"base_run = load_run(\n",
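In plain-script form, the updated cell loads the run definition through the public client module instead of the private _sdk module (the YAML path is a placeholder, and the submit call is an assumption about how the loaded run is then used):

from promptflow.client import PFClient, load_run  # replaces promptflow._sdk._load_functions

pf = PFClient()

# Load a run definition from a YAML file (placeholder path), then submit it.
base_run = load_run(source="./run.yml")
submitted_run = pf.runs.create_or_update(run=base_run)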
4 changes: 4 additions & 0 deletions src/promptflow-tracing/README.md
@@ -4,6 +4,10 @@ Prompt flow tracing.

# Release History

## 1.0.0 (2024.03.21)

- Compatible with promptflow 1.7.0.

## 0.1.0b2 (2024.03.18)

- First preview version.
@@ -21,16 +21,16 @@
IS_LEGACY_OPENAI = version("openai").startswith("0.")


def inject_function_async(args_to_ignore=None, trace_type=TraceType.LLM):
def inject_function_async(args_to_ignore=None, trace_type=TraceType.LLM, name=None):
def decorator(func):
return _traced_async(func, args_to_ignore=args_to_ignore, trace_type=trace_type)
return _traced_async(func, args_to_ignore=args_to_ignore, trace_type=trace_type, name=name)

return decorator


def inject_function_sync(args_to_ignore=None, trace_type=TraceType.LLM):
def inject_function_sync(args_to_ignore=None, trace_type=TraceType.LLM, name=None):
def decorator(func):
return _traced_sync(func, args_to_ignore=args_to_ignore, trace_type=trace_type)
return _traced_sync(func, args_to_ignore=args_to_ignore, trace_type=trace_type, name=name)

return decorator

@@ -90,60 +90,67 @@ def wrapper(*args, **kwargs):
return wrapper


def inject_async(f, trace_type):
def inject_async(f, trace_type, name):
wrapper_fun = inject_operation_headers(
(inject_function_async(["api_key", "headers", "extra_headers"], trace_type)(f))
(inject_function_async(["api_key", "headers", "extra_headers"], trace_type, name)(f))
)
wrapper_fun._original = f
return wrapper_fun


def inject_sync(f, trace_type):
def inject_sync(f, trace_type, name):
wrapper_fun = inject_operation_headers(
(inject_function_sync(["api_key", "headers", "extra_headers"], trace_type)(f))
(inject_function_sync(["api_key", "headers", "extra_headers"], trace_type, name)(f))
)
wrapper_fun._original = f
return wrapper_fun


def _legacy_openai_apis():
sync_apis = (
("openai", "Completion", "create", TraceType.LLM, "openai_completion_legacy"),
("openai", "ChatCompletion", "create", TraceType.LLM, "openai_chat_legacy"),
("openai", "Embedding", "create", TraceType.EMBEDDING, "openai_embedding_legacy"),
)
async_apis = (
("openai", "Completion", "acreate", TraceType.LLM, "openai_completion_legacy"),
("openai", "ChatCompletion", "acreate", TraceType.LLM, "openai_chat_legacy"),
("openai", "Embedding", "acreate", TraceType.EMBEDDING, "openai_embedding_legacy"),
)
return sync_apis, async_apis


def _openai_apis():
sync_apis = (
("openai.resources.chat", "Completions", "create", TraceType.LLM, "openai_chat"),
("openai.resources", "Completions", "create", TraceType.LLM, "openai_completion"),
("openai.resources", "Embeddings", "create", TraceType.EMBEDDING, "openai_embeddings"),
)
async_apis = (
("openai.resources.chat", "AsyncCompletions", "create", TraceType.LLM, "openai_chat_async"),
("openai.resources", "AsyncCompletions", "create", TraceType.LLM, "openai_completion_async"),
("openai.resources", "AsyncEmbeddings", "create", TraceType.EMBEDDING, "openai_embeddings_async"),
)
return sync_apis, async_apis


def _openai_api_list():
if IS_LEGACY_OPENAI:
sync_apis = (
("openai", "Completion", "create", TraceType.LLM),
("openai", "ChatCompletion", "create", TraceType.LLM),
("openai", "Embedding", "create", TraceType.EMBEDDING),
)

async_apis = (
("openai", "Completion", "acreate", TraceType.LLM),
("openai", "ChatCompletion", "acreate", TraceType.LLM),
("openai", "Embedding", "acreate", TraceType.EMBEDDING),
)
sync_apis, async_apis = _legacy_openai_apis()
else:
sync_apis = (
("openai.resources.chat", "Completions", "create", TraceType.LLM),
("openai.resources", "Completions", "create", TraceType.LLM),
("openai.resources", "Embeddings", "create", TraceType.EMBEDDING),
)

async_apis = (
("openai.resources.chat", "AsyncCompletions", "create", TraceType.LLM),
("openai.resources", "AsyncCompletions", "create", TraceType.LLM),
("openai.resources", "AsyncEmbeddings", "create", TraceType.EMBEDDING),
)

sync_apis, async_apis = _openai_apis()
yield sync_apis, inject_sync
yield async_apis, inject_async


def _generate_api_and_injector(apis):
for apis, injector in apis:
for module_name, class_name, method_name, trace_type in apis:
for module_name, class_name, method_name, trace_type, name in apis:
try:
module = importlib.import_module(module_name)
api = getattr(module, class_name)
if hasattr(api, method_name):
yield api, method_name, trace_type, injector
yield api, method_name, trace_type, injector, name
except AttributeError as e:
# Log the attribute exception with the missing class information
logging.warning(
@@ -176,10 +183,10 @@ def inject_openai_api():
2. Updates the openai api configs from environment variables.
"""

for api, method, trace_type, injector in available_openai_apis_and_injectors():
for api, method, trace_type, injector, name in available_openai_apis_and_injectors():
# Check if the create method of the openai_api class has already been modified
if not hasattr(getattr(api, method), "_original"):
setattr(api, method, injector(getattr(api, method), trace_type))
setattr(api, method, injector(getattr(api, method), trace_type, name))

if IS_LEGACY_OPENAI:
# For the openai versions lower than 1.0.0, it reads api configs from environment variables only at
@@ -198,6 +205,6 @@ def recover_openai_api():
"""This function restores the original create methods of the OpenAI API classes
by assigning them back from the _original attributes of the modified methods.
"""
for api, method, _, _ in available_openai_apis_and_injectors():
for api, method, _, _, _ in available_openai_apis_and_injectors():
if hasattr(getattr(api, method), "_original"):
setattr(api, method, getattr(getattr(api, method), "_original"))
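A self-contained sketch of the patching pattern this module relies on (illustrative only, not the promptflow source): wrap a method, remember the original on an _original attribute so the patch is applied only once, and restore from that attribute on recovery.

import functools


class FakeClient:
    """Stand-in for an OpenAI API class such as a completions resource."""

    def create(self, prompt):
        return f"completion for {prompt!r}"


def traced(func, name):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print(f"start span: {name}")
        try:
            return func(*args, **kwargs)
        finally:
            print(f"end span: {name}")

    wrapper._original = func  # keep a handle so the patch can be undone
    return wrapper


def inject(api, method, name):
    # Patch only once: an already-wrapped method carries `_original`.
    if not hasattr(getattr(api, method), "_original"):
        setattr(api, method, traced(getattr(api, method), name))


def recover(api, method):
    patched = getattr(api, method)
    if hasattr(patched, "_original"):
        setattr(api, method, patched._original)


inject(FakeClient, "create", "openai_completion_demo")
print(FakeClient().create("2 + 2"))  # traced call
recover(FakeClient, "create")
print(FakeClient().create("2 + 2"))  # original, untraced call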