Skip to content

Commit

Permalink
Merge branch 'main' into clwan/sample_v1_11
Browse files Browse the repository at this point in the history
  • Loading branch information
wangchao1230 authored May 12, 2024
2 parents 5dad088 + 6d49c87 commit 6a33387
Show file tree
Hide file tree
Showing 65 changed files with 5,672 additions and 1,847 deletions.
11 changes: 11 additions & 0 deletions docs/reference/pf-command-reference.md
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,7 @@ pf flow serve --source
[--verbose]
[--debug]
[--skip-open-browser]
[--engine]
```

#### Examples
Expand All @@ -310,6 +311,12 @@ Serve flow as an endpoint with specific port and host.
pf flow serve --source <path-to-flow> --port <port> --host <host> --environment-variables key1="`${my_connection.api_key}`" key2="value2"
```

Serve flow as an endpoint with specific port, host, environment-variables and fastapi serving engine.

```bash
pf flow serve --source <path-to-flow> --port <port> --host <host> --environment-variables key1="`${my_connection.api_key}`" key2="value2" --engine fastapi
```

#### Required Parameter

`--source`
Expand Down Expand Up @@ -342,6 +349,10 @@ Show debug information during serve.

Skip opening browser after serve. Store true parameter.

`--engine`

Switch the Python serving engine between `flask` and `fastapi`; defaults to `flask`.

## pf connection

Manage prompt flow connections.
Expand Down
4 changes: 4 additions & 0 deletions src/promptflow-azure/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@

### Improvements
- Refine trace Cosmos DB setup process to print setup status during the process, and display error message from service when setup failed.
- Return the secrets in the connection object by default to improve flex flow experience.
- Behaviors not changed: 'pfazure connection' command will scrub secrets.
- New behavior: connection object by `client.connection.get` will have real secrets. `print(connection_obj)` directly will scrub those secrets. `print(connection_obj.api_key)` or `print(connection_obj.secrets)` will print the REAL secrets.
  - Workspace listsecrets permission is required to get the secrets. Call `client.connection.get(name, with_secrets=False)` if you want to get the connection without the secrets and without requiring the listsecrets permission.

## v1.10.0 (2024.04.26)

Expand Down
3 changes: 1 addition & 2 deletions src/promptflow-azure/promptflow/azure/_entities/_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,9 +161,8 @@ def _try_build_local_code(self) -> Optional[Code]:

# generate .promptflow/flow.json for csharp flow as it's required to infer signature for csharp flow
flow_directory, flow_file = resolve_flow_path(code.path)
# TODO: pass in init_kwargs to support csharp class init flex flow
ProxyFactory().create_inspector_proxy(self.language).prepare_metadata(
flow_file=flow_directory / flow_file, working_dir=flow_directory
flow_file=flow_directory / flow_file, working_dir=flow_directory, init_kwargs=self._init_kwargs
)
dag_updated = update_signatures(code=flow_dir, data=flow_dag) or dag_updated
# validate init kwargs with signature
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ Download swagger.json from [here](https://int.api.azureml-test.ms/flow/swagger/v
- 2024.3.14 - [Add enable_multi_container](https://github.com/microsoft/promptflow/pull/2313)
- 2024.4.7 - [Update SDK restclient](https://github.com/microsoft/promptflow/pull/2670)
- 2024.5.9 - [Support init Cosmos DB with setup API](https://github.com/microsoft/promptflow/pull/3167)
- 2024.5.10 - [Use new trace link instead of original run portal link](https://github.com/microsoft/promptflow/pull/3193)

## Troubleshooting

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19451,6 +19451,8 @@ class FlowRunInfo(msrest.serialization.Model):
:vartype session_id: str
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
:ivar studio_portal_trace_endpoint:
:vartype studio_portal_trace_endpoint: str
"""

_attribute_map = {
Expand All @@ -19476,6 +19478,7 @@ class FlowRunInfo(msrest.serialization.Model):
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
'studio_portal_trace_endpoint': {'key': 'studioPortalTraceEndpoint', 'type': 'str'},
}

def __init__(
Expand Down Expand Up @@ -19528,6 +19531,8 @@ def __init__(
:paramtype session_id: str
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
:keyword studio_portal_trace_endpoint:
:paramtype studio_portal_trace_endpoint: str
"""
super(FlowRunInfo, self).__init__(**kwargs)
self.flow_graph = kwargs.get('flow_graph', None)
Expand All @@ -19552,6 +19557,7 @@ def __init__(
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.session_id = kwargs.get('session_id', None)
self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None)
self.studio_portal_trace_endpoint = kwargs.get('studio_portal_trace_endpoint', None)


class FlowRunResult(msrest.serialization.Model):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21962,6 +21962,8 @@ class FlowRunInfo(msrest.serialization.Model):
:vartype session_id: str
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
:ivar studio_portal_trace_endpoint:
:vartype studio_portal_trace_endpoint: str
"""

_attribute_map = {
Expand All @@ -21987,6 +21989,7 @@ class FlowRunInfo(msrest.serialization.Model):
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
'studio_portal_trace_endpoint': {'key': 'studioPortalTraceEndpoint', 'type': 'str'},
}

def __init__(
Expand Down Expand Up @@ -22014,6 +22017,7 @@ def __init__(
flow_snapshot_id: Optional[str] = None,
session_id: Optional[str] = None,
studio_portal_endpoint: Optional[str] = None,
studio_portal_trace_endpoint: Optional[str] = None,
**kwargs
):
"""
Expand Down Expand Up @@ -22062,6 +22066,8 @@ def __init__(
:paramtype session_id: str
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
:keyword studio_portal_trace_endpoint:
:paramtype studio_portal_trace_endpoint: str
"""
super(FlowRunInfo, self).__init__(**kwargs)
self.flow_graph = flow_graph
Expand All @@ -22086,6 +22092,7 @@ def __init__(
self.flow_snapshot_id = flow_snapshot_id
self.session_id = session_id
self.studio_portal_endpoint = studio_portal_endpoint
self.studio_portal_trace_endpoint = studio_portal_trace_endpoint


class FlowRunResult(msrest.serialization.Model):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17879,6 +17879,10 @@
"studioPortalEndpoint": {
"type": "string",
"nullable": true
},
"studioPortalTraceEndpoint": {
"type": "string",
"nullable": true
}
},
"additionalProperties": false
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,11 @@ def __init__(
)

def get(self, name, **kwargs):
    """Retrieve a connection by name.

    By default the connection is fetched directly from the workspace so it
    carries real secret values (per the changelog, this requires the
    workspace ``listsecrets`` permission). Pass ``with_secrets=False`` to use
    the provider path instead, which returns the connection without secrets.

    :param name: Name of the connection to retrieve.
    :keyword with_secrets: Whether to fetch real secret values. Defaults to True.
    :return: The connection object.
    """
    include_secrets = kwargs.get("with_secrets", True)
    if not include_secrets:
        return _Connection._from_core_connection(self._provider.get(name))
    return self._direct_get(
        name, self._subscription_id, self._resource_group_name, self._workspace_name, self._credential
    )

@classmethod
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,8 +142,8 @@ def _get_run_portal_url(self, run_id: str):
except Exception as e:
logger.warning(f"Failed to get run portal url from pfs for run {run_id!r}: {str(e)}")

if run_info and hasattr(run_info, "studio_portal_endpoint"):
portal_url = run_info.studio_portal_endpoint
if run_info and hasattr(run_info, "studio_portal_trace_endpoint"):
portal_url = run_info.studio_portal_trace_endpoint

return portal_url

Expand Down Expand Up @@ -936,7 +936,7 @@ def download(
logger.info(f"Successfully downloaded run {run!r} to {result_path!r}.")
return result_path

def _upload(self, run: Union[str, Run]):
def _upload(self, run: Union[str, Run]) -> str:
from promptflow._sdk._pf_client import PFClient
from promptflow.azure.operations._async_run_uploader import AsyncRunUploader

Expand Down Expand Up @@ -973,16 +973,19 @@ def _upload(self, run: Union[str, Run]):
logger.debug(f"Successfully uploaded run details of {run!r} to cloud.")

# registry the run in the cloud
self._registry_existing_bulk_run(run=run)
self._register_existing_bulk_run(run=run)

# post process after run upload, it can only be done after the run history record is created
async_run_allowing_running_loop(run_uploader.post_process)

portal_url = self._get_run_portal_url(run_id=run.name)
# print portal url when executing in jupyter notebook
if in_jupyter_notebook():
print(f"Portal url: {self._get_run_portal_url(run_id=run.name)}")
print(f"Portal url: {portal_url}")

return portal_url

def _registry_existing_bulk_run(self, run: Run):
def _register_existing_bulk_run(self, run: Run):
"""Register the run in the cloud"""
rest_obj = run._to_rest_object()
self._service_caller.create_existing_bulk_run(
Expand Down
1 change: 1 addition & 0 deletions src/promptflow-core/promptflow/_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
PROMPTFLOW_SECRETS_FILE = "PROMPTFLOW_SECRETS_FILE"
PF_NO_INTERACTIVE_LOGIN = "PF_NO_INTERACTIVE_LOGIN"
PF_RUN_AS_BUILT_BINARY = "PF_RUN_AS_BUILT_BINARY"
PF_FLOW_INIT_CONFIG = "PF_FLOW_INIT_CONFIG"
ENABLE_MULTI_CONTAINER_KEY = "PF_ENABLE_MULTI_CONTAINER"
PF_LOGGING_LEVEL = "PF_LOGGING_LEVEL"
OPENAI_API_KEY = "openai-api-key"
Expand Down
7 changes: 5 additions & 2 deletions src/promptflow-core/promptflow/_utils/flow_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ def resolve_flow_path(
flow_path: Union[str, Path, PathLike],
base_path: Union[str, Path, PathLike, None] = None,
check_flow_exist: bool = True,
default_flow_file: str = FLOW_DAG_YAML,
) -> Tuple[Path, str]:
"""Resolve flow path and return the flow directory path and the file name of the target yaml.
Expand All @@ -79,6 +80,8 @@ def resolve_flow_path(
:param check_flow_exist: If True, the function will try to check the target yaml and
raise FileNotFoundError if not found.
If False, the function will return the flow directory path and the file name of the target yaml.
:param default_flow_file: Default file name used when flow file is not found.
:type default_flow_file: str
:return: The flow directory path and the file name of the target yaml.
:rtype: Tuple[Path, str]
"""
Expand All @@ -89,7 +92,7 @@ def resolve_flow_path(

if flow_path.is_dir():
flow_folder = flow_path
flow_file = FLOW_DAG_YAML
flow_file = default_flow_file
flow_file_list = []
for flow_name, suffix in itertools.product([FLOW_DAG_YAML, FLOW_FLEX_YAML], [".yaml", ".yml"]):
flow_file_name = flow_name.replace(".yaml", suffix)
Expand All @@ -109,7 +112,7 @@ def resolve_flow_path(
flow_file = flow_path.name
else: # flow_path doesn't exist
flow_folder = flow_path
flow_file = FLOW_DAG_YAML
flow_file = default_flow_file

file_path = flow_folder / flow_file
if file_path.suffix.lower() not in FLOW_FILE_SUFFIX:
Expand Down
58 changes: 51 additions & 7 deletions src/promptflow-core/promptflow/contracts/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -751,19 +751,19 @@ def from_yaml(cls, flow_file: Path, working_dir=None, name=None) -> "Flow":
"""Load flow from yaml file."""
working_dir = cls._parse_working_dir(flow_file, working_dir)
with open(working_dir / flow_file, "r", encoding=DEFAULT_ENCODING) as fin:
flow_dag = load_yaml(fin)
flow_data = load_yaml(fin)
# Name priority: name from payload > name from yaml content > working_dir.stem
# For portal created flow, there is a meaningless predefined name in yaml, use name from payload to override it.
if name is None:
name = flow_dag.get("name", _sanitize_python_variable_name(working_dir.stem))
flow_dag["name"] = name
return Flow._from_dict(flow_dag=flow_dag, working_dir=working_dir)
return Flow._from_dict(flow_data=flow_data, working_dir=working_dir, name=name)

@classmethod
def _from_dict(cls, flow_dag: dict, working_dir: Path) -> "Flow":
def _from_dict(cls, flow_data: dict, working_dir: Path, name=None) -> "Flow":
"""Load flow from dict."""
cls._update_working_dir(working_dir)
flow = Flow.deserialize(flow_dag)
if name is None:
name = flow_data.get("name", _sanitize_python_variable_name(working_dir.stem))
flow_data["name"] = name
flow = Flow.deserialize(flow_data)
flow._set_tool_loader(working_dir)
return flow

Expand Down Expand Up @@ -1019,6 +1019,34 @@ def deserialize(data: dict) -> "FlexFlow":
environment_variables=data.get("environment_variables") or {},
)

@classmethod
def _from_dict(cls, flow_data: dict, working_dir: Path, name=None) -> "FlexFlow":
    """Load a flex flow from a dict.

    Resolves the flow name (explicit ``name`` argument wins, then the dict's
    own ``name``, then a sanitized form of the working directory's stem),
    locates the Python entry file for the flow's ``entry``, regenerates the
    flow metadata from that source file, and deserializes the result.
    """
    # Imported lazily to avoid import cycles at module load time.
    from promptflow._core.entry_meta_generator import generate_flow_meta

    from .._utils.flow_utils import resolve_python_entry_file

    Flow._update_working_dir(working_dir)

    # Name priority: explicit argument > name in the dict > directory stem.
    resolved_name = name
    if resolved_name is None:
        resolved_name = flow_data.get("name", _sanitize_python_variable_name(working_dir.stem))
    flow_data["name"] = resolved_name

    entry_file = resolve_python_entry_file(entry=flow_data.get("entry"), working_dir=working_dir)
    meta_dict = generate_flow_meta(
        flow_directory=working_dir,
        source_path=entry_file,
        data=flow_data,
    )
    return cls.deserialize(meta_dict)

def get_connection_names(self, environment_variables_overrides: Dict[str, str] = None):
    """Return the set of connection names used by this flow.

    :param environment_variables_overrides: Optional environment-variable
        overrides forwarded to the base-class resolution logic.
    :return: A set of non-empty connection names.
    """
    connection_names = super().get_connection_names(
        environment_variables_overrides=environment_variables_overrides
    )
    # Filter out falsy entries (None / empty strings) from the base result.
    # A set comprehension already yields a set; the original wrapped it in a
    # redundant set() call.
    return {item for item in connection_names if item}


@dataclass
class PromptyFlow(FlowBase):
Expand Down Expand Up @@ -1064,3 +1092,19 @@ def deserialize(cls, data: dict) -> "PromptyFlow":
environment_variables=data.get("environment_variables") or {},
message_format=data.get("message_format", MessageFormatType.BASIC),
)

@classmethod
def _from_dict(cls, flow_data: dict, working_dir: Path, name=None) -> "PromptyFlow":
    """Load a prompty flow from a dict.

    Resolves the flow name (explicit ``name`` argument wins, then the dict's
    own ``name``, then a sanitized form of the working directory's stem) and
    deserializes the dict into a PromptyFlow.
    """
    Flow._update_working_dir(working_dir)

    # Name priority: explicit argument > name in the dict > directory stem.
    resolved_name = name
    if resolved_name is None:
        resolved_name = flow_data.get("name", _sanitize_python_variable_name(working_dir.stem))
    flow_data["name"] = resolved_name

    return cls.deserialize(flow_data)

def get_connection_names(self, environment_variables_overrides: Dict[str, str] = None):
    """Return the set of connection names used by this flow.

    :param environment_variables_overrides: Optional environment-variable
        overrides forwarded to the base-class resolution logic.
    :return: A set of non-empty connection names.
    """
    connection_names = super().get_connection_names(
        environment_variables_overrides=environment_variables_overrides
    )
    # Filter out falsy entries (None / empty strings) from the base result.
    # A set comprehension already yields a set; the original wrapped it in a
    # redundant set() call.
    return {item for item in connection_names if item}
5 changes: 0 additions & 5 deletions src/promptflow-core/promptflow/core/_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
from promptflow._constants import CONNECTION_SCRUBBED_VALUE_NO_CHANGE, ConnectionType, CustomStrongTypeConnectionConfigs
from promptflow._core.token_provider import AzureTokenProvider
from promptflow._utils.logger_utils import LoggerFactory
from promptflow._utils.utils import in_jupyter_notebook
from promptflow.constants import ConnectionAuthMode, ConnectionDefaultApiVersion
from promptflow.contracts.types import Secret
from promptflow.core._errors import RequiredEnvironmentVariablesNotSetError
Expand Down Expand Up @@ -64,10 +63,6 @@ def __init__(
self.expiry_time = kwargs.get("expiry_time", None)
self.created_date = kwargs.get("created_date", None)
self.last_modified_date = kwargs.get("last_modified_date", None)
# Conditional assignment to prevent entity bloat when unused.
print_as_yaml = kwargs.pop("print_as_yaml", in_jupyter_notebook())
if print_as_yaml:
self.print_as_yaml = True

def keys(self) -> List:
"""Return keys of the connection properties."""
Expand Down
10 changes: 8 additions & 2 deletions src/promptflow-core/promptflow/core/_serving/app_base.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

import json
import mimetypes
import os
from abc import ABC, abstractmethod
Expand All @@ -17,6 +17,7 @@
from promptflow.core._utils import init_executable
from promptflow.storage._run_storage import DummyRunStorage

from ..._constants import PF_FLOW_INIT_CONFIG
from .swagger import generate_swagger


Expand Down Expand Up @@ -55,7 +56,12 @@ def init_app(self, **kwargs):
self.sample = get_sample_json(self.project_path, logger)

self.init = kwargs.get("init", {})
logger.info("Init params: " + str(self.init))
if not self.init:
init_params = os.environ.get(PF_FLOW_INIT_CONFIG, "{}")
init_dict: dict = json.loads(init_params)
self.init = init_dict

logger.debug("Init params: " + str(self.init))

self.init_swagger()
# try to initialize the flow invoker
Expand Down
Loading

0 comments on commit 6a33387

Please sign in to comment.