Merge branch 'master' into new-live
kumare3 committed Dec 5, 2024
2 parents 5e9564f + 5323fc4 commit ba9b1b6
Showing 8 changed files with 182 additions and 35 deletions.
1 change: 1 addition & 0 deletions .github/actions/clear-action-cache/action.yml
@@ -4,6 +4,7 @@ runs:
using: 'composite'
steps:
- shell: bash
if: runner.os != 'Windows'
run: |
rm -rf /usr/share/dotnet
rm -rf /opt/ghc
5 changes: 5 additions & 0 deletions flytekit/core/array_node_map_task.py
@@ -138,6 +138,11 @@ def python_interface(self):

def construct_node_metadata(self) -> NodeMetadata:
# TODO: add support for other Flyte entities
return NodeMetadata(
name=self.name,
)

def construct_sub_node_metadata(self) -> NodeMetadata:
nm = super().construct_node_metadata()
nm._name = self.name
return nm
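For context, a minimal sketch (not part of this diff, mirroring the updated test further down in this commit) of what the construct_node_metadata / construct_sub_node_metadata split means in practice: TaskMetadata passed to map_task now applies to each mapped sub-node, while the parent ArrayNode's metadata carries only the task name.

```python
from datetime import timedelta

from flytekit import TaskMetadata, map_task, task


@task
def t1(a: int) -> int:
    return a + 1


# Metadata given to map_task describes each mapped sub-task.
mapped = map_task(
    t1,
    metadata=TaskMetadata(retries=2, interruptible=True, timeout=timedelta(seconds=10)),
)

# The parent ArrayNode's metadata now only carries the task name ...
parent_metadata = mapped.construct_node_metadata()

# ... while the sub-node metadata inherits interruptible/retries/timeout.
sub_node_metadata = mapped.construct_sub_node_metadata()
```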
104 changes: 79 additions & 25 deletions flytekit/image_spec/default_builder.py
@@ -8,7 +8,7 @@
from pathlib import Path
from string import Template
from subprocess import run
from typing import ClassVar
from typing import ClassVar, List

import click

@@ -21,6 +21,20 @@
from flytekit.tools.ignore import DockerIgnore, GitIgnore, IgnoreGroup, StandardIgnore
from flytekit.tools.script_mode import ls_files

UV_LOCK_INSTALL_TEMPLATE = Template(
"""\
RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
--mount=from=uv,source=/uv,target=/usr/bin/uv \
--mount=type=bind,target=uv.lock,src=uv.lock \
--mount=type=bind,target=pyproject.toml,src=pyproject.toml \
uv sync $PIP_INSTALL_ARGS
# Update PATH and UV_PYTHON to point to the venv created by uv sync
ENV PATH="/.venv/bin:$$PATH" \
UV_PYTHON=/.venv/bin/python
"""
)

UV_PYTHON_INSTALL_COMMAND_TEMPLATE = Template(
"""\
RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
@@ -38,7 +52,7 @@

DOCKER_FILE_TEMPLATE = Template("""\
#syntax=docker/dockerfile:1.5
FROM ghcr.io/astral-sh/uv:0.2.37 as uv
FROM ghcr.io/astral-sh/uv:0.5.1 as uv
FROM mambaorg/micromamba:2.0.3-debian12-slim as micromamba
FROM $BASE_IMAGE
@@ -114,26 +128,54 @@ def _is_flytekit(package: str) -> bool:
return name == "flytekit"


def create_docker_context(image_spec: ImageSpec, tmp_dir: Path):
"""Populate tmp_dir with Dockerfile as specified by the `image_spec`."""
base_image = image_spec.base_image or "debian:bookworm-slim"
def prepare_uv_lock_command(image_spec: ImageSpec, pip_install_args: List[str], tmp_dir: Path) -> str:
# uv sync is experimental, so our uv.lock support is also experimental
# the parameters we pass into install args could be different
warnings.warn("uv.lock support is experimental", UserWarning)

requirements = []
if image_spec.packages is not None:
msg = "Support for uv.lock files and packages is mutually exclusive"
raise ValueError(msg)

if image_spec.cuda is not None or image_spec.cudnn is not None:
msg = (
"cuda and cudnn do not need to be specified. If you are installed "
"a GPU accelerated library on PyPI, then it likely will install cuda "
"from PyPI."
"With conda you can installed cuda from the `nvidia` channel by adding `nvidia` to "
"ImageSpec.conda_channels and adding packages from "
"https://anaconda.org/nvidia into ImageSpec.conda_packages. If you require "
"cuda for non-python dependencies, you can set a `base_image` with cuda "
"preinstalled."
)
uv_lock_path = tmp_dir / "uv.lock"
shutil.copy2(image_spec.requirements, uv_lock_path)

# uv.lock requires pyproject.toml to be included
pyproject_toml_path = tmp_dir / "pyproject.toml"
dir_name = os.path.dirname(image_spec.requirements)

pyproject_toml_src = os.path.join(dir_name, "pyproject.toml")
if not os.path.exists(pyproject_toml_src):
msg = "To use uv.lock, a pyproject.toml must be in the same directory as the lock file"
raise ValueError(msg)

shutil.copy2(pyproject_toml_src, pyproject_toml_path)

# --locked: Assert that the `uv.lock` will remain unchanged
# --no-dev: Omit the development dependency group
# --no-install-project: Do not install the current project
pip_install_args.extend(["--locked", "--no-dev", "--no-install-project"])
pip_install_args = " ".join(pip_install_args)

return UV_LOCK_INSTALL_TEMPLATE.substitute(PIP_INSTALL_ARGS=pip_install_args)


def prepare_python_install(image_spec: ImageSpec, tmp_dir: Path) -> str:
pip_install_args = []
if image_spec.pip_index:
pip_install_args.append(f"--index-url {image_spec.pip_index}")

if image_spec.pip_extra_index_url:
extra_urls = [f"--extra-index-url {url}" for url in image_spec.pip_extra_index_url]
pip_install_args.extend(extra_urls)

requirements = []
if image_spec.requirements:
requirement_basename = os.path.basename(image_spec.requirements)
if requirement_basename == "uv.lock":
return prepare_uv_lock_command(image_spec, pip_install_args, tmp_dir)

# Assume this is a requirements.txt file
with open(image_spec.requirements) as f:
requirements.extend([line.strip() for line in f.readlines()])

@@ -146,19 +188,31 @@ def create_docker_context(image_spec: ImageSpec, tmp_dir: Path):

requirements_uv_path = tmp_dir / "requirements_uv.txt"
requirements_uv_path.write_text("\n".join(requirements))
pip_install_args.extend(["--requirement", "requirements_uv.txt"])

pip_install_args = ["--requirement", "requirements_uv.txt"]
pip_install_args = " ".join(pip_install_args)

if image_spec.pip_index:
pip_install_args.append(f"--index-url {image_spec.pip_index}")
if image_spec.pip_extra_index_url:
extra_urls = [f"--extra-index-url {url}" for url in image_spec.pip_extra_index_url]
pip_install_args.extend(extra_urls)
return UV_PYTHON_INSTALL_COMMAND_TEMPLATE.substitute(PIP_INSTALL_ARGS=pip_install_args)

pip_install_args = " ".join(pip_install_args)

uv_python_install_command = UV_PYTHON_INSTALL_COMMAND_TEMPLATE.substitute(PIP_INSTALL_ARGS=pip_install_args)
def create_docker_context(image_spec: ImageSpec, tmp_dir: Path):
"""Populate tmp_dir with Dockerfile as specified by the `image_spec`."""
base_image = image_spec.base_image or "debian:bookworm-slim"

if image_spec.cuda is not None or image_spec.cudnn is not None:
msg = (
"cuda and cudnn do not need to be specified. If you are installed "
"a GPU accelerated library on PyPI, then it likely will install cuda "
"from PyPI."
"With conda you can installed cuda from the `nvidia` channel by adding `nvidia` to "
"ImageSpec.conda_channels and adding packages from "
"https://anaconda.org/nvidia into ImageSpec.conda_packages. If you require "
"cuda for non-python dependencies, you can set a `base_image` with cuda "
"preinstalled."
)
raise ValueError(msg)

uv_python_install_command = prepare_python_install(image_spec, tmp_dir)
env_dict = {"PYTHONPATH": "/root", _F_IMG_ID: image_spec.id}

if image_spec.env:
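From a user's perspective, the new prepare_uv_lock_command path means an ImageSpec can point its requirements at a uv.lock file, as long as a pyproject.toml sits next to it and no extra packages are given. A hedged usage sketch (the image name and index URL below are illustrative, not taken from this commit):

```python
from flytekit.image_spec import ImageSpec

# Pointing `requirements` at a uv.lock file (with pyproject.toml in the same
# directory) triggers the experimental `uv sync --locked --no-dev
# --no-install-project` install path rendered by UV_LOCK_INSTALL_TEMPLATE.
image_spec = ImageSpec(
    name="my-image",                      # illustrative image name
    python_version="3.12",
    requirements="uv.lock",               # pyproject.toml must live next to it
    pip_index="https://pypi.org/simple",  # optional; forwarded as --index-url
)
```

Combining a uv.lock requirements file with `packages` raises a ValueError, and the builder emits a UserWarning because uv.lock support is still experimental.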
2 changes: 1 addition & 1 deletion flytekit/tools/translator.py
@@ -619,7 +619,7 @@ def get_serializable_array_node_map_task(
)
node = workflow_model.Node(
id=entity.name,
metadata=entity.construct_node_metadata(),
metadata=entity.construct_sub_node_metadata(),
inputs=node.bindings,
upstream_node_ids=[],
output_aliases=[],
@@ -114,6 +114,9 @@ def setup_nim_pod_template(self):
model_server_container.volume_mounts = [V1VolumeMount(name="dshm", mount_path="/dev/shm")]
model_server_container.security_context = V1SecurityContext(run_as_user=1000)

self.pod_template.pod_spec.volumes.append(V1Volume(name="cache", empty_dir={}))
model_server_container.volume_mounts.append(V1VolumeMount(name="cache", mount_path="/opt/nim/.cache"))

# Download HF LoRA adapters
if self._hf_repo_ids:
if not self._lora_adapter_mem:
@@ -131,12 +134,13 @@ def setup_nim_pod_template(self):
None,
)
if local_peft_dir_env:
mount_path = local_peft_dir_env.value
peft_mount_path = local_peft_dir_env.value
else:
# This is the directory where all LoRAs are stored for a particular model.
raise ValueError("NIM_PEFT_SOURCE environment variable must be set.")

self.pod_template.pod_spec.volumes.append(V1Volume(name="lora", empty_dir={}))
model_server_container.volume_mounts.append(V1VolumeMount(name="lora", mount_path=mount_path))
model_server_container.volume_mounts.append(V1VolumeMount(name="lora", mount_path=peft_mount_path))

self.pod_template.pod_spec.init_containers.insert(
0,
@@ -149,7 +153,7 @@ def setup_nim_pod_template(self):
f"""
pip install -U "huggingface_hub[cli]"
export LOCAL_PEFT_DIRECTORY={mount_path}
export LOCAL_PEFT_DIRECTORY={peft_mount_path}
mkdir -p $LOCAL_PEFT_DIRECTORY
TOKEN_VAR_NAME={self._secrets.secrets_prefix}{hf_key}
Expand All @@ -175,7 +179,7 @@ def setup_nim_pod_template(self):
volume_mounts=[
V1VolumeMount(
name="lora",
mount_path=mount_path,
mount_path=peft_mount_path,
)
],
),
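The new emptyDir volume gives the model-server container, which runs as a non-root user (run_as_user=1000), a writable cache at /opt/nim/.cache; the local PEFT mount-path variable is also renamed from mount_path to peft_mount_path. A standalone sketch of the emptyDir-append pattern using the kubernetes Python client (the container name and image below are placeholders, not the plugin's actual values):

```python
from kubernetes.client import V1Container, V1PodSpec, V1Volume, V1VolumeMount

# Standalone illustration of the pattern above: give a container a writable
# cache directory by appending an emptyDir volume plus a matching mount.
server = V1Container(
    name="model-server",                 # placeholder container name
    image="nvcr.io/nim/example:latest",  # placeholder image reference
    volume_mounts=[],
)
pod_spec = V1PodSpec(containers=[server], volumes=[])

pod_spec.volumes.append(V1Volume(name="cache", empty_dir={}))
server.volume_mounts.append(V1VolumeMount(name="cache", mount_path="/opt/nim/.cache"))
```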
6 changes: 4 additions & 2 deletions pyproject.toml
@@ -24,8 +24,10 @@ dependencies = [
"fsspec>=2023.3.0",
"gcsfs>=2023.3.0",
"googleapis-common-protos>=1.57",
"grpcio",
"grpcio-status",
# Skipping those versions to account for the unwanted output coming from grpcio and grpcio-status.
# Issue being tracked in https://github.com/flyteorg/flyte/issues/6082.
"grpcio!=1.68.0,!=1.68.1",
"grpcio-status!=1.68.0,!=1.68.1",
"importlib-metadata",
"joblib",
"jsonlines",
75 changes: 75 additions & 0 deletions tests/flytekit/unit/core/image_spec/test_default_builder.py
@@ -217,3 +217,78 @@ def test_should_push_env(monkeypatch, push_image_spec):
assert "--push" not in call_args[0]
else:
assert "--push" in call_args[0]


def test_create_docker_context_uv_lock(tmp_path):
docker_context_path = tmp_path / "builder_root"
docker_context_path.mkdir()

uv_lock_file = tmp_path / "uv.lock"
uv_lock_file.write_text("this is a lock file")

pyproject_file = tmp_path / "pyproject.toml"
pyproject_file.write_text("this is a pyproject.toml file")

image_spec = ImageSpec(
name="FLYTEKIT",
python_version="3.12",
requirements=os.fspath(uv_lock_file),
pip_index="https://url.com",
pip_extra_index_url=["https://extra-url.com"],
)

warning_msg = "uv.lock support is experimental"
with pytest.warns(UserWarning, match=warning_msg):
create_docker_context(image_spec, docker_context_path)

dockerfile_path = docker_context_path / "Dockerfile"
assert dockerfile_path.exists()
dockerfile_content = dockerfile_path.read_text()

assert (
"uv sync --index-url https://url.com --extra-index-url "
"https://extra-url.com --locked --no-dev --no-install-project"
) in dockerfile_content


@pytest.mark.filterwarnings("ignore::UserWarning")
def test_uv_lock_errors_no_pyproject_toml(monkeypatch, tmp_path):
run_mock = Mock()
monkeypatch.setattr("flytekit.image_spec.default_builder.run", run_mock)

uv_lock_file = tmp_path / "uv.lock"
uv_lock_file.write_text("this is a lock file")

image_spec = ImageSpec(
name="FLYTEKIT",
python_version="3.12",
requirements=os.fspath(uv_lock_file),
)

builder = DefaultImageBuilder()

with pytest.raises(ValueError, match="To use uv.lock"):
builder.build_image(image_spec)


@pytest.mark.filterwarnings("ignore::UserWarning")
@pytest.mark.parametrize("invalid_param", ["packages"])
def test_uv_lock_error_no_packages(monkeypatch, tmp_path, invalid_param):
run_mock = Mock()
monkeypatch.setattr("flytekit.image_spec.default_builder.run", run_mock)

uv_lock_file = tmp_path / "uv.lock"
uv_lock_file.write_text("this is a lock file")

image_spec = ImageSpec(
name="FLYTEKIT",
python_version="3.12",
requirements=os.fspath(uv_lock_file),
packages=["ruff"],
)
builder = DefaultImageBuilder()

with pytest.raises(ValueError, match="Support for uv.lock files and packages is mutually exclusive"):
builder.build_image(image_spec)

run_mock.assert_not_called()
12 changes: 9 additions & 3 deletions tests/flytekit/unit/core/test_array_node_map_task.py
@@ -1,4 +1,5 @@
import functools
from datetime import timedelta
import os
import tempfile
import typing
@@ -386,7 +387,12 @@ def test_serialization_metadata2(serialization_settings):
def t1(a: int) -> typing.Optional[int]:
return a + 1

arraynode_maptask = map_task(t1, min_success_ratio=0.9, concurrency=10, metadata=TaskMetadata(retries=2, interruptible=True))
arraynode_maptask = map_task(
t1,
min_success_ratio=0.9,
concurrency=10,
metadata=TaskMetadata(retries=2, interruptible=True, timeout=timedelta(seconds=10))
)
assert arraynode_maptask.metadata.interruptible

@workflow
@@ -402,16 +408,16 @@ def wf1(x: typing.List[int]):
od = OrderedDict()
wf_spec = get_serializable(od, serialization_settings, wf)

assert arraynode_maptask.construct_node_metadata().interruptible
array_node = wf_spec.template.nodes[0]
assert array_node.metadata.interruptible
assert array_node.metadata.timeout == timedelta()
assert array_node.array_node._min_success_ratio == 0.9
assert array_node.array_node._parallelism == 10
assert not array_node.array_node._is_original_sub_node_interface
assert array_node.array_node._execution_mode == _core_workflow.ArrayNode.MINIMAL_STATE
task_spec = od[arraynode_maptask]
assert task_spec.template.metadata.retries.retries == 2
assert task_spec.template.metadata.interruptible
assert task_spec.template.metadata.timeout == timedelta(seconds=10)

wf1_spec = get_serializable(od, serialization_settings, wf1)
array_node = wf1_spec.template.nodes[0]
