Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

BREAKING CHANGE: Delete taskgraph.util.memoize (fixes #491) #515

Merged
merged 1 commit into from
Jun 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 0 additions & 8 deletions docs/reference/source/taskgraph.util.rst
Original file line number Diff line number Diff line change
Expand Up @@ -60,14 +60,6 @@ taskgraph.util.keyed\_by module
:undoc-members:
:show-inheritance:

taskgraph.util.memoize module
-----------------------------

.. automodule:: taskgraph.util.memoize
:members:
:undoc-members:
:show-inheritance:

taskgraph.util.parameterization module
--------------------------------------

Expand Down
8 changes: 4 additions & 4 deletions src/taskgraph/actions/registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


import functools
import json
from collections import namedtuple
from types import FunctionType
Expand All @@ -13,7 +14,6 @@
from taskgraph.config import load_graph_config
from taskgraph.parameters import Parameters
from taskgraph.util import hash, taskcluster, yaml
from taskgraph.util.memoize import memoize
from taskgraph.util.python_path import import_sibling_modules

actions = []
Expand All @@ -31,13 +31,13 @@ def is_json(data):
return True


@memoize
@functools.lru_cache(maxsize=None)
def read_taskcluster_yml(filename):
    """Parse the given .taskcluster.yml file.

    Results are cached per filename so repeated reads cost nothing.
    """
    parsed = yaml.load_yaml(filename)
    return parsed


@memoize
@functools.lru_cache(maxsize=None)
def hash_taskcluster_yml(filename):
"""
Generate a hash of the given .taskcluster.yml. This is the first 10 digits
Expand Down
4 changes: 2 additions & 2 deletions src/taskgraph/transforms/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


import functools
import re
from dataclasses import dataclass, field
from typing import Dict, List, Union
Expand All @@ -11,7 +12,6 @@

from ..config import GraphConfig
from ..parameters import Parameters
from ..util.memoize import memoize
from ..util.schema import Schema, validate_schema


Expand Down Expand Up @@ -58,7 +58,7 @@ class TransformConfig:
write_artifacts: bool

@property
@memoize
@functools.lru_cache(maxsize=None)
def repo_configs(self):
repositories = self.graph_config["taskgraph"]["repositories"]
if len(repositories) == 1:
Expand Down
8 changes: 4 additions & 4 deletions src/taskgraph/transforms/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
"""


import functools
import hashlib
import os
import re
Expand All @@ -23,7 +24,6 @@
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.hash import hash_path
from taskgraph.util.keyed_by import evaluate_keyed_by
from taskgraph.util.memoize import memoize
from taskgraph.util.schema import (
OptimizationSchema,
Schema,
Expand All @@ -43,7 +43,7 @@
)


@memoize
@functools.lru_cache(maxsize=None)
def _run_task_suffix():
    """String to append to cache names under control of run-task."""
    # The first 20 hex digits of run-task's own hash identify its version.
    digest = hash_path(RUN_TASK)
    return digest[:20]
Expand Down Expand Up @@ -214,14 +214,14 @@ def get_branch_rev(config):
return config.params["head_rev"]


@memoize
@functools.lru_cache(maxsize=None)
def get_default_priority(graph_config, project):
    """Resolve the default task priority for *project* from the graph config."""
    keyed_value = graph_config["task-priority"]
    return evaluate_keyed_by(keyed_value, "Graph Config", {"project": project})


@memoize
@functools.lru_cache(maxsize=None)
def get_default_deadline(graph_config, project):
return evaluate_keyed_by(
graph_config["task-deadline-after"], "Graph Config", {"project": project}
Expand Down
6 changes: 3 additions & 3 deletions src/taskgraph/util/docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


import functools
import hashlib
import io
import os
import re
from typing import Optional

from taskgraph.util.archive import create_tar_gz_from_files
from taskgraph.util.memoize import memoize

IMAGE_DIR = os.path.join(".", "taskcluster", "docker")

Expand Down Expand Up @@ -205,7 +205,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=N
return writer.hexdigest()


@memoize
@functools.lru_cache(maxsize=None)
def image_paths():
"""Return a map of image name to paths containing their Dockerfile."""
config = load_yaml("taskcluster", "kinds", "docker-image", "kind.yml")
Expand All @@ -222,7 +222,7 @@ def image_path(name):
return os.path.join(IMAGE_DIR, name)


@memoize
@functools.lru_cache(maxsize=None)
def parse_volumes(image):
"""Parse VOLUME entries from a Dockerfile for an image."""
volumes = set()
Expand Down
8 changes: 4 additions & 4 deletions src/taskgraph/util/hash.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import functools
import hashlib
from pathlib import Path

from taskgraph.util import path as mozpath
from taskgraph.util.memoize import memoize


@memoize
@functools.lru_cache(maxsize=None)
def hash_path(path):
"""Hash a single file.

Expand Down Expand Up @@ -44,13 +44,13 @@ def hash_paths(base_path, patterns):
return h.hexdigest()


@memoize
@functools.lru_cache(maxsize=None)
def _find_matching_files(base_path, pattern):
    """Return every file under *base_path* whose path matches *pattern*."""
    matches = []
    for candidate in _get_all_files(base_path):
        if mozpath.match(candidate, pattern):
            matches.append(candidate)
    return matches


@memoize
@functools.lru_cache(maxsize=None)
def _get_all_files(base_path):
return [
mozpath.normsep(str(path))
Expand Down
7 changes: 0 additions & 7 deletions src/taskgraph/util/memoize.py

This file was deleted.

9 changes: 4 additions & 5 deletions src/taskgraph/util/taskcluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@

from taskgraph.task import Task
from taskgraph.util import yaml
from taskgraph.util.memoize import memoize

logger = logging.getLogger(__name__)

Expand All @@ -31,7 +30,7 @@
CONCURRENCY = 50


@memoize
@functools.lru_cache(maxsize=None)
def get_root_url(use_proxy):
"""Get the current TASKCLUSTER_ROOT_URL.

Expand Down Expand Up @@ -106,7 +105,7 @@ def requests_retry_session(
return session


@memoize
@functools.lru_cache(maxsize=None)
def get_session():
    """Return a shared HTTP session configured with retry behaviour."""
    session = requests_retry_session(retries=5)
    return session

Expand Down Expand Up @@ -277,7 +276,7 @@ def get_task_url(task_id, use_proxy=False):
return task_tmpl.format(task_id)


@memoize
@functools.lru_cache(maxsize=None)
def get_task_definition(task_id, use_proxy=False):
    """Fetch and return the JSON task definition for *task_id*.

    The result is cached per (task_id, use_proxy) pair.
    """
    url = get_task_url(task_id, use_proxy)
    return _do_request(url).json()
Expand Down Expand Up @@ -446,7 +445,7 @@ def list_task_group_incomplete_tasks(task_group_id):
break


@memoize
@functools.lru_cache(maxsize=None)
def _get_deps(task_ids, use_proxy):
upstream_tasks = {}
for task_id in task_ids:
Expand Down
6 changes: 3 additions & 3 deletions src/taskgraph/util/workertypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import functools
from dataclasses import dataclass

from .keyed_by import evaluate_keyed_by
from .memoize import memoize


@dataclass
Expand All @@ -29,7 +29,7 @@ def implementation(self):
}


@memoize
@functools.lru_cache(maxsize=None)
def worker_type_implementation(graph_config, worker_type):
"""Get the worker implementation and OS for the given workerType, where the
OS represents the host system, not the target OS, in the case of
Expand All @@ -46,7 +46,7 @@ def worker_type_implementation(graph_config, worker_type):
return worker_config["implementation"], worker_config.get("os")


@memoize
@functools.lru_cache(maxsize=None)
def get_worker_type(graph_config, alias, level):
"""
Get the worker type based, evaluating aliases from the graph config.
Expand Down
Loading