From 3bac8d32749d2e839b2bd386b3356149d5507a5a Mon Sep 17 00:00:00 2001 From: guillaume Date: Mon, 13 Nov 2023 12:16:53 +0000 Subject: [PATCH] noop --- .github/workflows/test.yml | 45 +++++ .gitignore | 2 + LICENSE | 21 ++ README.md | 235 ++++++++++++++++++++++ codecov.yml | 1 + iter_pipes/__init__.py | 1 + iter_pipes/functional.py | 207 +++++++++++++++++++ iter_pipes/main.py | 172 ++++++++++++++++ poetry.lock | 297 ++++++++++++++++++++++++++++ pyproject.toml | 36 ++++ tests/docs/test_01_functions.py | 21 ++ tests/docs/test_02_classes.py | 30 +++ tests/docs/test_03_batches.py | 28 +++ tests/docs/test_04_split.py | 36 ++++ tests/docs/test_05_filters.py | 32 +++ tests/docs/test_06_pipe_overload.py | 33 ++++ tests/docs/test_07_for.py | 31 +++ tests/docs/test_08_resumability.py | 74 +++++++ tests/test_consume.py | 18 ++ tests/test_filter_typeguard.py | 17 ++ tests/test_fork.py | 40 ++++ tests/test_processing_errors.py | 14 ++ 22 files changed, 1391 insertions(+) create mode 100644 .github/workflows/test.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 codecov.yml create mode 100644 iter_pipes/__init__.py create mode 100644 iter_pipes/functional.py create mode 100644 iter_pipes/main.py create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 tests/docs/test_01_functions.py create mode 100644 tests/docs/test_02_classes.py create mode 100644 tests/docs/test_03_batches.py create mode 100644 tests/docs/test_04_split.py create mode 100644 tests/docs/test_05_filters.py create mode 100644 tests/docs/test_06_pipe_overload.py create mode 100644 tests/docs/test_07_for.py create mode 100644 tests/docs/test_08_resumability.py create mode 100644 tests/test_consume.py create mode 100644 tests/test_filter_typeguard.py create mode 100644 tests/test_fork.py create mode 100644 tests/test_processing_errors.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 
0000000..18437ac --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,45 @@ +name: test + +permissions: + contents: read + +on: + - pull_request + - push + + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Install poetry + run: curl -sSL https://install.python-poetry.org | python3 - + env: + POETRY_VERSION: 1.7.0 + - name: Add Poetry to path + run: echo "${HOME}/.poetry/bin" >> $GITHUB_PATH + - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: "3.11" + cache: "poetry" + - name: Install Poetry Packages + run: | + poetry env use "3.11" + poetry install --only dev + - name: Add venv to path + run: echo `poetry env info --path`/bin/ >> $GITHUB_PATH + + - run: ruff check --output-format github . + - run: ruff format --check . + - run: mypy --check-untyped-defs . + - run: pyright --warnings . + - run: pytest -s --cov=iter_pipes --cov-report=html + - uses: actions/upload-artifact@v3 + with: + name: code-coverage + path: htmlcov/ + - name: Upload coverage to codecov + uses: codecov/codecov-action@v3 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d8a1bb0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +.coverage diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..cf4b192 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Bright Network + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..2b52a41 --- /dev/null +++ b/README.md @@ -0,0 +1,235 @@ +[![Code Coverage](https://img.shields.io/codecov/c/github/brightnetwork/iter-pipes)](https://app.codecov.io/gh/brightnetwork/iter-pipes) + +## `iter_pipes`: Iterable Pipes + +Functional pythonic pipelines for iterables. + + +```bash +pip install git+https://github.com/brightnetwork/iter-pipes +``` + +### Examples + +#### map / filter: + +```python +import math + +from iter_pipes import PipelineFactory + +pipeline = ( + PipelineFactory[int]() + .map(math.exp) + .filter(lambda x: x > math.exp(2)) + .map(math.log) + .map(str) +) + +assert pipeline(range(5)).to_list() == ["3.0", "4.0"] +``` + +#### Batch operations + +```python +def get_user_names_from_db(user_ids: list[int]) -> list[str]: + # typical batch operation: + # - duration is roughly constant for a batch + # - batch size has to be below a fixed threshold + print("processing batch", user_ids) + return [f"user_{user_id}" for user_id in user_ids] + + +pipeline = ( + PipelineFactory[int]() + .batch(get_user_names_from_db, batch_size=3) + .for_each(lambda user_name: print("Hello ", user_name)) +) + +pipeline(range(5)).to_list() +# returns +# ["user_0", "user_1", "user_2", "user_3", "user_4"] +# prints +# processing batch [0, 1, 2] +# Hello user_0 +# Hello user_1 +# Hello user_2 +# processing batch [3, 4] +# Hello user_3 +# Hello user_4 +``` + + +#### Storing state + +Class with a `__call__` method provide a easy way to 
store state during the processing. + +```python +class CountUsers: + def __init__(self): + self._count = 0 + + def __call__(self, item: str) -> str: + self._count += 1 + return f"{item} (position {self._count})" + + +pipeline = PipelineFactory[int]().map(lambda x: f"user {x}").map(CountUsers()) + +pipeline.process(range(5)).to_list() +# return +# ['user 0 (position 1)', 'user 1 (position 2)', 'user 2 (position 3)', 'user 3 (position 4)', 'user 4 (position 5)'] +``` + +One could also use a closure: + +``` +def count_users(): + count = 0 + + def wrapper(item: str) -> str: + nonlocal count + count += 1 + return f"{item} (position {count})" + + return wrapper + + +pipeline = PipelineFactory[int]().map(lambda x: f"user {x}").map(count_users()) + +pipeline.process(range(5)).to_list() +# return +# ['user 0 (position 1)', 'user 1 (position 2)', 'user 2 (position 3)', 'user 3 (position 4)', 'user 4 (position 5)'] +``` + +#### Split + +```python +pipeline = ( + PipelineFactory[int]() + .split( + lambda x: x.filter(lambda x: x % 2 == 0).map(lambda x: x**2), + lambda x: x.map(lambda x: -x), + ) + .map(str) +) + +expected = ["0", "0", "4", "-1", "-2", "16", "-3", "-4", "36", "-5", "-6", "-7"] +assert pipeline(range(8)).to_list() == expected +``` + +The order within each split "branch" will be preserved, but there is no guarantee in terms of how the two branches are merged. 
+ +#### Pipe operator overload + +```python +import iter_pipes.functional as itp + +pipeline = ( + PipelineFactory[int]() + | itp.map(math.exp) + | itp.filter(lambda x: x > math.exp(2)) # type checker might complain + | itp.map(math.log) + | itp.map(str) +) + +assert pipeline(range(6)).to_list() == ["3.0", "4.0", "5.0"] +``` + +Note that the typing of lambda functions inside the functional `map` is not as good as with the `Pipeline.XXX` methods. To work around this, one should either use the non-functional style, or use fully typed functions instead of lambdas. + + +#### Resumability + +```python +pipeline = PipelineFactory[int]().split( + lambda x: x.filter(lambda x: x % 3 == 0).map(str), + lambda x: x, +) + +print(pipeline.process(range(12)).to_list()) +# return +# ['0', 0, '3', 1, 2, 3, '6', 4, 5, 6, '9', 7, 8, 9, 10, 11] +# note that between each yield from the first branch, the pipeline will yield everything +# from the second branch so that we don't store too many messages in the inflight buffer. + + +def filter_out_everything(items: Iterable[int]) -> Iterable[int]: + print("starting") + for item in items: + if False: + yield item + + +pipeline = PipelineFactory[int]().split( + lambda x: x.pipe(filter_out_everything).map(str), + lambda x: x, + max_inflight=5, +) + +print(pipeline.process(range(9)).to_list()) +# return +# [0, 1, 2, 3, 4, 5, 6, 7, 8] +# print +# starting +# starting +# starting +``` + +### Motivations + +The goal of the library is to provide a structure to work with [collection pipelines](https://martinfowler.com/articles/collection-pipeline/). + +> Collection pipelines are a programming pattern where you organize some computation as a sequence of operations which compose by taking a collection as output of one operation and feeding it into the next. + +In this library, each "operation" is called a "step". 
We distinguish several subtypes of steps: +- `map` steps will operate on each item of the collection, one by one +- `filter` steps will reduce the number of items in the collection, without changing their values +- `for_each` steps will do some processing, but without impacting the following steps (they won't change the input) +- `batch` steps will operate by batch of a fixed size - can be useful for example to batch database calls. + +In addition to that, we also define pipeline `split`, which allows running several steps after a single one. + +Library goals: +- declarative, expressive syntax for the steps above +- memory efficiency: + - pure python, so it's not optimal at all + - but what we care about is ensuring that the memory used by the pipeline does not scale with the number of items in the collection. +- performant: + - pure python, so the code itself is not really performant + - but the library allows for optimal usage of the "slow" operations (network calls mainly) that are computed in the pipeline. This is what is meant by "performant" +- lightweight usage, as in existing functions can be used as a step without the need for a wrapper +- provide as good a typing experience as possible + + + +### Documentation + +Have a look at the [`docs`](./tests/docs/) part of the test suites for examples. + +### Contributing + +```bash +ruff check --output-format github . +ruff format --check . +mypy --check-untyped-defs . 
+pytest -s +``` diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..69cb760 --- /dev/null +++ b/codecov.yml @@ -0,0 +1 @@ +comment: false diff --git a/iter_pipes/__init__.py b/iter_pipes/__init__.py new file mode 100644 index 0000000..38f3549 --- /dev/null +++ b/iter_pipes/__init__.py @@ -0,0 +1 @@ +from .main import * # noqa diff --git a/iter_pipes/functional.py b/iter_pipes/functional.py new file mode 100644 index 0000000..c7c6c72 --- /dev/null +++ b/iter_pipes/functional.py @@ -0,0 +1,207 @@ +from __future__ import annotations + +import math +from collections import deque +from collections.abc import Callable, Iterable, Iterator +from functools import partial +from itertools import count, groupby +from typing import Any, Literal, TypeGuard, TypeVar, overload + +__all__ = [ + "map", + "filter", + "for_each", + "for_batch", + "batch", + "fork", +] + +T = TypeVar("T") +V = TypeVar("V") +U = TypeVar("U") +W = TypeVar("W") +X = TypeVar("X") + + +raw_filter = filter + + +Step = Callable[[Iterable[T]], Iterable[V]] + + +def map(step: Callable[[V], W]) -> Step[V, W]: + def f(data: Iterable[V]) -> Iterable[W]: + for item in data: + yield step(item) + + return f + + +def for_each(step: Callable[[V], Any]) -> Step[V, V]: + def f(data: Iterable[V]) -> Iterable[V]: + for item in data: + step(item) + yield item + + return f + + +def for_batch(step: Callable[[list[V]], Any], batch_size: int) -> Step[V, V]: + def f(data: Iterable[V]) -> Iterable[V]: + for _, batch_iterator in groupby( + zip(data, count()), + key=lambda x: math.floor(x[1] / batch_size), + ): + batch = [x[0] for x in batch_iterator] + step(batch) + yield from batch + + return f + + +def batch(step: Callable[[list[V]], Iterable[U]], batch_size: int) -> Step[V, U]: + def f(data: Iterable[V]) -> Iterable[U]: + for _, batch_iterator in groupby( + zip(data, count()), + key=lambda x: math.floor(x[1] / batch_size), + ): + yield from step([x[0] for x in batch_iterator]) + + return f + + +@overload 
+def filter(step: Callable[[V], TypeGuard[W]]) -> Step[V, W]: + ... + + +@overload +def filter(step: Callable[[V], bool]) -> Step[V, V]: + ... + + +def filter(step: Callable[[V], bool]) -> Step[V, V]: # type: ignore + return partial(raw_filter, step) # type: ignore + + +@overload +def fork( + step1: Step[T, U] | None, + step2: Step[T, V], + pick_first: Literal[True], + max_inflight: int | None = ..., +) -> Step[T, U]: + ... + + +@overload +def fork( + step1: Step[T, U], + step2: Step[T, V], + pick_first: Literal[False] | None, + max_inflight: int | None = ..., +) -> Step[T, V | U]: + ... + + +@overload +def fork( + step1: Step[T, U], + step2: Step[T, V], + step3: Step[T, W], + pick_first: Literal[False] | None, + max_inflight: int | None = ..., +) -> Step[T, V | U | W]: + ... + + +@overload +def fork( # noqa: PLR0913 + step1: Step[T, U], + step2: Step[T, V], + step3: Step[T, W], + step4: Step[T, X], + pick_first: Literal[False] | None, + max_inflight: int | None = ..., +) -> Step[T, V | U | W | X]: + ... + + +@overload +def fork( + *steps: Step[T, Any] | None, + pick_first: Literal[False] | None, + max_inflight: int | None = ..., +) -> Step[T, Any]: + ... + + +def fork( # type: ignore + *steps: Step[T, Any] | None, + max_inflight: int = 1000, + pick_first: bool = False, +) -> Step[T, Any]: + """ + Returns a step that forks the input iterable into multiple iterables, + each one being processed by a different step. The output iterable is the + concatenation of the output of each step. + + If `pick_first` is True, the output iterable is the concatenation of the + output of the first step only. 
+ """ + + def wrapper(iterable: Iterable[T]) -> Iterable[Any]: + it = iter(iterable) + + queues: list[deque[T]] = [deque() for _ in steps] + # could be rewritten with a single deque to be more memory efficient + + # the set of iterators that are paused because we have too many inflight items + # they should be resumed when the number of inflight items goes down + paused_iterators: set[int] = set() + + def gen(i: int) -> Iterator[T]: + while True: + mydeque = queues[i] + if not mydeque: # when the current deque is empty + try: + newval = next(it) # fetch a new value and + except StopIteration: + return + for d in queues: # load it to all the deques + d.append(newval) + + # if there are too many inflight items, pause the iterator + nb_inflights = sum(len(q) for q in queues) + if nb_inflights > max_inflight: + paused_iterators.add(i) + return + + yield mydeque.popleft() + + iterators = [iter((steps[i] or identity)(gen(i))) for i in range(len(steps))] + + # the set of iterators that are not done yet + pending_iterators = set(range(len(iterators))) + + while len(pending_iterators): + i = max( # the index of the iterator with the most inflight items + pending_iterators, + key=lambda i: len(queues[i]), + ) + try: + val = next(iterators[i]) + if not pick_first or i == 0: + yield val + except StopIteration: + if i in paused_iterators: # resume the iterator + iterators[i] = iter((steps[i] or identity)(gen(i))) + paused_iterators.remove(i) + else: + pending_iterators.remove(i) + + return wrapper + + +def identity(item: W) -> W: + return item diff --git a/iter_pipes/main.py b/iter_pipes/main.py new file mode 100644 index 0000000..d88f7a8 --- /dev/null +++ b/iter_pipes/main.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +from collections import deque +from collections.abc import Callable, Iterator +from typing import Any, Generic, Iterable, TypeGuard, TypeVar, overload + +from iter_pipes.functional import ( + batch, + filter, + for_batch, + for_each, + fork, + 
identity, + map, +) + +T = TypeVar("T") +U = TypeVar("U") +V = TypeVar("V") +W = TypeVar("W") +X = TypeVar("X") +Y = TypeVar("Y") + +__all__ = ["Pipeline", "PipelineFactory"] + + +raw_filter = filter + + +Step = Callable[[Iterable[T]], Iterable[U]] + + +def compose_steps(step1: Step[T, U] | None, step2: Step[U, V]) -> Step[T, V]: + if step1 is None: + return step2 # type: ignore + + def composed(items: Iterable[T]) -> Iterable[V]: + return step2(step1(items)) + + return composed + + +class IterableWrapper(Generic[T]): + def __init__(self, iterable: Iterable[T]): + self._iterable = iterable + + def __iter__(self) -> Iterator[T]: + return iter(self._iterable) + + def consume(self) -> None: + deque(self._iterable) + + def to_list(self) -> list[T]: + return list(self._iterable) + + +class Pipeline(Generic[T, U]): + step: Step[T, U] | None + items: Iterable[T] | None + + def __init__( + self, + step: Step[T, U] | None = None, + items: Iterable[T] | None = None, + ): + self.step = step + self.items = items + + def for_each(self, step: Callable[[U], Any]) -> Pipeline[T, U]: + return self | for_each(step) + + def map(self, step: Callable[[U], W]) -> Pipeline[T, W]: + return self | map(step) + + def pipe(self, step: Step[U, V]) -> Pipeline[T, V]: + return Pipeline(compose_steps(self.step, step), self.items) + + def for_batch( + self, step: Callable[[list[U]], Any], batch_size: int + ) -> Pipeline[T, U]: + return self | for_batch(step, batch_size) + + def batch( + self, step: Callable[[list[U]], Iterable[V]], batch_size: int + ) -> Pipeline[T, V]: + return self | batch(step, batch_size) + + @overload + def filter(self, step: Callable[[U], TypeGuard[W]]) -> Pipeline[T, W]: + ... + + @overload + def filter(self, step: Callable[[U], bool]) -> Pipeline[T, U]: + ... 
+ + def filter(self, step): # type: ignore + return self | filter(step) # type: ignore + + def filter_not_none(self: Pipeline[T, X | None]) -> Pipeline[T, X]: + return self | filter(lambda item: item is not None) # type: ignore + + @overload + def split( + self, + f1: Callable[[Pipeline[U, U]], Pipeline[U, W]], + max_inflight: int = ..., + ) -> Pipeline[U, W]: + ... + + @overload + def split( + self, + f1: Callable[[Pipeline[U, U]], Pipeline[U, V]], + f2: Callable[[Pipeline[U, U]], Pipeline[U, W]], + max_inflight: int = ..., + ) -> Pipeline[U, W | V]: + ... + + @overload + def split( + self, + f1: Callable[[Pipeline[U, U]], Pipeline[U, V]], + f2: Callable[[Pipeline[U, U]], Pipeline[U, W]], + f3: Callable[[Pipeline[U, U]], Pipeline[U, X]], + max_inflight: int = ..., + ) -> Pipeline[U, W | V | X]: + ... + + @overload + def split( # noqa W291 + self, + f1: Callable[[Pipeline[U, U]], Pipeline[U, V]], + f2: Callable[[Pipeline[U, U]], Pipeline[U, W]], + f3: Callable[[Pipeline[U, U]], Pipeline[U, X]], + f4: Callable[[Pipeline[U, U]], Pipeline[U, Y]], + max_inflight: int = ..., + ) -> Pipeline[U, W | V | X | Y]: + ... 
+ + def split( # type: ignore + self, + *functions: Callable[[Pipeline[U, U]], Pipeline[U, Any]], + max_inflight: int = 1000, + ) -> Pipeline[U, Any]: + steps = [f(Pipeline()).step or identity for f in functions] + return self | fork(*steps, max_inflight=max_inflight, pick_first=False) # type: ignore + + def subpipeline( + self, + *functions: Callable[[Pipeline[U, U]], Pipeline[U, Any]], + max_inflight: int = 1000, + ) -> Pipeline[T, U]: + steps = [f(Pipeline()).step or identity for f in functions] + return self | fork(identity, *steps, max_inflight=max_inflight, pick_first=True) # type: ignore + + def process(self, items: Iterable[T] | None = None) -> IterableWrapper[U]: + input_ = items or self.items + if not input_: + raise ValueError("input is None") + if not self.step: + raise ValueError("step is None") + return IterableWrapper(self.step(input_)) + + def __call__(self, items: Iterable[T] | None = None) -> IterableWrapper[U]: + return self.process(items) + + def __or__(self, step: Step[U, V]) -> Pipeline[T, V]: + return self.pipe(step) + + +class PipelineFactory(Generic[V], Pipeline[V, V]): + pass diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..86a0d99 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,297 @@ +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = 
"coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + 
{file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "mypy" +version = "1.6.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = 
"mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyright" +version = "1.1.336" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.336-py3-none-any.whl", hash = 
"sha256:8f6a8f365730c8d6c1af840d937371fd5cf0137b6e1827b8b066bc0bb7327aa6"}, + {file = "pyright-1.1.336.tar.gz", hash = "sha256:f92d6d6845e4175833ea60dee5b1ef4d5d66663438fdaedccc1c3ba0f8efa3e3"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" + +[package.extras] +all = ["twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] + +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "ruff" +version = "0.1.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.4-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:864958706b669cce31d629902175138ad8a069d99ca53514611521f532d91495"}, + {file = "ruff-0.1.4-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9fdd61883bb34317c788af87f4cd75dfee3a73f5ded714b77ba928e418d6e39e"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4eaca8c9cc39aa7f0f0d7b8fe24ecb51232d1bb620fc4441a61161be4a17539"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9a1301dc43cbf633fb603242bccd0aaa34834750a14a4c1817e2e5c8d60de17"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e8db8ab6f100f02e28b3d713270c857d370b8d61871d5c7d1702ae411df683"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:80fea754eaae06335784b8ea053d6eb8e9aac75359ebddd6fee0858e87c8d510"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bc02a480d4bfffd163a723698da15d1a9aec2fced4c06f2a753f87f4ce6969c"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862811b403063765b03e716dac0fda8fdbe78b675cd947ed5873506448acea4"}, + {file = "ruff-0.1.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58826efb8b3efbb59bb306f4b19640b7e366967a31c049d49311d9eb3a4c60cb"}, + {file = "ruff-0.1.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fdfd453fc91d9d86d6aaa33b1bafa69d114cf7421057868f0b79104079d3e66e"}, + {file = "ruff-0.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e8791482d508bd0b36c76481ad3117987301b86072158bdb69d796503e1c84a8"}, + {file = "ruff-0.1.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01206e361021426e3c1b7fba06ddcb20dbc5037d64f6841e5f2b21084dc51800"}, + {file = "ruff-0.1.4-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:645591a613a42cb7e5c2b667cbefd3877b21e0252b59272ba7212c3d35a5819f"}, + {file = "ruff-0.1.4-py3-none-win32.whl", hash = "sha256:99908ca2b3b85bffe7e1414275d004917d1e0dfc99d497ccd2ecd19ad115fd0d"}, + {file = "ruff-0.1.4-py3-none-win_amd64.whl", hash = "sha256:1dfd6bf8f6ad0a4ac99333f437e0ec168989adc5d837ecd38ddb2cc4a2e3db8a"}, + {file = "ruff-0.1.4-py3-none-win_arm64.whl", hash = "sha256:d98ae9ebf56444e18a3e3652b3383204748f73e247dea6caaf8b52d37e6b32da"}, + {file = "ruff-0.1.4.tar.gz", hash = "sha256:21520ecca4cc555162068d87c747b8f95e1e95f8ecfcbbe59e8dd00710586315"}, +] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", 
"tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "2e0d1a269c2eee498bbac8eae332452d10ae0e7b6188138e0b0fc57fd8b1e5ad" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a328d62 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,36 @@ +[tool.poetry] +name = "iter-pipes" +version = "0.1.0" +description = "" +authors = ["guillaume "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" + +[tool.poetry.group.dev.dependencies] +ruff = "^0.1.4" +mypy = "^1.6.1" +pytest = "^7.4.3" +pyright = "^1.1.336" +pytest-cov = "^4.1.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +select = ["ALL"] +ignore = ["INP001", "D", "COM", "PGH", "ANN101", "ANN204", "A003", "TRY", "EM101", "A001", "ISC001", "C901"] + +[tool.ruff.lint.extend-per-file-ignores] +"tests/**" = ["ANN201", "S101", "FA102", "PLR2004"] + +[tool.coverage.report] +exclude_lines = ["@overload"] + +[tool.pytest.ini_options] +pythonpath = "." 
+ +[tool.pyright] +strict = ["iter_pipes/*.py"] diff --git a/tests/docs/test_01_functions.py b/tests/docs/test_01_functions.py new file mode 100644 index 0000000..fc3ed71 --- /dev/null +++ b/tests/docs/test_01_functions.py @@ -0,0 +1,21 @@ +from iter_pipes.main import PipelineFactory + + +def minus(item: int) -> int: + return -item + + +def to_str(item: int) -> str: + return str(item) + + +def test_main(): + p = ( + PipelineFactory[int]() # + .map(minus) + .map(to_str) + .process(range(10)) + .to_list() + ) + + assert p == ["0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9"] diff --git a/tests/docs/test_02_classes.py b/tests/docs/test_02_classes.py new file mode 100644 index 0000000..f585421 --- /dev/null +++ b/tests/docs/test_02_classes.py @@ -0,0 +1,30 @@ +from iter_pipes.main import PipelineFactory + +# you can use classes instead of functions +# this is useful for: +# - store state +# - dependency injection +# - break down complex logic into smaller pieces + + +class ToStr: + def __call__(self, item: int) -> str: + return str(item) + + +class Multiply: + def __init__(self, value: int): + self.value = value + + def __call__(self, item: int) -> int: + return item * self.value + + +def test_main(): + p = ( + PipelineFactory[int]() # + .map(Multiply(-1)) + .map(ToStr()) + )(range(10)).to_list() + + assert p == ["0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9"] diff --git a/tests/docs/test_03_batches.py b/tests/docs/test_03_batches.py new file mode 100644 index 0000000..c8a5fd1 --- /dev/null +++ b/tests/docs/test_03_batches.py @@ -0,0 +1,28 @@ +from functools import reduce + +from iter_pipes.main import PipelineFactory + +# you can use process events by batches +# especially useful to batch db requests / network calls + + +class ToStr: + def __call__(self, item: int) -> str: + return str(item) + + +class MultiplyTogether: + def __call__(self, items: list[int]) -> list[int]: + return [reduce(lambda x, y: x * y, items, 1) for _ in items] + + +def 
test_main(): + p = ( + PipelineFactory[int]() # + .batch(MultiplyTogether(), batch_size=3) + .map(ToStr()) + .process(range(10)) + .to_list() + ) + + assert p == ["0", "0", "0", "60", "60", "60", "336", "336", "336", "9"] diff --git a/tests/docs/test_04_split.py b/tests/docs/test_04_split.py new file mode 100644 index 0000000..54dba66 --- /dev/null +++ b/tests/docs/test_04_split.py @@ -0,0 +1,36 @@ +from iter_pipes.main import PipelineFactory + + +def minus(item: int) -> int: + return -item + + +class Add: + def __init__(self, value: int): + self.value = value + + def __call__(self, item: int) -> int: + return item + self.value + + +def join(item: tuple[int | None, int | None]) -> str: + return f"{item[0]}:{item[1]}" + + +def to_str(item: int) -> str: + return str(item) + + +def test_main(): + p = ( + PipelineFactory[int]() # + .map(minus) + .split( + lambda pipeline: pipeline.map(Add(1)), + lambda pipeline: pipeline.map(Add(2)).map(minus), + ) + .process(range(10)) + .to_list() + ) + + assert p == [1, -2, 0, -1, -1, 0, -2, 1, -3, 2, -4, 3, -5, 4, -6, 5, -7, 6, -8, 7] diff --git a/tests/docs/test_05_filters.py b/tests/docs/test_05_filters.py new file mode 100644 index 0000000..0950126 --- /dev/null +++ b/tests/docs/test_05_filters.py @@ -0,0 +1,32 @@ +from iter_pipes.main import PipelineFactory + + +def minus(item: int) -> int: + return -item + + +def lte_4(item: int) -> bool: + return item <= 4 + + +class Add: + def __init__(self, value: int): + self.value = value + + def __call__(self, item: int) -> int: + return item + self.value + + +def test_main(): + p = ( + PipelineFactory[int]() # + .map(minus) + .split( + lambda pipeline: pipeline.map(Add(1)), + lambda pipeline: pipeline.map(Add(2)).map(minus).filter(lte_4), + ) + .process(range(9)) + .to_list() + ) + + assert p == [1, -2, 0, -1, -1, 0, -2, 1, -3, 2, -4, 3, -5, 4, -6, -7] diff --git a/tests/docs/test_06_pipe_overload.py b/tests/docs/test_06_pipe_overload.py new file mode 100644 index 0000000..811e776 --- 
/dev/null +++ b/tests/docs/test_06_pipe_overload.py @@ -0,0 +1,33 @@ +import iter_pipes.functional as itp +from iter_pipes.main import PipelineFactory + +# support the pipe operator (|) overload +# in particular, the auto-formatting in subpipelines is way prettier + + +def minus(item: int) -> int: + return -item + + +def lte_4(item: int) -> bool: + return item <= 4 + + +def test_main(): + p = ( + PipelineFactory[int]() # + .map(minus) + .split( + lambda pipeline: pipeline # + | itp.map(minus) + | itp.filter(lte_4), + lambda pipeline: pipeline # + | itp.map(minus) + | itp.map(minus) + | itp.filter(lte_4), + ) + .process(range(9)) + .to_list() + ) + + assert p == [0, 0, 1, -1, 2, -2, 3, -3, 4, -4, -5, -6, -7, -8] diff --git a/tests/docs/test_07_for.py b/tests/docs/test_07_for.py new file mode 100644 index 0000000..687efae --- /dev/null +++ b/tests/docs/test_07_for.py @@ -0,0 +1,31 @@ +from iter_pipes.main import PipelineFactory + +# `for` / `for_each` allow you to run functions at certain points of the +# pipeline, but ignore the return value + + +def minus(item: int) -> int: + return -item + + +def to_str(item: int) -> str: + return str(item) + + +def test_main(): + myset = set() + + def concatenate_and_print(items: list[int]) -> None: + myset.add("".join([str(i) for i in items])) + + p = ( + PipelineFactory[int]() # + .map(minus) + .for_each(to_str) + .map(minus) # still an iterable of int + .for_batch(concatenate_and_print, batch_size=3) + .map(to_str) + )(range(10)).to_list() + + assert p == ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"] + assert myset == {"012", "345", "678", "9"} diff --git a/tests/docs/test_08_resumability.py b/tests/docs/test_08_resumability.py new file mode 100644 index 0000000..0ccb108 --- /dev/null +++ b/tests/docs/test_08_resumability.py @@ -0,0 +1,74 @@ +from iter_pipes.functional import filter +from iter_pipes.main import PipelineFactory + +# resumability: +# when the steps change the size of the iterables in a subpipeline or a split,
it's +# tricky to avoid memory leaks. +# for example, if a subpipeline is having a filter that filters out all the items, +# the subpipeline will consume all the messages without ever releasing the thread to +# the main pipeline. The messages will be stored in the inflights buffer for the +# main pipeline, and the main pipeline will never be able to consume them. +# +# The solution is to have a max_inflight parameter that will limit the number of +# inflight messages. When the number of inflight messages reaches the limit, the +# subpipeline will be "paused" to let the main pipeline consume some messages. +# The subpipeline will then be "resumed". +# +# The "pause" and "resume" behavior is implemented by stopping the subpipeline +# source iterator, and then re-applying the subpipeline steps to the source iterator + + +def minus(item: int) -> int: + return -item + + +class Add: + def __init__(self, value: int): + self.value = value + + def __call__(self, item: int) -> int: + return item + self.value + + +def join(item: tuple[int | None, int | None]) -> str: + return f"{item[0]}:{item[1]}" + + +def counter(): + memory = set() + + def inc(x: int) -> None: + memory.add(x) + + def dec(x: int) -> None: + memory.remove(x) + + def get() -> int: + return len(memory) + + return inc, dec, get + + +def return_false(_item: int) -> bool: + return False + + +def test_main(): + max_inflight = 30 + inc, dec, get = counter() + p = ( + PipelineFactory[int]() # + .for_each(inc) + .subpipeline( + lambda pipeline: pipeline # + | filter(return_false), + max_inflight=max_inflight, + ) + .for_each(dec) + .process(range(10**3)) + ) + result = [] + for x in p: + assert get() <= max_inflight + result.append(x) + assert result == list(range(10**3)) diff --git a/tests/test_consume.py b/tests/test_consume.py new file mode 100644 index 0000000..1f52a8b --- /dev/null +++ b/tests/test_consume.py @@ -0,0 +1,18 @@ +from iter_pipes import PipelineFactory + + +def even_or_none(item: int) -> int | None: 
+ return item if item % 2 == 0 else None + + +def test_consume(): + queue = set() + ( + PipelineFactory[int]() + .map(even_or_none) + .filter_not_none() + .for_each(lambda item: queue.add(item)) + .process(range(10)) + .consume() + ) + assert queue == {0, 2, 4, 6, 8} diff --git a/tests/test_filter_typeguard.py b/tests/test_filter_typeguard.py new file mode 100644 index 0000000..8e72604 --- /dev/null +++ b/tests/test_filter_typeguard.py @@ -0,0 +1,17 @@ +from iter_pipes import PipelineFactory + + +def even_or_none(item: int) -> int | None: + return item if item % 2 == 0 else None + + +def test_filter_typeguard(): + to_list = ( + PipelineFactory[int]() + .map(even_or_none) + .filter_not_none() + .process(range(10)) + .to_list + ) + _a: list[int] = to_list() # type hinting removed the None + assert _a == [0, 2, 4, 6, 8] diff --git a/tests/test_fork.py b/tests/test_fork.py new file mode 100644 index 0000000..1bfaccb --- /dev/null +++ b/tests/test_fork.py @@ -0,0 +1,40 @@ +from iter_pipes.functional import filter, map +from iter_pipes.main import PipelineFactory + + +def minus(item: int) -> int: + return -item + + +def to_str(item: int) -> str: + return str(item) + + +class Add: + def __init__(self, value: int): + self.value = value + + def __call__(self, item: int) -> int: + return item + self.value + + +def filter_multiples_of_3(item: int) -> bool: + return item % 3 == 0 + + +def test_inflight_balancing(): + max_inflight = 3 + p = ( + PipelineFactory[int]() + .split( + lambda pipeline: pipeline # + | filter(filter_multiples_of_3) + | map(to_str), + lambda pipeline: pipeline # + | map(minus), + max_inflight=max_inflight, + ) + .process(range(12)) + .to_list() + ) + assert p == ["0", 0, -1, -2, "3", -3, -4, -5, "6", -6, -7, -8, "9", -9, -10, -11] diff --git a/tests/test_processing_errors.py b/tests/test_processing_errors.py new file mode 100644 index 0000000..ac67417 --- /dev/null +++ b/tests/test_processing_errors.py @@ -0,0 +1,14 @@ +import pytest + +from 
iter_pipes.main import PipelineFactory + + +def test_empty_step(): + with pytest.raises(ValueError): # noqa + PipelineFactory[int]().process() + + with pytest.raises(ValueError): # noqa + PipelineFactory[int]().map(lambda x: x).process() + + with pytest.raises(ValueError): # noqa + PipelineFactory[int]().process(range(10))