
Commit

[pre-commit.ci] pre-commit autoupdate (#1231)
* [pre-commit.ci] pre-commit autoupdate

updates:
- [github.com/psf/black: 22.12.0 → 23.1.0](psf/black@22.12.0...23.1.0)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
pre-commit-ci[bot] authored Feb 7, 2023
1 parent 9091c04 commit bb4ef93
Showing 38 changed files with 2 additions and 56 deletions.
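Nearly all of the deletions below are blank lines: black 23.1.0 ships an updated stable style that removes empty lines at the start of an indented block, so the autofix run stripped the blank line many functions in this repository carried directly after their def line. A minimal, self-contained Python sketch of that style change (the function is illustrative, not taken from the repository):

# Accepted input; black 22.12.0 keeps the blank line at the top of the body.
def example():

    return 1

# black 23.1.0 removes the empty line at the start of the block.
def example():
    return 1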
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
        types_or: [python, pyi]

  - repo: https://github.com/psf/black
-   rev: 22.12.0
+   rev: 23.1.0
    hooks:
      - id: black
        types: [file]
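For reference, pre-commit.ci creates this kind of version bump automatically; it can usually be reproduced locally by running "pre-commit autoupdate" (which rewrites the rev entries shown above) followed by "pre-commit run --all-files" to let the updated black hook reformat the code, assuming pre-commit is installed in the environment.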
6 changes: 0 additions & 6 deletions benchmark/pytest/analysis/test_optimization_group.py
@@ -99,7 +99,6 @@ def setup_optimization_group(scheme):


def test_benchmark_align_data(benchmark):

model = setup_model(False, True)
assert model.valid()

@@ -111,7 +110,6 @@ def test_benchmark_align_data(benchmark):
@pytest.mark.parametrize("link_clp", [True, False])
@pytest.mark.parametrize("index_dependent", [True, False])
def test_benchmark_calculate_matrix(benchmark, link_clp, index_dependent):

model = setup_model(index_dependent, link_clp)
assert model.valid()

@@ -124,7 +122,6 @@ def test_benchmark_calculate_matrix(benchmark, link_clp, index_dependent):
@pytest.mark.parametrize("link_clp", [True, False])
@pytest.mark.parametrize("index_dependent", [True, False])
def test_benchmark_calculate_residual(benchmark, link_clp, index_dependent):

model = setup_model(index_dependent, link_clp)
assert model.valid()

@@ -139,7 +136,6 @@ def test_benchmark_calculate_residual(benchmark, link_clp, index_dependent):
@pytest.mark.parametrize("link_clp", [True, False])
@pytest.mark.parametrize("index_dependent", [True, False])
def test_benchmark_calculate_result_data(benchmark, link_clp, index_dependent):

model = setup_model(index_dependent, link_clp)
assert model.valid()

@@ -155,7 +151,6 @@ def test_benchmark_calculate_result_data(benchmark, link_clp, index_dependent):
@pytest.mark.parametrize("link_clp", [True, False])
@pytest.mark.parametrize("index_dependent", [True, False])
def test_benchmark_optimize_20_runs(benchmark, link_clp, index_dependent):

model = setup_model(index_dependent, link_clp)
assert model.valid()

@@ -164,7 +159,6 @@ def test_benchmark_optimize_20_runs(benchmark, link_clp, index_dependent):

@benchmark
def run():

for _ in range(20):
optimization_group.calculate(scheme.parameters)

1 change: 0 additions & 1 deletion glotaran/builtin/io/sdt/sdt_file_reader.py
@@ -99,7 +99,6 @@ def load_dataset(
)

if flim:

if orig_time_axis_index != 2:
np.swapaxes(raw_data, 2, orig_time_axis_index)

1 change: 0 additions & 1 deletion glotaran/builtin/io/sdt/test/test_file_readers.py
@@ -16,7 +16,6 @@
)
@pytest.mark.filterwarnings("ignore:There was no `index`:UserWarning")
def test_read_sdt(test_file_path, result_file_path, index):

sdt_reader = SdtDataIo("sdt")
test_dataset = sdt_reader.load_dataset(test_file_path, index=index)
result_df = pd.read_csv(
2 changes: 0 additions & 2 deletions glotaran/builtin/io/yml/test/test_model_parser.py
@@ -143,7 +143,6 @@ def test_k_matrices(model):


def test_weight(model):

weight = model.weights[0]
assert isinstance(weight, Weight)
assert weight.datasets == ["d1", "d2"]
@@ -153,7 +152,6 @@


def test_shapes(model):

assert "shape1" in model.shape

shape = model.shape["shape1"]
@@ -11,7 +11,6 @@


def test_clp_guide():

model = Model.create_class_from_megacomplexes(
[DecaySequentialMegacomplex, ClpGuideMegacomplex]
)(
@@ -122,7 +122,6 @@ def _calculate_coherent_artifact_matrix(

@nb.jit(nopython=True, parallel=True)
def _calculate_coherent_artifact_matrix_on_index(matrix, center, width, axis, order):

matrix[:, 0] = np.exp(-1 * (axis - center) ** 2 / (2 * width**2))
if order > 1:
matrix[:, 1] = matrix[:, 0] * (center - axis) / width**2
@@ -70,7 +70,6 @@ def test_coherent_artifact(spectral_dependence: str):
["irf_disp2", 0.001, {"vary": False, "non-negative": False}],
]
elif spectral_dependence == "shifted":

irf_spec["shift"] = ["irf_shift1", "irf_shift2", "irf_shift3"]
parameter_list += [
["irf_shift1", -2],
@@ -77,7 +77,6 @@ def calculate_matrix(
model_axis: ArrayLike,
**kwargs,
):

clp_label = [f"{label}_cos" for label in self.labels] + [
f"{label}_sin" for label in self.labels
]
@@ -191,7 +190,6 @@ def finalize_data(

@nb.jit(nopython=True, parallel=True)
def calculate_damped_oscillation_matrix_no_irf(matrix, frequencies, rates, axis):

idx = 0
for frequency, rate in zip(frequencies, rates):
osc = np.exp(-rate * axis - 1j * frequency * axis)
@@ -343,7 +343,6 @@ class OneOscillationWithSequentialModel:
],
)
def test_doas_model(suite):

print(suite.sim_model.validate())
assert suite.sim_model.valid()

@@ -253,7 +253,6 @@ class ThreeComponentSequential:
)
@pytest.mark.parametrize("nnls", [True, False])
def test_kinetic_model(suite, nnls):

model = suite.model
print(model.validate())
assert model.valid()
3 changes: 0 additions & 3 deletions glotaran/builtin/megacomplexes/decay/test/test_k_matrix.py
@@ -184,7 +184,6 @@ class ParallelModelWithEquilibria:
[SequentialModel, SequentialModelWithBacktransfer, ParallelModel, ParallelModelWithEquilibria],
)
def test_a_matrix_general(matrix):

params = Parameters.from_list(matrix.params)

mat = KMatrix(label="", matrix=matrix.matrix)
@@ -218,7 +217,6 @@ def test_a_matrix_general(matrix):


def test_a_matrix_sequential():

compartments = ["s1", "s2", "s3"]
matrix = {
("s2", "s1"): "1",
@@ -262,7 +260,6 @@ def test_a_matrix_sequential():


def test_combine_matrices():

matrix1 = {
("s1", "s1"): "1",
("s2", "s2"): "2",
@@ -184,7 +184,6 @@ class MultiCenterIrfDispersion:
],
)
def test_spectral_irf(suite):

model = suite.model
assert model.valid(), model.validate()

3 changes: 0 additions & 3 deletions glotaran/builtin/megacomplexes/decay/util.py
@@ -47,7 +47,6 @@ def calculate_matrix(
model_axis: ArrayLike,
**kwargs,
):

compartments = megacomplex.get_compartments(dataset_model)
initial_concentration = megacomplex.get_initial_concentration(dataset_model)
k_matrix = megacomplex.get_k_matrix()
@@ -160,7 +159,6 @@ def decay_matrix_implementation_index_independent(
dataset_model: DatasetModel,
):
if isinstance(dataset_model.irf, IrfMultiGaussian):

(
centers,
widths,
@@ -285,7 +283,6 @@ def retrieve_initial_concentration(
dataset: xr.Dataset,
species_dimension: str,
):

if (
not hasattr(dataset_model, "initial_concentration")
or dataset_model.initial_concentration is None
@@ -36,7 +36,6 @@ def calculate_matrix(
model_axis: ArrayLike,
**kwargs,
):

compartments = []
for compartment in self.shape:
if compartment in compartments:
@@ -250,7 +250,6 @@ class ThreeCompartmentModel:
],
)
def test_spectral_model(suite):

model = suite.spectral_model
print(model.validate())
assert model.valid()
1 change: 0 additions & 1 deletion glotaran/builtin/megacomplexes/test/test_spectral_decay.py
@@ -241,7 +241,6 @@ class ThreeComponentSequential:
)
@pytest.mark.parametrize("nnls", [True, False])
def test_decay_model(suite, nnls):

model = suite.model
print(model.validate())
assert model.valid()
@@ -177,7 +177,6 @@ class ThreeComponentSequential:
)
@pytest.mark.parametrize("nnls", [True, False])
def test_kinetic_model(suite, nnls):

model = suite.model
print(model.validate())
assert model.valid()
1 change: 0 additions & 1 deletion glotaran/cli/commands/explore.py
@@ -57,7 +57,6 @@ def export(filename: str, select, out: str, name: str):
stop = True

while not stop:

echo(f"Selected dataset '{name}'.")
echo(f"\nDataset Content\n\n{data}\n")

1 change: 0 additions & 1 deletion glotaran/cli/commands/util.py
@@ -80,7 +80,6 @@ def loader(filename):


def select_name(filename, dataset):

names = list(dataset)
echo(f"\nDataset names in in '{filename}':\n")
for i, n in enumerate(names):
1 change: 0 additions & 1 deletion glotaran/cli/commands/validate.py
@@ -13,7 +13,6 @@ def validate_cmd(parameters_file: str, model_file: str, scheme_file: str):
return

if model_file is not None:

model = util.load_model_file(model_file, verbose=True)
parameters = None
if parameters_file is not None:
1 change: 0 additions & 1 deletion glotaran/cli/main.py
@@ -11,7 +11,6 @@ class Cli(click.Group):
"""The glotaran CLI implementation of :class:`click.group`"""

def __init__(self, *args, **kwargs):

click.echo(
"[DEPRECATED] The pyglotaran command line interface will be removed without "
"replacement in version '0.8.0' since it lacks a lot of essential functionality and "
1 change: 0 additions & 1 deletion glotaran/deprecation/deprecation_utils.py
@@ -752,7 +752,6 @@ def deprecate_submodule(
)

def warn_getattr(attribute_name: str):

if attribute_name == "__file__":
return new_module.__file__

1 change: 0 additions & 1 deletion glotaran/deprecation/modules/test/test_changed_imports.py
@@ -16,7 +16,6 @@


def check_recwarn(records: WarningsRecorder, warn_nr=1):

for record in records:
print(record)

3 changes: 0 additions & 3 deletions glotaran/deprecation/test/test_deprecation_utils.py
@@ -494,7 +494,6 @@ def test_deprecate_module_attribute():
"""Same code as the original import and warning"""

with pytest.warns(GlotaranApiDeprecationWarning) as record:

from glotaran.deprecation.test.dummy_package.deprecated_module_attribute import (
deprecated_attribute,
)
@@ -508,7 +507,6 @@ def test_deprecate_module_attribute_overwrite():
"""Qualname was only used for the warning"""

with pytest.warns(GlotaranApiDeprecationWarning) as record:

from glotaran.deprecation.test.dummy_package.deprecated_module_attribute import foo_bar

assert foo_bar.__code__ == parse_version.__code__
@@ -560,7 +558,6 @@ def test_deprecate_submodule_import_error(recwarn: WarningsRecorder):
"""Raise warning when Attribute of fake module is imported"""

with pytest.raises(ImportError) as excinfo:

from glotaran.deprecation.test.dummy_package.deprecated_module import ( # noqa: F401
does_not_exists,
)
2 changes: 0 additions & 2 deletions glotaran/model/megacomplex.py
@@ -14,7 +14,6 @@
from glotaran.plugin_system.megacomplex_registration import register_megacomplex

if TYPE_CHECKING:

from glotaran.model import DatasetModel
from glotaran.typing.types import ArrayLike

@@ -42,7 +41,6 @@ def megacomplex(
"""

def decorator(cls):

megacomplex_type = item(cls)
megacomplex_type.__dataset_model_type__ = dataset_model_type
megacomplex_type.__is_exclusive__ = exclusive
1 change: 0 additions & 1 deletion glotaran/model/test/test_model.py
@@ -153,7 +153,6 @@ def test_model_create_class():


def test_global_items():

m = Model.create_class([])(
**{
"clp_penalties": [
3 changes: 0 additions & 3 deletions glotaran/optimization/data_provider.py
@@ -53,7 +53,6 @@ def __init__(self, scheme: Scheme, dataset_group: DatasetGroup):
self._global_dimensions: dict[str, str] = {}

for label, dataset_model in dataset_group.dataset_models.items():

dataset = scheme.data[label]
model_dimension = get_dataset_model_model_dimension(dataset_model)
self._model_axes[label] = dataset.coords[model_dimension].data
@@ -197,7 +196,6 @@ def add_model_weight(
),
)
for model_weight in model_weights:

idx = {}
if model_weight.global_interval is not None:
idx[global_dimension] = self.get_axis_slice_from_interval(
@@ -499,7 +497,6 @@ def create_aligned_global_axes(self, scheme: Scheme) -> dict[str, ArrayLike]:
aligned_axis_values = None
aligned_global_axes = {}
for label, global_axis in self._global_axes.items():

aligned_global_axis = global_axis
if aligned_axis_values is None:
aligned_axis_values = aligned_global_axis
1 change: 0 additions & 1 deletion glotaran/optimization/matrix_provider.py
@@ -389,7 +389,6 @@ def apply_relations(
if relation.target in clp_labels and self.does_interval_item_apply(
relation, index
):

if relation.source not in clp_labels:
continue

1 change: 0 additions & 1 deletion glotaran/optimization/optimization_history.py
@@ -10,7 +10,6 @@
from glotaran.utils.regex import RegexPattern

if TYPE_CHECKING:

from glotaran.typing import StrOrPath


1 change: 0 additions & 1 deletion glotaran/optimization/test/models.py
@@ -28,7 +28,6 @@ def calculate_matrix(
model_axis: ArrayLike,
**kwargs,
):

compartments = ["s1", "s2"]
array = np.zeros((model_axis.size, len(compartments)))

2 changes: 1 addition & 1 deletion glotaran/optimization/test/test_optimization.py
@@ -216,7 +216,7 @@ def test_result_data(model_weight: bool, index_dependent: bool):
        ("fitted_data", ("model", "global")),
    ]

-   for (label, dims) in wanted:
+   for label, dims in wanted:
        print("Check label", label)
        assert label in result_data
        print("Check dims", result_data[label].dims, dims)
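Aside from the rev bump in .pre-commit-config.yaml, this hunk is the only visible change that is not a deleted blank line: the parentheses around the unpacked loop target were redundant, so the formatter run dropped them without changing behaviour. A small self-contained sketch (the wanted list is a one-entry stand-in built from the context line above):

wanted = [("fitted_data", ("model", "global"))]

# Iterates exactly like "for (label, dims) in wanted:"; the newer style simply omits the parentheses.
for label, dims in wanted:
    print(label, dims)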
1 change: 0 additions & 1 deletion glotaran/parameter/parameter.py
@@ -25,7 +25,6 @@
from glotaran.utils.sanitize import sanitize_parameter_list

if TYPE_CHECKING:

from glotaran.parameter import Parameters

RESERVED_LABELS: list[str] = list(asteval.make_symbol_table().keys()) + ["parameters", "iteration"]