Bump mypy from 0.991 to 1.0.0 in /requirements (microsoft#1089)
* Bump mypy from 0.991 to 1.0.0 in /requirements

Bumps [mypy](https://github.com/python/mypy) from 0.991 to 1.0.0.
- [Release notes](https://github.com/python/mypy/releases)
- [Commits](https://github.com/python/mypy/compare/v0.991...v1.0.0)

---
updated-dependencies:
- dependency-name: mypy
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <[email protected]>

* Bump upper bound

* mypy fixes

* Ignore undocumented base class

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Adam J. Stewart <[email protected]>
dependabot[bot] and adamjstewart authored Feb 6, 2023
1 parent 002fdac commit 76e9442
Showing 15 changed files with 34 additions and 31 deletions.
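
Most of the diff below is mypy 1.0 housekeeping: the repo runs mypy with `--strict`, which implies `--warn-unused-ignores`, so any `# type: ignore[...]` comment that newer type stubs have made unnecessary is itself reported as an error and must be deleted (and, conversely, newly untyped calls need a fresh error-code-scoped ignore). A minimal sketch of the mechanism, not taken from the repo:

import torch

# An error-code-scoped suppression silences exactly one diagnostic.
# Under --strict, --warn-unused-ignores flags this comment as an error
# as soon as a stub release makes the call type-check cleanly, which is
# why this commit both adds and removes ignores.
with torch.inference_mode():  # type: ignore[no-untyped-call]
    y = torch.zeros(3)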
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -30,7 +30,7 @@ repos:
       additional_dependencies: ["toml"]
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.991
+    rev: v1.0.0
     hooks:
       - id: mypy
         args: [--strict, --ignore-missing-imports, --show-error-codes]
1 change: 1 addition & 0 deletions docs/conf.py
@@ -58,6 +58,7 @@
 nitpick_ignore = [
     # Undocumented classes
     ("py:class", "kornia.augmentation._2d.intensity.base.IntensityAugmentationBase2D"),
+    ("py:class", "kornia.augmentation.base._AugmentationBase"),
     ("py:class", "segmentation_models_pytorch.base.model.SegmentationModel"),
     ("py:class", "timm.models.resnet.ResNet"),
     ("py:class", "timm.models.vision_transformer.VisionTransformer"),
3 changes: 1 addition & 2 deletions evaluate.py
@@ -119,7 +119,7 @@ def run_eval_loop(
             }
             for i in range(len(batch["image"]))
         ]
-        with torch.inference_mode():
+        with torch.inference_mode():  # type: ignore[no-untyped-call]
             y_pred = model(x)
         metrics(y_pred, y)
     results = metrics.compute()
@@ -143,7 +143,6 @@ def main(args: argparse.Namespace) -> None:
     # Loads the saved model from checkpoint based on the `args.task` name that was
     # passed as input
     model = TASK.load_from_checkpoint(args.input_checkpoint)
-    model = cast(pl.LightningModule, model)
     model.freeze()
     model.eval()
 
2 changes: 1 addition & 1 deletion requirements/tests.txt
@@ -1,5 +1,5 @@
 # tests
-mypy==0.991
+mypy==1.0.0
 nbmake==1.3.5
 pytest==7.2.1
 pytest-cov==4.0.0
2 changes: 1 addition & 1 deletion setup.cfg
@@ -121,7 +121,7 @@ style =
     pyupgrade>=2.4,<4
 tests =
     # mypy 0.900+ required for pyproject.toml support
-    mypy>=0.900,<0.992
+    mypy>=0.900,<2
     # nbmake 0.1+ required to fix path_source bug
     nbmake>=0.1,<2
     # pytest 6.1.2+ required by nbmake
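
Loosening `<0.992` to `<2` is the usual post-1.0 move: every 1.x release is now admitted while a hypothetical mypy 2 stays excluded. A quick check of the specifier with the `packaging` library (illustrative, not part of the repo):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=0.900,<2")
for v in ("0.991", "1.0.0", "1.8.0", "2.0.0"):
    # Version objects support containment tests against a SpecifierSet
    print(v, Version(v) in spec)  # True, True, True, False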
12 changes: 6 additions & 6 deletions tests/models/test_changestar.py
@@ -17,7 +17,7 @@
 
 
 class TestChangeStar:
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     def test_changestar_farseg_classes(self) -> None:
         model = ChangeStarFarSeg(
             classes=4, backbone="resnet50", backbone_pretrained=False
@@ -27,7 +27,7 @@ def test_changestar_farseg_classes(self) -> None:
 
         assert y["bi_seg_logit"].shape[2] == 4
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     def test_changestar_farseg_output_size(self) -> None:
         model = ChangeStarFarSeg(
             classes=4, backbone="resnet50", backbone_pretrained=False
@@ -55,7 +55,7 @@ def test_invalid_changestar_farseg_backbone(self) -> None:
         with pytest.raises(ValueError, match=match):
             ChangeStarFarSeg(classes=4, backbone="anynet", backbone_pretrained=False)
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     @pytest.mark.parametrize(
         "inc,innerc,nc,sf", list(itertools.product(IN_CHANNELS, INNNR_CHANNELS, NC, SF))
     )
@@ -70,7 +70,7 @@ def test_changemixin_output_size(
         assert y[0].shape == y[1].shape
         assert y[0].shape == (3, 1, 32 * sf, 32 * sf)
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     def test_changestar(self) -> None:
         dense_feature_extractor = nn.modules.Sequential(
             nn.modules.Conv2d(3, 32, 3, 1, 1),
@@ -97,7 +97,7 @@ def test_changestar(self) -> None:
         assert y["bi_seg_logit"].shape == (3, 2, 2, 64, 64)
         assert y["change_prob"].shape == (3, 1, 64, 64)
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     def test_changestar_invalid_inference_mode(self) -> None:
         dense_feature_extractor = nn.modules.Sequential(
             nn.modules.Conv2d(3, 32, 3, 1, 1),
@@ -122,7 +122,7 @@ def test_changestar_invalid_inference_mode(self) -> None:
             inference_mode="random",
         )
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     @pytest.mark.parametrize("inference_mode", ["t1t2", "t2t1", "mean"])
     def test_changestar_inference_output_size(self, inference_mode: str) -> None:
         dense_feature_extractor = nn.modules.Sequential(
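
Every `# type: ignore[misc]` removed in this and the two test modules below guarded the same construct: `@torch.no_grad()` used as a decorator, which older torch stubs left untyped. By this torch release the decorator preserves the wrapped function's signature, so strict mode passes without suppression. A standalone sketch, not repo code:

import torch


@torch.no_grad()  # formerly needed `# type: ignore[misc]` under --strict
def run_forward(model: torch.nn.Module, x: torch.Tensor) -> torch.Tensor:
    # Gradients are disabled inside the decorated call; the typed decorator
    # returns a callable with the same signature, keeping mypy satisfied.
    return model(x)


x = torch.ones(1, 2, requires_grad=True)
print(run_forward(torch.nn.Linear(2, 2), x).requires_grad)  # False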
2 changes: 1 addition & 1 deletion tests/models/test_farseg.py
@@ -9,7 +9,7 @@
 
 
 class TestFarSeg:
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     @pytest.mark.parametrize(
         "backbone,pretrained",
         [
8 changes: 4 additions & 4 deletions tests/models/test_fcsiam.py
@@ -14,7 +14,7 @@
 
 
 class TestFCSiamConc:
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     @pytest.mark.parametrize("b, c", list(itertools.product(BATCH_SIZE, CHANNELS)))
     def test_in_channels(self, b: int, c: int) -> None:
         classes = 2
@@ -24,7 +24,7 @@ def test_in_channels(self, b: int, c: int) -> None:
         y = model(x)
         assert y.shape == (b, classes, h, w)
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     @pytest.mark.parametrize("b, classes", list(itertools.product(BATCH_SIZE, CLASSES)))
     def test_classes(self, b: int, classes: int) -> None:
         t, c, h, w = 2, 3, 64, 64
@@ -35,7 +35,7 @@ def test_classes(self, b: int, classes: int) -> None:
 
 
 class TestFCSiamDiff:
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
     @pytest.mark.parametrize("b, c", list(itertools.product(BATCH_SIZE, CHANNELS)))
     def test_in_channels(self, b: int, c: int) -> None:
         classes = 2
@@ -45,7 +45,7 @@ def test_in_channels(self, b: int, c: int) -> None:
         y = model(x)
         assert y.shape == (b, classes, h, w)
 
-    @torch.no_grad()  # type: ignore[misc]
+    @torch.no_grad()
    @pytest.mark.parametrize("b, classes", list(itertools.product(BATCH_SIZE, CLASSES)))
     def test_classes(self, b: int, classes: int) -> None:
         t, c, h, w = 2, 3, 64, 64
2 changes: 1 addition & 1 deletion torchgeo/datasets/utils.py
@@ -89,7 +89,7 @@ def __enter__(self) -> Any:
         except ImportError:
             # Only supports normal zip files
             # https://github.com/python/mypy/issues/1153
-            import zipfile  # type: ignore[no-redef]
+            import zipfile
 
         return zipfile.ZipFile(*self.args, **self.kwargs)
 
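
The dropped `no-redef` ignore guarded a conditional-import fallback, which mypy 1.0 now accepts without complaint. The surrounding pattern is roughly the sketch below; the `zipfile_deflate64` branch is an assumption inferred from the hunk's comments, not copied from the repo:

from typing import Any


class _Zip:
    """Open a zip file, preferring a Deflate64-capable backend if installed."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        self.args = args
        self.kwargs = kwargs

    def __enter__(self) -> Any:
        try:
            import zipfile_deflate64 as zipfile  # assumed extended backend
        except ImportError:
            # Only supports normal zip files
            # https://github.com/python/mypy/issues/1153
            import zipfile
        self._file = zipfile.ZipFile(*self.args, **self.kwargs)
        return self._file

    def __exit__(self, *exc: Any) -> None:
        self._file.close()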
2 changes: 1 addition & 1 deletion torchgeo/trainers/byol.py
@@ -337,7 +337,7 @@ def __init__(self, **kwargs: Any) -> None:
         super().__init__()
 
         # Creates `self.hparams` from kwargs
-        self.save_hyperparameters()  # type: ignore[operator]
+        self.save_hyperparameters()
         self.hyperparams = cast(Dict[str, Any], self.hparams)
 
         self.config_task()
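
The same one-line cleanup repeats in the four trainers that follow: `save_hyperparameters` is typed in the PyTorch Lightning version mypy now resolves, so the `operator` suppression had become an unused ignore. As a standalone sketch (`ExampleTask` is hypothetical):

from typing import Any, Dict, cast

import pytorch_lightning as pl


class ExampleTask(pl.LightningModule):
    def __init__(self, **kwargs: Any) -> None:
        super().__init__()
        # Creates `self.hparams` from kwargs; the call is typed upstream,
        # so no `# type: ignore[operator]` is required under mypy 1.0
        self.save_hyperparameters()
        self.hyperparams = cast(Dict[str, Any], self.hparams)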
2 changes: 1 addition & 1 deletion torchgeo/trainers/classification.py
@@ -97,7 +97,7 @@ def __init__(self, **kwargs: Any) -> None:
         super().__init__()
 
         # Creates `self.hparams` from kwargs
-        self.save_hyperparameters()  # type: ignore[operator]
+        self.save_hyperparameters()
         self.hyperparams = cast(Dict[str, Any], self.hparams)
 
         self.config_task()
2 changes: 1 addition & 1 deletion torchgeo/trainers/detection.py
@@ -164,7 +164,7 @@ def __init__(self, **kwargs: Any) -> None:
         """
         super().__init__()
         # Creates `self.hparams` from kwargs
-        self.save_hyperparameters()  # type: ignore[operator]
+        self.save_hyperparameters()
         self.hyperparams = cast(Dict[str, Any], self.hparams)
 
         self.config_task()
2 changes: 1 addition & 1 deletion torchgeo/trainers/regression.py
@@ -75,7 +75,7 @@ def __init__(self, **kwargs: Any) -> None:
         super().__init__()
 
         # Creates `self.hparams` from kwargs
-        self.save_hyperparameters()  # type: ignore[operator]
+        self.save_hyperparameters()
         self.hyperparams = cast(Dict[str, Any], self.hparams)
         self.config_task()
 
2 changes: 1 addition & 1 deletion torchgeo/trainers/segmentation.py
@@ -104,7 +104,7 @@ def __init__(self, **kwargs: Any) -> None:
         super().__init__()
 
         # Creates `self.hparams` from kwargs
-        self.save_hyperparameters()  # type: ignore[operator]
+        self.save_hyperparameters()
         self.hyperparams = cast(Dict[str, Any], self.hparams)
 
         if not isinstance(kwargs["ignore_index"], (int, type(None))):
21 changes: 12 additions & 9 deletions torchgeo/transforms/transforms.py
@@ -5,11 +5,10 @@
 
 from typing import Any, Dict, List, Optional, Tuple, Union
 
-import kornia
+import kornia.augmentation as K
 import torch
 from einops import rearrange
-from kornia.augmentation import GeometricAugmentationBase2D
-from kornia.augmentation.random_generator import CropGenerator
+from kornia.constants import DataKey
 from kornia.geometry import crop_by_indices
 from torch import Tensor
 from torch.nn.modules import Module
@@ -23,17 +22,21 @@ class AugmentationSequential(Module):
     Use :class:`kornia.augmentation.container.AugmentationSequential` instead.
     """
 
-    def __init__(self, *args: Module, data_keys: List[str]) -> None:
+    def __init__(
+        self,
+        *args: Union[K.base._AugmentationBase, K.ImageSequential],
+        data_keys: List[str],
+    ) -> None:
         """Initialize a new augmentation sequential instance.
 
         Args:
             *args: Sequence of kornia augmentations
-            data_keys: List of inputs to augment (e.g. ["image", "mask", "boxes"])
+            data_keys: List of inputs to augment (e.g., ["image", "mask", "boxes"])
         """
         super().__init__()
         self.data_keys = data_keys
 
-        keys = []
+        keys: List[Union[str, int, DataKey]] = []
         for key in data_keys:
             if key == "image":
                 keys.append("input")
@@ -42,7 +45,7 @@ def __init__(self, *args: Module, data_keys: List[str]) -> None:
             else:
                 keys.append(key)
 
-        self.augs = kornia.augmentation.AugmentationSequential(*args, data_keys=keys)
+        self.augs = K.AugmentationSequential(*args, data_keys=keys)
 
     def forward(self, batch: Dict[str, Tensor]) -> Dict[str, Tensor]:
         """Perform augmentations and update data dict.
@@ -84,7 +87,7 @@ def forward(self, batch: Dict[str, Tensor]) -> Dict[str, Tensor]:
         return batch
 
 
-class _RandomNCrop(GeometricAugmentationBase2D):
+class _RandomNCrop(K.GeometricAugmentationBase2D):
     """Take N random crops of a tensor."""
 
     def __init__(self, size: Tuple[int, int], num: int) -> None:
@@ -138,7 +141,7 @@ def apply_transform(
         return torch.cat(out)
 
 
-class _NCropGenerator(CropGenerator):
+class _NCropGenerator(K.random_generator.CropGenerator):
     """Generate N random crops."""
 
     def __init__(self, size: Union[Tuple[int, int], Tensor], num: int) -> None:
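
With the tightened signature, the wrapper accepts only genuine kornia augmentations (or an `ImageSequential`) instead of any `Module`, and `keys` is typed to match what `K.AugmentationSequential` expects. A rough usage sketch; the tensor shapes and the float mask dtype are assumptions rather than documented requirements:

import kornia.augmentation as K
import torch

from torchgeo.transforms import AugmentationSequential

# "image" is mapped to kornia's "input" key and "boxes" to "bbox" internally
augs = AugmentationSequential(
    K.RandomHorizontalFlip(p=0.5),
    K.RandomVerticalFlip(p=0.5),
    data_keys=["image", "mask"],
)

batch = {
    "image": torch.rand(2, 3, 64, 64),
    "mask": torch.randint(0, 2, (2, 1, 64, 64)).float(),
}
batch = augs(batch)  # image and mask receive identical geometric transforms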
