Commit

Reorganize configuration files (#352)
* Reorganize configuration files

* Undo changes to tests/conf files
adamjstewart authored Jan 8, 2022
1 parent f69bdaf commit 5136d81
Showing 43 changed files with 166 additions and 96 deletions.
12 changes: 7 additions & 5 deletions conf/bigearthnet.yaml
@@ -1,19 +1,21 @@
 trainer:
-  gpus: 1 # single GPU training
+  gpus: 1
   min_epochs: 10
   max_epochs: 40
   benchmark: True
-
 experiment:
   task: "bigearthnet"
   module:
     loss: "bce"
     classification_model: "resnet18"
     learning_rate: 1e-3
     learning_rate_schedule_patience: 6
     weights: "random"
     in_channels: 14
-  datamodule:
     num_classes: 19
-    batch_size: 128
-    num_workers: 6
+  datamodule:
+    root_dir: "data/bigearthnet"
+    bands: "all"
+    num_classes: ${experiment.module.num_classes}
+    batch_size: 128
+    num_workers: 4
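
Note: ${experiment.module.num_classes} in the new datamodule section is OmegaConf variable interpolation (the test changes at the bottom of this diff confirm these files are read with OmegaConf). A minimal standalone sketch of the mechanism, illustrative rather than TorchGeo code:

from omegaconf import OmegaConf

# num_classes is declared once under experiment.module and referenced
# from experiment.datamodule, so the two values can never drift apart.
conf = OmegaConf.create(
    {
        "experiment": {
            "module": {"num_classes": 19},
            "datamodule": {"num_classes": "${experiment.module.num_classes}"},
        }
    }
)
assert conf.experiment.datamodule.num_classes == 19

# Interpolations resolve lazily, so later overrides propagate as well.
conf.experiment.module.num_classes = 43
assert conf.experiment.datamodule.num_classes == 43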
8 changes: 5 additions & 3 deletions conf/byol.yaml
@@ -3,7 +3,6 @@ trainer:
   min_epochs: 20
   max_epochs: 100
   benchmark: True
-
 experiment:
   task: "ssl"
   name: "test_byol"
@@ -12,12 +11,15 @@ experiment:
     encoder: "resnet18"
     input_channels: 4
     imagenet_pretraining: True
+    learning_rate: 1e-3
+    learning_rate_schedule_patience: 6
   datamodule:
-    batch_size: 64
-    num_workers: 6
     root_dir: "data/chesapeake/cvpr"
     train_splits:
       - "de-train"
     val_splits:
       - "de-val"
     test_splits:
+      - "de-test"
+    batch_size: 64
+    num_workers: 4
20 changes: 13 additions & 7 deletions conf/chesapeake_cvpr.yaml
@@ -1,29 +1,35 @@
 trainer:
-  gpus: 1 # single GPU training
+  gpus: 1
   min_epochs: 20
   max_epochs: 100
   benchmark: True
-
 experiment:
   task: "chesapeake_cvpr"
   name: "chesapeake_cvpr_example"
   module:
-    loss: "ce" # cross entropy loss
+    loss: "ce"
     segmentation_model: "unet"
     encoder_name: "resnet18"
-    encoder_weights: null # use ImageNet weight initialization
+    encoder_weights: null
     encoder_output_stride: 16
-    learning_rate: 1e-2
+    learning_rate: 1e-3
     learning_rate_schedule_patience: 6
     in_channels: 4
     num_classes: 7
+    num_filters: 256
+    ignore_zeros: False
+    imagenet_pretraining: True
   datamodule:
-    batch_size: 64
-    num_workers: 6
     root_dir: "data/chesapeake/cvpr"
     train_splits:
       - "de-train"
     val_splits:
       - "de-val"
     test_splits:
       - "de-test"
+    patches_per_tile: 200
+    patch_size: 256
+    batch_size: 64
+    num_workers: 4
+    class_set: ${experiment.module.num_classes}
+    use_prior_labels: False
14 changes: 6 additions & 8 deletions conf/cowc_counting.yaml
@@ -1,18 +1,16 @@
 program: # These are the arguments that define how the train.py script works
   seed: 1337
   overwrite: True
-
+trainer:
+  gpus: 1
+  min_epochs: 15
 experiment:
   task: cowc_counting
   name: cowc_counting_test
   module:
     model: resnet18
     learning_rate: 1e-3
     learning_rate_schedule_patience: 2
     pretrained: True
   datamodule:
     root_dir: "data/cowc_counting"
     seed: 0
     batch_size: 32
     num_workers: 4
-
-trainer:
-  min_epochs: 15
-  gpus: 1
16 changes: 7 additions & 9 deletions conf/cyclone.yaml
@@ -1,18 +1,16 @@
 program: # These are the arguments that define how the train.py script works
   seed: 1337
   overwrite: True
-
+trainer:
+  gpus: 1
+  min_epochs: 15
 experiment:
   task: "cyclone"
-  name: cyclone_test
+  name: "cyclone_test"
   module:
     model: "resnet18"
     learning_rate: 1e-3
     learning_rate_schedule_patience: 2
     pretrained: True
   datamodule:
     root_dir: "data/cyclone"
     seed: 0
     batch_size: 32
     num_workers: 4
-
-trainer:
-  min_epochs: 15
-  gpus: 1
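
The reorganized files share three top-level sections: program (arguments for the train.py script), trainer (arguments passed to the PyTorch Lightning Trainer), and experiment (task name plus module and datamodule hyperparameters). A hedged sketch of how a training script might consume them; the merge with command-line overrides is an assumption, not code from this commit:

import os

from omegaconf import OmegaConf

# Load the defaults for one experiment, then let dotlist-style CLI
# arguments (e.g. trainer.min_epochs=30) override them.
conf = OmegaConf.load(os.path.join("conf", "cyclone.yaml"))
conf = OmegaConf.merge(conf, OmegaConf.from_cli())

print(conf.program.seed)        # 1337
print(conf.trainer.min_epochs)  # 15
print(conf.experiment.task)     # cyclone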
16 changes: 16 additions & 0 deletions conf/etci2021.yaml
@@ -0,0 +1,16 @@
+experiment:
+  task: "etci2021"
+  module:
+    loss: "ce"
+    segmentation_model: "unet"
+    encoder_name: "resnet18"
+    encoder_weights: "imagenet"
+    learning_rate: 1e-3
+    learning_rate_schedule_patience: 6
+    in_channels: 7
+    num_classes: 2
+    ignore_zeros: True
+  datamodule:
+    root_dir: "data/etci2021"
+    batch_size: 32
+    num_workers: 4
14 changes: 14 additions & 0 deletions conf/eurosat.yaml
@@ -0,0 +1,14 @@
+experiment:
+  task: "eurosat"
+  module:
+    loss: "ce"
+    classification_model: "resnet18"
+    learning_rate: 1e-3
+    learning_rate_schedule_patience: 6
+    weights: "random"
+    in_channels: 13
+    num_classes: 10
+  datamodule:
+    root_dir: "data/eurosat"
+    batch_size: 128
+    num_workers: 4
12 changes: 6 additions & 6 deletions conf/landcoverai.yaml
@@ -1,22 +1,22 @@
 trainer:
-  gpus: 1 # single GPU training
+  gpus: 1
   min_epochs: 20
   max_epochs: 100
   benchmark: True
-
 experiment:
   task: "landcoverai"
   module:
     loss: "ce"
-    segmentation_model: "deeplabv3+"
+    segmentation_model: "unet"
     encoder_name: "resnet18"
-    encoder_weights: null
-    encoder_output_stride: 16
+    encoder_weights: "imagenet"
     learning_rate: 1e-3
     learning_rate_schedule_patience: 6
     in_channels: 3
     num_classes: 6
+    num_filters: 256
+    ignore_zeros: False
   datamodule:
     root_dir: "data/landcoverai"
     batch_size: 32
-    num_workers: 6
+    num_workers: 4
34 changes: 23 additions & 11 deletions conf/naipchesapeake.yaml
@@ -1,13 +1,25 @@
-program: # These are experiment level arguments
-  experiment_name: naip_chesapeake_test
+program:
+  experiment_name: "naip_chesapeake_test"
   overwrite: True
-  naip_data_dir: data/naip
-  chesapeake_data_dir: data/chesapeake
-
-trainer: # These are all the arguments that will be passed to the pl.Trainer
+trainer:
   min_epochs: 15
-
-task: # These are all the arguments that will be used to create an appropriate task
-  name: naipchesapeake
-  learning_rate: 1e-3
-  learning_rate_schedule_patience: 2
+experiment:
+  task: "naipchesapeake"
+  module:
+    loss: "ce"
+    segmentation_model: "deeplabv3+"
+    encoder_name: "resnet34"
+    encoder_weights: "imagenet"
+    encoder_output_stride: 16
+    learning_rate: 1e-3
+    learning_rate_schedule_patience: 2
+    in_channels: 4
+    num_classes: 13
+    num_filters: 64
+    ignore_zeros: False
+  datamodule:
+    naip_root_dir: "data/naip"
+    chesapeake_root_dir: "data/chesapeake/BAYWIDE"
+    batch_size: 32
+    num_workers: 4
+    patch_size: 32
11 changes: 7 additions & 4 deletions conf/oscd.yaml
@@ -3,7 +3,6 @@ trainer:
   min_epochs: 20
   max_epochs: 500
   benchmark: True
-
 experiment:
   task: "oscd"
   module:
@@ -16,11 +15,15 @@ experiment:
     verbose: false
     in_channels: 26
     num_classes: 2
-    num_filters: 128
+    num_filters: 256
     ignore_zeros: True
   datamodule:
-    train_batch_size: 2
-    num_workers: 6
+    root_dir: "data/oscd"
+    batch_size: 32
+    num_workers: 4
     val_split_pct: 0.1
     bands: "all"
+    pad_size:
+      - 1028
+      - 1028
     num_patches_per_tile: 128
8 changes: 5 additions & 3 deletions conf/resisc45.yaml
@@ -1,9 +1,8 @@
 trainer:
-  gpus: 1 # single GPU training
+  gpus: 1
   min_epochs: 10
   max_epochs: 40
   benchmark: True
-
 experiment:
   task: "resisc45"
   module:
@@ -12,6 +11,9 @@ experiment:
     learning_rate: 1e-3
     learning_rate_schedule_patience: 6
     weights: "random"
+    in_channels: 3
+    num_classes: 45
   datamodule:
+    root_dir: "data/resisc45"
     batch_size: 128
-    num_workers: 6
+    num_workers: 4
29 changes: 21 additions & 8 deletions conf/sen12ms.yaml
@@ -1,11 +1,24 @@
-program: # These are experiment level arguments
+program:
   experiment_name: sen12ms_test
   overwrite: True
-
-trainer: # These are all the arguments that will be passed to the pl.Trainer
+trainer:
   min_epochs: 15
-
-task: # These are all the arguments that will be used to create an appropriate task
-  name: sen12ms
-  learning_rate: 1e-3
-  learning_rate_schedule_patience: 2
+experiment:
+  task: "sen12ms"
+  module:
+    loss: "ce"
+    segmentation_model: "unet"
+    encoder_name: "resnet18"
+    encoder_weights: null
+    encoder_output_stride: 16
+    learning_rate: 1e-3
+    learning_rate_schedule_patience: 2
+    in_channels: 15
+    num_classes: 11
+    ignore_zeros: False
+  datamodule:
+    root_dir: "data/sen12ms"
+    band_set: "all"
+    batch_size: 32
+    num_workers: 4
+    seed: 0
9 changes: 6 additions & 3 deletions conf/so2sat.yaml
@@ -1,18 +1,21 @@
 trainer:
-  gpus: 1 # single GPU training
+  gpus: 1
   min_epochs: 10
   max_epochs: 40
   benchmark: True
-
 experiment:
   task: "so2sat"
   module:
     loss: "ce"
     classification_model: "resnet18"
     learning_rate: 1e-3
     learning_rate_schedule_patience: 6
     weights: "random"
+    in_channels: 3
     num_classes: 17
   datamodule:
+    root_dir: "data/so2sat"
     batch_size: 128
-    num_workers: 6
+    num_workers: 4
+    bands: "rgb"
+    unsupervised_mode: False
14 changes: 14 additions & 0 deletions conf/ucmerced.yaml
@@ -0,0 +1,14 @@
+experiment:
+  task: "ucmerced"
+  module:
+    loss: "ce"
+    classification_model: "resnet18"
+    weights: null
+    learning_rate: 1e-3
+    learning_rate_schedule_patience: 6
+    in_channels: 3
+    num_classes: 21
+  datamodule:
+    root_dir: "data/ucmerced"
+    batch_size: 128
+    num_workers: 4
23 files renamed without changes (the test configs under conf/task_defaults/ moved to tests/conf/, as the test updates below reflect).
4 changes: 1 addition & 3 deletions tests/datamodules/test_chesapeake.py
@@ -14,9 +14,7 @@
 class TestChesapeakeCVPRDataModule:
     @pytest.fixture(scope="class")
     def datamodule(self) -> ChesapeakeCVPRDataModule:
-        conf = OmegaConf.load(
-            os.path.join("conf", "task_defaults", "chesapeake_cvpr_5.yaml")
-        )
+        conf = OmegaConf.load(os.path.join("tests", "conf", "chesapeake_cvpr_5.yaml"))
         kwargs = OmegaConf.to_object(conf.experiment.datamodule)
         kwargs = cast(Dict[str, Any], kwargs)
2 changes: 1 addition & 1 deletion tests/trainers/test_byol.py
@@ -43,7 +43,7 @@ class TestBYOLTask:
         ],
     )
     def test_trainer(self, name: str, classname: Type[LightningDataModule]) -> None:
-        conf = OmegaConf.load(os.path.join("conf", "task_defaults", name + ".yaml"))
+        conf = OmegaConf.load(os.path.join("tests", "conf", name + ".yaml"))
         conf_dict = OmegaConf.to_object(conf.experiment)
         conf_dict = cast(Dict[Any, Dict[Any, Any]], conf_dict)
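
Both test updates use the same load-and-unpack idiom: read a YAML file from the new tests/conf location, convert the experiment section to plain Python containers, and pass a sub-dictionary on as keyword arguments. A sketch of that idiom; the constructor call in the final comment is an assumption based on the ChesapeakeCVPRDataModule fixture above, not part of this diff:

import os
from typing import Any, Dict, cast

from omegaconf import OmegaConf

# Load one of the relocated test configs (file name taken from the diff above).
conf = OmegaConf.load(os.path.join("tests", "conf", "chesapeake_cvpr_5.yaml"))

# OmegaConf nodes become plain dicts/lists; cast narrows the static type.
kwargs = OmegaConf.to_object(conf.experiment.datamodule)
kwargs = cast(Dict[str, Any], kwargs)

# The kwargs then configure the component under test, e.g.:
# datamodule = ChesapeakeCVPRDataModule(**kwargs)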