Added num_threads as a command-line argument
ancestor-mithril committed Apr 29, 2024
1 parent b3d7fed commit d81721a
Showing 1 changed file with 7 additions and 4 deletions.
main.py — 11 changes: 7 additions & 4 deletions
@@ -1,17 +1,15 @@
import argparse
import json
import multiprocessing

import torch
from torch.multiprocessing import freeze_support

from utils.trainer import Trainer

if __name__ == '__main__':
freeze_support()

# Namespace(device='cuda:0', lr=0.001, bs=10, epochs=200, dataset='cifar10', scheduler='ReduceLROnPlateau',
# scheduler_params={'mode': 'min', 'factor': 0.5}, cutout=True, autoaug=True, model='preresnet', fill=0.5,
# tta=True, half=True) -> 0.0

parser = argparse.ArgumentParser(description='PyTorch Pipeline')
parser.add_argument('-device', default='cuda:0', type=str, help='device')
parser.add_argument('-lr', default=0.001, type=float, help='learning rate')
@@ -24,6 +22,7 @@
help='scheduler_params')
parser.add_argument('-model', default='preresnet18_c10', type=str, help='model')
parser.add_argument('-fill', default=None, type=float, help='fill value for transformations')
parser.add_argument('-num_threads', default=None, type=int, help='default number of threads used by pytorch')
parser.add_argument('--cutout', action='store_true', default=False, help='apply cutout')
parser.add_argument('--autoaug', action='store_true', default=False, help='apply autoaugment')
parser.add_argument('--tta', action='store_true', default=False, help='use TTA')
@@ -32,6 +31,10 @@
args = parser.parse_args()
args.scheduler_params = json.loads(args.scheduler_params.replace('\'', '"'))

if args.num_threads is None:
args.num_threads = multiprocessing.cpu_count() // 2 # use half the available threads
torch.set_num_threads(args.num_threads)

print(args)
Trainer(args).run()
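
For reference, a minimal standalone sketch (not part of the commit) of the thread-count logic this change introduces: when -num_threads is not passed, default to half the available logical CPUs, then apply the value with torch.set_num_threads. The argument name and default follow the diff above; everything else is illustrative.

    import argparse
    import multiprocessing

    import torch

    parser = argparse.ArgumentParser(description='num_threads sketch')
    parser.add_argument('-num_threads', default=None, type=int,
                        help='default number of threads used by pytorch')
    args = parser.parse_args()

    if args.num_threads is None:
        # Fall back to half the available logical CPUs, as in the commit.
        args.num_threads = multiprocessing.cpu_count() // 2
    torch.set_num_threads(args.num_threads)  # caps PyTorch intra-op parallelism
    print('PyTorch intra-op threads:', torch.get_num_threads())

With these defaults, running e.g. python main.py -num_threads 4 would cap PyTorch at 4 intra-op threads, while omitting the flag uses half of multiprocessing.cpu_count().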
