Commit 7631c25

run format.sh
DavdGao committed May 25, 2022
1 parent df9ab25 commit 7631c25
Showing 4 changed files with 8 additions and 13 deletions.
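
This commit is a pure formatting pass: format.sh itself is not part of the diff, and the reflow pattern in the hunks below (lines over 79 columns rewrapped with hanging indents) matches yapf's default PEP 8 style, which the script presumably invokes. A minimal sketch of reproducing one such reflow through yapf's Python API, assuming yapf is indeed the formatter behind format.sh:

from yapf.yapflib.yapf_api import FormatCode

# The config.py line touched in the first hunk is 80 columns once yapf's
# two-space inline-comment style is applied, so it exceeds the default
# 79-column limit and gets rewrapped.
src = 'self.__dict__["cfg_check_funcs"] = list()  # to check the config values validity\n'
formatted, changed = FormatCode(src, style_config='pep8')
print(changed)    # True: the line was rewrapped
print(formatted)  # split similarly to the list( / ) pattern in the first hunk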
federatedscope/core/configs/config.py (3 changes: 2 additions & 1 deletion)
@@ -14,7 +14,8 @@ class CN(CfgNode):
     """
     def __init__(self, init_dict=None, key_list=None, new_allowed=False):
         super().__init__(init_dict, key_list, new_allowed)
-        self.__dict__["cfg_check_funcs"] = list() # to check the config values validity
+        self.__dict__["cfg_check_funcs"] = list(
+        ) # to check the config values validity
 
     def __getattr__(self, name):
         if name in self:
federatedscope/core/trainers/context.py (5 changes: 1 addition & 4 deletions)
@@ -100,10 +100,7 @@ def setup_vars(self):
             self.criterion = get_criterion(self.cfg.criterion.type,
                                            self.device)
             self.regularizer = get_regularizer(self.cfg.regularizer.type)
-            self.optimizer = get_optimizer(
-                self.model,
-                **self.cfg.optimizer
-            )
+            self.optimizer = get_optimizer(self.model, **self.cfg.optimizer)
             self.grad_clip = self.cfg.grad.grad_clip
         elif self.cfg.backend == 'tensorflow':
             self.trainable_para_names = self.model.trainable_variables()
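
An aside on the call shape that format.sh keeps collapsing here: get_optimizer(model, **cfg.optimizer) unpacks a config node into keyword arguments. The helper's body is outside this diff; a minimal sketch of such a helper, assuming a PyTorch backend and a 'type' entry in cfg.optimizer naming a torch.optim class (a hypothetical implementation, not FederatedScope's actual code):

import torch

def get_optimizer(model, type='SGD', **kwargs):
    # Resolve the optimizer class from torch.optim by its configured name,
    # then bind it to the model's parameters with the remaining settings
    # (lr, momentum, weight_decay, ...).
    optimizer_cls = getattr(torch.optim, type)
    return optimizer_cls(model.parameters(), **kwargs)

# Usage mirroring the diff, with cfg.optimizer acting like
# {'type': 'SGD', 'lr': 0.01}:
# optimizer = get_optimizer(model, **cfg.optimizer)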
federatedscope/core/trainers/trainer_Ditto.py (10 changes: 4 additions & 6 deletions)
@@ -62,12 +62,10 @@ def init_Ditto_ctx(base_trainer):
     ctx.global_model = copy.deepcopy(ctx.model)
     ctx.local_model = copy.deepcopy(ctx.model)  # the personalized model
 
-    ctx.optimizer_for_global_model = get_optimizer(
-        ctx.global_model,
-        **cfg.optimizer)
-    ctx.optimizer_for_local_model = get_optimizer(
-        ctx.local_model,
-        **cfg.optimizer)
+    ctx.optimizer_for_global_model = get_optimizer(ctx.global_model,
+                                                   **cfg.optimizer)
+    ctx.optimizer_for_local_model = get_optimizer(ctx.local_model,
+                                                  **cfg.optimizer)
     ctx.optimizer_for_local_model = wrap_regularized_optimizer(
         ctx.optimizer_for_local_model, cfg.personalization.regular_weight)
 
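
Context for the wrapper in this hunk: Ditto trains the personalized local model with a proximal penalty pulling it toward the global model, which is why only the local optimizer passes through wrap_regularized_optimizer. The wrapper's body is outside this diff; a minimal sketch of the underlying idea, adding the gradient of (regular_weight / 2) * ||w_local - w_global||^2 to the local gradients before each step (a hypothetical helper, not the library's API):

import torch

def add_proximal_grad(local_model, global_model, regular_weight):
    # The gradient of the Ditto proximal term is
    # regular_weight * (w_local - w_global); accumulate it into each local
    # parameter's .grad so a following optimizer.step() applies the
    # regularized update.
    with torch.no_grad():
        for p_local, p_global in zip(local_model.parameters(),
                                     global_model.parameters()):
            if p_local.grad is not None:
                p_local.grad.add_(regular_weight * (p_local - p_global))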
federatedscope/core/trainers/trainer_multi_model.py (3 changes: 1 addition & 2 deletions)
@@ -96,8 +96,7 @@ def init_multiple_models(self):
         self.ctx.models = [self.ctx.model] + additional_models
 
         additional_optimizers = [
-            get_optimizer(self.ctx.models[i],
-                          **self.cfg.optimizer)
+            get_optimizer(self.ctx.models[i], **self.cfg.optimizer)
             for i in range(1, self.model_nums)
         ]
         self.ctx.optimizers = [self.ctx.optimizer] + additional_optimizers
