import torch
import torch.optim as optim
import ignite.distributed as idist
from omegaconf import OmegaConf

from model import MODEL


def build_model(cfg):
    """Build a model from an OmegaConf config and adapt it for distributed training.

    Args:
        cfg: OmegaConf config describing the model. May contain the meta-key
            ``"_bn_to_sync_bn"`` (bool, default False); when true, all
            BatchNorm layers are converted to ``SyncBatchNorm`` so statistics
            stay consistent across processes.

    Returns:
        The model built by ``MODEL.build_with``, wrapped by
        ``idist.auto_model`` (moves it to the right device / wraps it for
        the active distributed configuration).
    """
    cfg = OmegaConf.to_container(cfg)
    # "_bn_to_sync_bn" is a meta-key, not a constructor kwarg: pop it before building.
    bn_to_sync_bn = cfg.pop("_bn_to_sync_bn", False)
    model = MODEL.build_with(cfg)
    if bn_to_sync_bn:
        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
    return idist.auto_model(model)


def build_optimizer(params, cfg):
    """Build a ``torch.optim`` optimizer from an OmegaConf config.

    Args:
        params: Iterable of parameters (or param groups) to optimize.
        cfg: OmegaConf config. Must contain ``"_type"`` naming a class in
            ``torch.optim`` (e.g. ``"Adam"``); all remaining keys are passed
            to that class's constructor as keyword arguments.

    Returns:
        The optimizer wrapped by ``idist.auto_optim`` for distributed use.

    Raises:
        KeyError: If ``cfg`` has no ``"_type"`` key.
    """
    cfg = OmegaConf.to_container(cfg)
    # Explicit raise instead of `assert`: asserts are stripped under `python -O`,
    # which would turn a missing "_type" into an opaque TypeError downstream.
    optimizer_type = cfg.pop("_type", None)
    if optimizer_type is None:
        raise KeyError('optimizer config must contain an "_type" key naming a torch.optim class')
    optimizer = getattr(optim, optimizer_type)(params=params, **cfg)
    return idist.auto_optim(optimizer)