From f70658eaed19a2e93b91215b3f50404d409ffa7d Mon Sep 17 00:00:00 2001
From: Ray Wong
Date: Fri, 11 Sep 2020 23:04:26 +0800
Subject: [PATCH] small fix

---
 engine/util/handler.py | 2 +-
 main.py                | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/engine/util/handler.py b/engine/util/handler.py
index b2334cc..1bc0c44 100644
--- a/engine/util/handler.py
+++ b/engine/util/handler.py
@@ -85,7 +85,7 @@ def setup_common_handlers(trainer: Engine, config, stop_on_nan=True, clear_cuda_
         if not checkpoint_path.exists():
             raise FileNotFoundError(f"Checkpoint '{checkpoint_path}' is not found")
         ckp = torch.load(checkpoint_path.as_posix(), map_location="cpu")
-        trainer.logger.info(f"load state_dict for {ckp.keys()}")
+        trainer.logger.info(f"load state_dict for {to_save.keys()}")
         Checkpoint.load_objects(to_load=to_save, checkpoint=ckp)
         engine.logger.info(f"resume from a checkpoint {checkpoint_path}")
     trainer.add_event_handler(
diff --git a/main.py b/main.py
index c4a010e..e3f3c40 100644
--- a/main.py
+++ b/main.py
@@ -32,8 +32,7 @@ def running(local_rank, config, task, backup_config=False, setup_output_dir=Fals

     if setup_output_dir and config.resume_from is None:
         if output_dir.exists():
-            assert len(list(output_dir.glob("events*"))) == 0
-            assert len(list(output_dir.glob("*.pt"))) == 0
+            assert len(list(output_dir.glob("events*"))) == 0, f"{output_dir} contains tensorboard events"
             if (output_dir / "train.log").exists() and idist.get_rank() == 0:
                 (output_dir / "train.log").unlink()
             else: