From 541837eb0f6196e791ad540d54dab8fb56775015 Mon Sep 17 00:00:00 2001
From: liujing04 <129054828+liujing04@users.noreply.github.com>
Date: Sun, 9 Apr 2023 15:24:13 +0000
Subject: [PATCH] Update utils.py

---
 train/utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/train/utils.py b/train/utils.py
index 746adfa..de6ceef 100644
--- a/train/utils.py
+++ b/train/utils.py
@@ -121,7 +121,7 @@ def load_checkpoint(checkpoint_path, model, optimizer=None,load_opt=1):
 
 
 def save_checkpoint(model, optimizer, learning_rate, iteration, checkpoint_path):
-  logger.info("Saving model and optimizer state at iteration {} to {}".format(
+  logger.info("Saving model and optimizer state at epoch {} to {}".format(
     iteration, checkpoint_path))
   if hasattr(model, 'module'):
     state_dict = model.module.state_dict()
@@ -132,7 +132,7 @@ def save_checkpoint(model, optimizer, learning_rate, iteration, checkpoint_path)
               'optimizer': optimizer.state_dict(),
               'learning_rate': learning_rate}, checkpoint_path)
 def save_checkpoint_d(combd, sbd, optimizer, learning_rate, iteration, checkpoint_path):
-  logger.info("Saving model and optimizer state at iteration {} to {}".format(
+  logger.info("Saving model and optimizer state at epoch {} to {}".format(
     iteration, checkpoint_path))
   if hasattr(combd, 'module'): state_dict_combd = combd.module.state_dict()
   else:state_dict_combd = combd.state_dict()
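
Note (not part of the patch): for context, below is a minimal sketch of the checkpoint-saving pattern the two hunks touch, assuming standard PyTorch objects and a module-level logging.Logger named logger. Names mirror the diff, but this is an illustration, not the repository's exact code; the patch itself only rewords the log message from "iteration" to "epoch".

import logging

import torch

logger = logging.getLogger(__name__)


def save_checkpoint(model, optimizer, learning_rate, iteration, checkpoint_path):
    # Log message says "epoch" (per the patch); the argument is still named
    # `iteration` in the function signature.
    logger.info(
        "Saving model and optimizer state at epoch {} to {}".format(
            iteration, checkpoint_path
        )
    )
    # Unwrap DataParallel/DistributedDataParallel so the saved keys do not
    # carry the "module." prefix.
    if hasattr(model, 'module'):
        state_dict = model.module.state_dict()
    else:
        state_dict = model.state_dict()
    # Bundle weights, step counter, optimizer state and learning rate into
    # one file, matching the layout shown in the second hunk.
    torch.save(
        {
            'model': state_dict,
            'iteration': iteration,
            'optimizer': optimizer.state_dict(),
            'learning_rate': learning_rate,
        },
        checkpoint_path,
    )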