From 93b8d4a7afd9d525069d9065bab5aebc97063a1d Mon Sep 17 00:00:00 2001
From: Xerxes-2
Date: Fri, 12 May 2023 21:11:34 +1000
Subject: [PATCH] add timestamp and epoch elapsed time

---
 train_nsf_sim_cache_sid_load_pretrain.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/train_nsf_sim_cache_sid_load_pretrain.py b/train_nsf_sim_cache_sid_load_pretrain.py
index 4ba6b65..b4a6565 100644
--- a/train_nsf_sim_cache_sid_load_pretrain.py
+++ b/train_nsf_sim_cache_sid_load_pretrain.py
@@ -41,6 +41,18 @@ from mel_processing import mel_spectrogram_torch, spec_to_mel_torch
 
 global_step = 0
+import datetime
+class EpochRecorder:
+    def __init__(self):
+        self.last_time = ttime()
+
+
+    def record(self):
+        now_time = ttime()
+        elapsed_time = now_time - self.last_time
+        hr_and_min: str = str(datetime.timedelta(seconds=elapsed_time))
+        current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        return f"[{current_time}] | ({hr_and_min})"
 
 
 def main():
     # n_gpus = torch.cuda.device_count()
@@ -314,6 +326,7 @@ def train_and_evaluate(
     data_iterator = enumerate(train_loader)
 
     # Run steps
+    epoch_recorder = EpochRecorder()
     for batch_idx, info in data_iterator:
         # Data
         ## Unpack
@@ -513,7 +526,7 @@ def train_and_evaluate(
             )
 
     if rank == 0:
-        logger.info("====> Epoch: {}".format(epoch))
+        logger.info("====> Epoch: {} {}".format(epoch, epoch_recorder.record()))
     if epoch >= hps.total_epoch and rank == 0:
         logger.info("Training is done. The program is closed.")
         from process_ckpt import savee  # def savee(ckpt,sr,if_f0,name,epoch):