From 94a995defe223eed0898f25d2332ba6178a92abe Mon Sep 17 00:00:00 2001
From: lufficc
Date: Thu, 20 Dec 2018 15:18:10 +0800
Subject: [PATCH] build log_str dynamically

---
 ssd/engine/trainer.py | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/ssd/engine/trainer.py b/ssd/engine/trainer.py
index 226c83e7..876cca1f 100644
--- a/ssd/engine/trainer.py
+++ b/ssd/engine/trainer.py
@@ -86,19 +86,21 @@ def do_train(cfg, model,
         end = time.time()
         if iteration % args.log_step == 0:
             eta_seconds = int((trained_time / iteration) * (max_iter - iteration))
-            logger.info(
-                "Iter: {:06d}, Lr: {:.5f}, Cost: {:.2f}s, Eta: {}, ".format(iteration, optimizer.param_groups[0]['lr'],
-                                                                            time.time() - tic,
-                                                                            str(datetime.timedelta(seconds=eta_seconds))) +
-                "Loss: {:.3f}, ".format(losses_reduced.item()) +
-                "Regression Loss {:.3f}, ".format(loss_dict_reduced['regression_loss'].item()) +
-                "Classification Loss: {:.3f}".format(loss_dict_reduced['classification_loss'].item()))
-
+            log_str = [
+                "Iter: {:06d}, Lr: {:.5f}, Cost: {:.2f}s, Eta: {}".format(iteration,
+                                                                          optimizer.param_groups[0]['lr'],
+                                                                          time.time() - tic,
+                                                                          str(datetime.timedelta(seconds=eta_seconds))),
+                "total_loss: {:.3f}".format(losses_reduced.item())
+            ]
+            for loss_name, loss_item in loss_dict_reduced.items():
+                log_str.append("{}: {:.3f}".format(loss_name, loss_item.item()))
+            log_str = ', '.join(log_str)
+            logger.info(log_str)
             if summary_writer:
                 global_step = iteration
-                summary_writer.add_scalar('losses/total_loss', losses_reduced.item(), global_step=global_step)
-                summary_writer.add_scalar('losses/location_loss', loss_dict_reduced['regression_loss'].item(), global_step=global_step)
-                summary_writer.add_scalar('losses/class_loss', loss_dict_reduced['classification_loss'].item(), global_step=global_step)
+                summary_writer.add_scalar('losses/total_loss', losses_reduced, global_step=global_step)
+                for loss_name, loss_item in loss_dict_reduced.items():
+                    summary_writer.add_scalar('losses/{}'.format(loss_name), loss_item, global_step=global_step)
                 summary_writer.add_scalar('lr', optimizer.param_groups[0]['lr'], global_step=global_step)
                 tic = time.time()
 
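
As a quick illustration of the pattern this commit introduces, here is a minimal
standalone sketch. The loss names and values below are hypothetical, not taken
from the patch; in the real trainer they come from loss_dict_reduced, and the
same loop shape drives the per-loss summary_writer.add_scalar calls.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

iteration = 42   # hypothetical training state, for illustration only
lr = 0.001
loss_dict = {"regression_loss": 1.234, "classification_loss": 2.345}
total_loss = sum(loss_dict.values())

# Build the message as a list of parts, then join once. A new entry in
# loss_dict shows up in the log line automatically, with no code change.
log_str = [
    "Iter: {:06d}, Lr: {:.5f}".format(iteration, lr),
    "total_loss: {:.3f}".format(total_loss),
]
for loss_name, loss_item in loss_dict.items():
    log_str.append("{}: {:.3f}".format(loss_name, loss_item))
logger.info(', '.join(log_str))
# -> Iter: 000042, Lr: 0.00100, total_loss: 3.579,
#    regression_loss: 1.234, classification_loss: 2.345

The upside of building the string dynamically is that adding a new loss term to
the model changes neither the logging nor the TensorBoard code.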