From 5fb7fc9fee6cf41b24007aca06f0cb733ae7f0fc Mon Sep 17 00:00:00 2001
From: Dusan Varis
Date: Wed, 1 Aug 2018 17:24:05 +0200
Subject: [PATCH] generic_trainer always adds l2 values to summaries

---
 neuralmonkey/trainers/generic_trainer.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/neuralmonkey/trainers/generic_trainer.py b/neuralmonkey/trainers/generic_trainer.py
index d69dc1b5b..474e84f62 100644
--- a/neuralmonkey/trainers/generic_trainer.py
+++ b/neuralmonkey/trainers/generic_trainer.py
@@ -7,7 +7,8 @@
 from neuralmonkey.model.model_part import ModelPart
 from neuralmonkey.runners.base_runner import (
     Executable, ExecutionResult, NextExecute)
-from neuralmonkey.trainers.regularizers import Regularizer
+from neuralmonkey.trainers.regularizers import (
+    Regularizer, L2Regularizer)
 
 # pylint: disable=invalid-name
 Gradients = List[Tuple[tf.Tensor, tf.Variable]]
@@ -99,6 +100,11 @@ def __init__(self,
 
         # unweighted losses for fetching
         self.losses = [o.loss for o in objectives] + reg_values
+        # we always want to include l2 values in the summary; emit the
+        # scalar directly — appending to reg_values alone would be dropped
+        # by the zip below, which truncates to len(self.regularizers)
+        if L2Regularizer not in [type(r) for r in self.regularizers]:
+            l2_reg = L2Regularizer()
+            tf.summary.scalar(l2_reg.name, l2_reg.value(regularizable),
+                              collections=["summary_train"])
         for reg, reg_value in zip(self.regularizers, reg_values):
             tf.summary.scalar(reg.name, reg_value,
                               collections=["summary_train"])