From 6565dee05136af08bd19b81da2fb0cd59b4c6a78 Mon Sep 17 00:00:00 2001
From: mertyg
Date: Sun, 7 Jul 2024 11:00:21 -0700
Subject: [PATCH] remove redundant logging

---
 textgrad/optimizer/optimizer.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/textgrad/optimizer/optimizer.py b/textgrad/optimizer/optimizer.py
index 5ebec04..971fc4c 100644
--- a/textgrad/optimizer/optimizer.py
+++ b/textgrad/optimizer/optimizer.py
@@ -276,5 +276,4 @@ def step(self):
                 logger.error(f"TextualGradientDescent optimizer response could not be indexed", extra={"optimizer.response": new_text})
                 raise IndexError(f"TextualGradientDescent optimizer response could not be indexed. This can happen if the optimizer model cannot follow the instructions. You can try using a stronger model, or somehow reducing the context of the optimization. Response: {new_text}")
             parameter.set_value(new_value)
-            logger.info(f"TextualGradientDescent updated text", extra={"parameter.value": parameter.value})
             logger.info(f"TextualGradientDescentwithMomentum updated text", extra={"parameter.value": parameter.value})