Commit: Remove advisory
joyce-chen-uni authored Aug 21, 2024
1 parent 4b732fa commit 88a5417
Showing 1 changed file with 2 additions and 2 deletions.
llmfoundry/callbacks/kill_loss_spike_callback.py (4 changes: 2 additions & 2 deletions)
@@ -45,7 +45,7 @@ def batch_end(self, state: State, logger: Logger) -> None:
             # This will be replaced with the hard error LossSpikeError.
             for destination in logger.destinations:
                 if isinstance(destination, MosaicMLLogger):
-                    destination.log_metadata({'loss_spike': f'Training loss spike detected for {self.outlier_counter} consecutive steps. Try lowering the learning rate.'})
+                    destination.log_metadata({'loss_spike': f'Training loss spike detected for {self.outlier_counter} consecutive steps.'})
                     # raise LossSpikeError(self.outlier_multiplier, round(running_loss_avg), self.outlier_counter)
 
         # Previous step loss was an outlier, current step loss is not. Reset outlier counter.
@@ -58,7 +58,7 @@ def batch_end(self, state: State, logger: Logger) -> None:
             log.info(f'High losses >{self.loss_cap} detected.')
             for destination in logger.destinations:
                 if isinstance(destination, MosaicMLLogger):
-                    destination.log_metadata({'high_loss': f'Persistently high (>{self.loss_cap}) training losses detected. Try lowering the learning rate.'})
+                    destination.log_metadata({'high_loss': f'Persistently high (>{self.loss_cap}) training losses detected.'})
                     # raise LossSpikeError()
 
         else:
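For context, both hunks sit inside the callback's consecutive-outlier logic: a step loss counts as a spike when it exceeds a multiple of a running average, and only a run of consecutive spikes triggers the metadata log (and, per the commented-out lines, will eventually raise LossSpikeError). Below is a minimal, self-contained sketch of that counting scheme. It is reconstructed only from the names visible in this diff (outlier_multiplier, outlier_counter, running_loss_avg); window_size and patience are hypothetical parameters, and the real kill_loss_spike_callback.py in llmfoundry may differ in detail.

    from collections import deque


    class LossSpikeDetector:
        """Sketch of consecutive-outlier detection; not the llmfoundry implementation."""

        def __init__(self, outlier_multiplier: float = 2.0, window_size: int = 100, patience: int = 4):
            self.outlier_multiplier = outlier_multiplier  # spike = loss > multiplier * running average
            self.patience = patience                      # consecutive spike steps before flagging
            self.loss_window: deque = deque(maxlen=window_size)
            self.outlier_counter = 0

        def update(self, train_loss: float) -> bool:
            """Record one step's loss; return True once `patience` consecutive spikes are seen."""
            if len(self.loss_window) == self.loss_window.maxlen:
                running_loss_avg = sum(self.loss_window) / len(self.loss_window)
                if train_loss >= self.outlier_multiplier * running_loss_avg:
                    self.outlier_counter += 1
                else:
                    # Current step loss is back in range: reset the consecutive counter,
                    # mirroring the "Reset outlier counter" comment in the diff above.
                    self.outlier_counter = 0
            self.loss_window.append(train_loss)
            return self.outlier_counter >= self.patience

Waiting for a run of consecutive outliers, rather than reacting to a single bad step, keeps one-off loss blips from killing a long training job.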
