This repository was archived by the owner on Jun 3, 2025. It is now read-only.

Commit 9902945

bfineran and KSGulin authored
Allow modifier loggers to be overwritten by subsequent calls (#896) (#899)
Co-authored-by: Konstantin Gulin <[email protected]>
1 parent f6251f3 · commit 9902945

File tree: 1 file changed (+2, -5 lines)

src/sparseml/pytorch/sparsification/modifier.py (2 additions, 5 deletions)

@@ -234,9 +234,6 @@ def initialize_loggers(self, loggers: Union[None, LoggerManager, List[BaseLogger
         :param loggers: the logger maanger to setup this modifier with for logging
             important info and milestones to
         """
-        if self._loggers_initialized and self._loggers:
-            return
-
         loggers = loggers or []
         if isinstance(loggers, List):
             loggers = LoggerManager(loggers)
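The removed early return is what previously made initialize_loggers a one-shot call. A minimal toy sketch (not sparseml code; the class and field names below are simplified stand-ins) of the behavioral change:

from typing import List, Optional


class ToyModifier:
    """Simplified stand-in for a sparseml modifier's logger handling."""

    def __init__(self):
        self._loggers: Optional[List[str]] = None
        self._loggers_initialized = False

    def initialize_loggers(self, loggers: Optional[List[str]] = None):
        # Before this commit, an early return here skipped re-initialization:
        #     if self._loggers_initialized and self._loggers:
        #         return
        # With the guard removed, each call overwrites the previous loggers.
        self._loggers = list(loggers or [])
        self._loggers_initialized = True


modifier = ToyModifier()
modifier.initialize_loggers(["tensorboard"])
modifier.initialize_loggers(["wandb"])
print(modifier._loggers)  # ['wandb']: the second call replaced the first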
@@ -709,7 +706,7 @@ def log_string(
         level = level or LOGGING_LEVELS["debug"]
         step = (
             loggers.epoch_to_step(epoch, steps_per_epoch)
-            if (epoch and steps_per_epoch)
+            if (epoch is not None) and (steps_per_epoch is not None)
             else None
         )
         loggers.log_string(tag=tag, string=string, step=step, level=level)

@@ -727,7 +724,7 @@ def log_scalar(
         loggers = loggers or self.loggers
         step = (
             loggers.epoch_to_step(epoch, steps_per_epoch)
-            if (epoch and steps_per_epoch)
+            if (epoch is not None) and (steps_per_epoch is not None)
             else None
         )
         loggers.log_scalar(tag=tag, value=value, step=step, level=level)
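The two logging hunks swap a truthiness check for explicit `is not None` comparisons. A small self-contained sketch of why that matters (the step arithmetic below is a hypothetical stand-in for LoggerManager.epoch_to_step, which is not shown in this diff):

def step_old(epoch, steps_per_epoch):
    # Old check: relies on truthiness, so epoch=0 (or steps_per_epoch=0) is
    # treated the same as "not provided" and the step collapses to None.
    return epoch * steps_per_epoch if (epoch and steps_per_epoch) else None


def step_new(epoch, steps_per_epoch):
    # New check: only falls back to None when a value is actually missing.
    return (
        epoch * steps_per_epoch
        if (epoch is not None) and (steps_per_epoch is not None)
        else None
    )


print(step_old(0, 100))  # None: the first epoch is silently dropped
print(step_new(0, 100))  # 0: epoch 0 maps to step 0 as expected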
