From e49bebf5c816823835339de2f27cd4d58de3e66b Mon Sep 17 00:00:00 2001 From: Romeo Kienzler <5694071+romeokienzler@users.noreply.github.com> Date: Fri, 11 Apr 2025 12:44:36 +0000 Subject: [PATCH] default detect_anomaly to True Default `detect_anomaly` to True, because a NaN or Inf loss usually indicates a bug and is better surfaced immediately than silently propagated. --- src/lightning/pytorch/trainer/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lightning/pytorch/trainer/trainer.py b/src/lightning/pytorch/trainer/trainer.py index 8e4e2de97fd6a..1551914c4429d 100644 --- a/src/lightning/pytorch/trainer/trainer.py +++ b/src/lightning/pytorch/trainer/trainer.py @@ -124,7 +124,7 @@ def __init__( inference_mode: bool = True, use_distributed_sampler: bool = True, profiler: Optional[Union[Profiler, str]] = None, - detect_anomaly: bool = False, + detect_anomaly: bool = True, barebones: bool = False, plugins: Optional[Union[_PLUGIN_INPUT, list[_PLUGIN_INPUT]]] = None, sync_batchnorm: bool = False,