Commit 78d52b5

Author: SeanNaren (committed)
Commit message: Disable loss scaler for now
Parent: efa81ab

File tree: 1 file changed (+0, −5 lines)

pytorch_lightning/plugins/precision/fully_sharded_native_amp.py

Lines changed: 0 additions & 5 deletions
@@ -20,17 +20,12 @@
 from pytorch_lightning.utilities import _FAIRSCALE_FULLY_SHARDED_AVAILABLE, GradClipAlgorithmType
 
 if _FAIRSCALE_FULLY_SHARDED_AVAILABLE:
-    from fairscale.experimental.optim import DynamicLossScaler
     from fairscale.nn.data_parallel import FullyShardedDataParallel
 
 
 class FullyShardedNativeMixedPrecisionPlugin(ShardedNativeMixedPrecisionPlugin):
     """Mixed Precision for Full Sharded Training"""
 
-    def __init__(self) -> None:
-        super().__init__()
-        self.scaler = DynamicLossScaler()
-
     def clip_gradients(
         self,
         optimizer: 'Optimizer',
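
For anyone who still wants dynamic loss scaling after this change, below is a minimal, hypothetical sketch of how a user could re-attach the scaler this commit disables by subclassing the plugin. The subclass name `DynamicScalerFullyShardedPlugin` is invented for illustration; the import paths follow the file path and imports shown in the diff above, and `DynamicLossScaler()` is constructed with no arguments exactly as the removed line did.

from fairscale.experimental.optim import DynamicLossScaler  # the import this commit removes
from pytorch_lightning.plugins.precision.fully_sharded_native_amp import (
    FullyShardedNativeMixedPrecisionPlugin,  # module path matches the file changed in this commit
)


class DynamicScalerFullyShardedPlugin(FullyShardedNativeMixedPrecisionPlugin):
    """Hypothetical subclass that restores the scaler removed upstream."""

    def __init__(self) -> None:
        super().__init__()
        # Re-attach fairscale's experimental loss scaler, i.e. the exact line
        # that this commit deleted from the upstream plugin.
        self.scaler = DynamicLossScaler()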
