Commit 2d582e7

Remove extra references to adamw_hf
1 parent 6ea50d5 commit 2d582e7

File tree

3 files changed: +3 −14 lines changed

src/transformers/trainer.py

Lines changed: 1 addition & 1 deletion
@@ -1420,7 +1420,7 @@ def optimizer_hook(param):
         if args.optim == OptimizerNames.ADAFACTOR:
             optimizer_cls = Adafactor
             optimizer_kwargs.update({"scale_parameter": False, "relative_step": False})
-        elif args.optim in [OptimizerNames.ADAMW_TORCH, OptimizerNames.ADAMW_TORCH_FUSED, OptimizerNames.ADAMW_HF]:
+        elif args.optim in [OptimizerNames.ADAMW_TORCH, OptimizerNames.ADAMW_TORCH_FUSED]:
             from torch.optim import AdamW

             optimizer_cls = AdamW
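
After this change, `adamw_torch` (the default) and `adamw_torch_fused` are the two names in this branch of the `elif` that still resolve to `torch.optim.AdamW`; `"adamw_hf"` is no longer listed. A minimal sketch of selecting one of the remaining names through `TrainingArguments` (the `output_dir` value is a placeholder, and the final comparison assumes the usual normalization of the string to the `OptimizerNames` enum):

    from transformers import TrainingArguments
    from transformers.training_args import OptimizerNames

    # "adamw_torch" is the default; "adamw_torch_fused" selects the fused AdamW kernel where supported.
    args = TrainingArguments(output_dir="tmp_out", optim="adamw_torch")

    # TrainingArguments is expected to normalize the string to the enum member.
    assert args.optim == OptimizerNames.ADAMW_TORCH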

src/transformers/training_args.py

Lines changed: 2 additions & 3 deletions
@@ -145,7 +145,6 @@ class OptimizerNames(ExplicitEnum):
     Stores the acceptable string identifiers for optimizers.
     """

-    ADAMW_HF = "adamw_hf"
     ADAMW_TORCH = "adamw_torch"
     ADAMW_TORCH_FUSED = "adamw_torch_fused"
     ADAMW_TORCH_XLA = "adamw_torch_xla"

@@ -627,7 +626,7 @@ class TrainingArguments:

             The options should be separated by whitespaces.
         optim (`str` or [`training_args.OptimizerNames`], *optional*, defaults to `"adamw_torch"`):
-            The optimizer to use, such as "adamw_hf", "adamw_torch", "adamw_torch_fused", "adamw_apex_fused", "adamw_anyprecision",
+            The optimizer to use, such as "adamw_torch", "adamw_torch_fused", "adamw_apex_fused", "adamw_anyprecision",
             "adafactor". See `OptimizerNames` in [training_args.py](https://github.com/huggingface/transformers/blob/main/src/transformers/training_args.py)
             for a full list of optimizers.
         optim_args (`str`, *optional*):

@@ -2979,7 +2978,7 @@ def set_optimizer(

         Args:
             name (`str` or [`training_args.OptimizerNames`], *optional*, defaults to `"adamw_torch"`):
-                The optimizer to use: `"adamw_hf"`, `"adamw_torch"`, `"adamw_torch_fused"`, `"adamw_apex_fused"`,
+                The optimizer to use: `"adamw_torch"`, `"adamw_torch_fused"`, `"adamw_apex_fused"`,
                 `"adamw_anyprecision"` or `"adafactor"`.
             learning_rate (`float`, *optional*, defaults to 5e-5):
                 The initial learning rate.
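
The `set_optimizer` docstring drops the stale `"adamw_hf"` name as well. A hedged usage sketch of that setter, using only the two parameters visible in this hunk and assuming it follows the other `set_*` helpers in returning the updated `TrainingArguments`:

    from transformers import TrainingArguments

    args = TrainingArguments(output_dir="tmp_out")
    # Configure the optimizer by name plus its initial learning rate.
    args = args.set_optimizer(name="adamw_torch", learning_rate=5e-5)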

tests/trainer/test_trainer.py

Lines changed: 0 additions & 10 deletions
@@ -5174,16 +5174,6 @@ def hp_name(trial):
     }

     optim_test_params = [
-        (
-            OptimizerNames.ADAMW_HF,
-            torch.optim.AdamW,
-            default_adam_kwargs,
-        ),
-        (
-            OptimizerNames.ADAMW_HF.value,
-            torch.optim.AdamW,
-            default_adam_kwargs,
-        ),
         (
             OptimizerNames.ADAMW_TORCH,
             torch.optim.AdamW,
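
The deleted tuples are the two `adamw_hf` entries of the parametrized optimizer list; each remaining entry still pairs an optimizer name with the class and kwargs the test expects the Trainer to produce. A standalone check in the same spirit, assuming `Trainer.get_optimizer_cls_and_kwargs(args)` keeps its `(optimizer_cls, optimizer_kwargs)` return shape (illustrative, not the actual test code):

    import torch
    from transformers import Trainer, TrainingArguments

    # A surviving name such as "adamw_torch" should still map to torch.optim.AdamW.
    args = TrainingArguments(output_dir="tmp_out", optim="adamw_torch")
    optimizer_cls, optimizer_kwargs = Trainer.get_optimizer_cls_and_kwargs(args)
    assert optimizer_cls is torch.optim.AdamW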
