Remove profile("training_step_and_backward") #11222

Merged · 11 commits · Jan 4, 2022
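Summary (inferred from the diff below): this PR removes the combined `training_step_and_backward` profiling action. The `Closure` no longer wraps the training step in a profiler context (and drops its `PassThroughProfiler` fallback), the action is removed from `PyTorchProfiler.RECORD_FUNCTIONS` and from `XLAProfiler`'s `STEP_FUNCTIONS`/`RECORD_FUNCTIONS`, and `optimizer_zero_grad` is now invoked through the LightningModule hook rather than the Strategy passthrough. The separate `training_step` and `backward` actions remain.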
22 changes: 9 additions & 13 deletions pytorch_lightning/loops/optimization/optimizer_loop.py
@@ -28,7 +28,7 @@
     _extract_hiddens,
     check_finite_loss,
 )
-from pytorch_lightning.profiler import BaseProfiler, PassThroughProfiler
+from pytorch_lightning.profiler import BaseProfiler
 from pytorch_lightning.trainer.progress import OptimizationProgress
 from pytorch_lightning.utilities import _AcceleratorType, AMPType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -132,22 +132,18 @@ def __init__(
         self._step_fn = step_fn
         self._backward_fn = backward_fn
         self._zero_grad_fn = zero_grad_fn
-        self._profiler = PassThroughProfiler() if profiler is None else profiler

     def closure(self, *args: Any, **kwargs: Any) -> ClosureResult:
-        with self._profiler.profile("training_step_and_backward"):
-            step_output = self._step_fn()
+        step_output = self._step_fn()

-            if step_output.closure_loss is None:
-                self.warning_cache.warn(
-                    "`training_step` returned `None`. If this was on purpose, ignore this warning..."
-                )
+        if step_output.closure_loss is None:
+            self.warning_cache.warn("`training_step` returned `None`. If this was on purpose, ignore this warning...")

-            if self._zero_grad_fn is not None:
-                self._zero_grad_fn()
+        if self._zero_grad_fn is not None:
+            self._zero_grad_fn()

-            if self._backward_fn is not None and step_output.closure_loss is not None:
-                self._backward_fn(step_output.closure_loss)
+        if self._backward_fn is not None and step_output.closure_loss is not None:
+            self._backward_fn(step_output.closure_loss)

         return step_output
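For readability, this is how the `closure` body reads after the change, assembled from the added lines above (a sketch; the surrounding class scaffolding is omitted):

```python
def closure(self, *args: Any, **kwargs: Any) -> ClosureResult:
    # The training step now runs without the combined profiler context;
    # the separate "training_step" and "backward" actions still cover this work.
    step_output = self._step_fn()

    if step_output.closure_loss is None:
        self.warning_cache.warn("`training_step` returned `None`. If this was on purpose, ignore this warning...")

    # Zero gradients first, when a zero-grad function was supplied.
    if self._zero_grad_fn is not None:
        self._zero_grad_fn()

    # Run backward only when there is a loss to backpropagate.
    if self._backward_fn is not None and step_output.closure_loss is not None:
        self._backward_fn(step_output.closure_loss)

    return step_output
```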

@@ -400,7 +396,7 @@ def _optimizer_zero_grad(self, batch_idx: int, optimizer: torch.optim.Optimizer,
             optimizer: the current optimizer
             opt_idx: the index of the current optimizer
         """
-        self.trainer._call_strategy_hook(
+        self.trainer._call_lightning_module_hook(
             "optimizer_zero_grad", self.trainer.current_epoch, batch_idx, optimizer, opt_idx
         )
        self.optim_progress.optimizer.zero_grad.increment_completed()
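With the loop now calling the LightningModule hook directly (the Strategy-level passthrough is deleted in the next file), overriding `optimizer_zero_grad` on the module remains the customization point. A minimal sketch, assuming a user-defined `MyModel`:

```python
import pytorch_lightning as pl


class MyModel(pl.LightningModule):
    def optimizer_zero_grad(self, epoch, batch_idx, optimizer, optimizer_idx):
        # The default hook calls optimizer.zero_grad(); set_to_none=True
        # frees gradient tensors instead of filling them with zeros.
        optimizer.zero_grad(set_to_none=True)
```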
@@ -179,10 +179,6 @@ def optimizer_step(
         model = model or self.lightning_module
         self.precision_plugin.optimizer_step(model, optimizer, opt_idx, closure, **kwargs)

-    def optimizer_zero_grad(self, current_epoch: int, batch_idx: int, optimizer: Optimizer, opt_idx: int) -> None:
-        """Zeros all model parameter's gradients."""
-        self.lightning_module.optimizer_zero_grad(current_epoch, batch_idx, optimizer, opt_idx)
-
     def _setup_model_and_optimizers(self, model: Module, optimizers: List[Optimizer]) -> Tuple[Module, List[Optimizer]]:
         """Setup a model and multiple optimizers together.
1 change: 0 additions & 1 deletion pytorch_lightning/profiler/pytorch.py
@@ -195,7 +195,6 @@ def __call__(self, num_step: int) -> "ProfilerAction":
 class PyTorchProfiler(BaseProfiler):

     RECORD_FUNCTIONS = {
-        "training_step_and_backward",
         "training_step",
         "backward",
         "validation_step",
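Since the separate `training_step` and `backward` actions stay in `RECORD_FUNCTIONS`, both regions are still recorded individually; only the combined record disappears. A minimal usage sketch (the `dirpath`/`filename` values are illustrative):

```python
from pytorch_lightning import Trainer
from pytorch_lightning.profiler import PyTorchProfiler

# Records "training_step" and "backward" as separate profiler actions;
# a "training_step_and_backward" entry no longer appears in the report.
trainer = Trainer(profiler=PyTorchProfiler(dirpath=".", filename="perf_logs"))
```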
3 changes: 1 addition & 2 deletions pytorch_lightning/profiler/xla.py
@@ -53,9 +53,8 @@

 class XLAProfiler(BaseProfiler):

-    STEP_FUNCTIONS = {"training_step_and_backward", "validation_step", "test_step", "predict_step"}
+    STEP_FUNCTIONS = {"validation_step", "test_step", "predict_step"}
     RECORD_FUNCTIONS = {
-        "training_step_and_backward",
         "training_step",
         "backward",
         "validation_step",
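Usage of the XLA profiler is unchanged; only the action sets shrink, with step boundaries now tracked for the validation/test/predict steps. A minimal sketch, assuming the default constructor:

```python
from pytorch_lightning import Trainer
from pytorch_lightning.profiler import XLAProfiler

# "training_step" and "backward" remain plain record functions;
# the combined step action is gone from STEP_FUNCTIONS.
trainer = Trainer(profiler=XLAProfiler())
```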
1 change: 0 additions & 1 deletion tests/profiler/test_profiler.py
@@ -313,7 +313,6 @@ def test_pytorch_profiler_trainer_ddp(tmpdir, pytorch_profiler):
     expected = {"[Strategy]DDPStrategy.validation_step"}
     if not _KINETO_AVAILABLE:
         expected |= {
-            "training_step_and_backward",
             "[Strategy]DDPStrategy.training_step",
             "[Strategy]DDPStrategy.backward",
         }