
Commit 44ba969
[FIX] Incorporate suggestions
f-dangel committed Nov 8, 2023
1 parent d4c9921 commit 44ba969
Showing 2 changed files with 3 additions and 3 deletions.
singd/optim/optimizer.py (2 changes: 1 addition & 1 deletion)
@@ -559,7 +559,7 @@ def _register_tensor_hook_on_output_to_accumulate_H_terms(
     ):
         """Register a tensor hook on the module's output that accumulates the H terms.
-        This function can be used as a `full_backward_hook`.
+        This function can be used as a `forward_hook`.
         Only installs the hook for steps matching the specified update frequency.
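
For reference, the corrected docstring describes a hook that runs during the forward pass and attaches a tensor hook to the module's output; that tensor hook then fires during the backward pass, which is where gradient-based statistics (such as SINGD's H terms) can be accumulated. Below is a minimal, self-contained sketch of that pattern in plain PyTorch. The function name and the print-based "accumulation" are illustrative assumptions, not SINGD's actual implementation.

import torch
from torch import nn


def install_output_grad_hook(module: nn.Module, inputs, output: torch.Tensor):
    """Forward hook that registers a tensor hook on the module's output.

    Illustrative only: the tensor hook receives the gradient w.r.t. the
    output during the backward pass, which is where a KFAC/SINGD-style
    optimizer could accumulate its statistics.
    """

    def accumulate(grad_output: torch.Tensor):
        # Placeholder accumulation; a real optimizer would update its
        # pre-conditioner factors here instead of printing.
        print(f"Output gradient shape: {tuple(grad_output.shape)}")

    output.register_hook(accumulate)


layer = nn.Linear(4, 2)
handle = layer.register_forward_hook(install_output_grad_hook)

loss = layer(torch.randn(8, 4)).sum()
loss.backward()  # triggers the tensor hook installed by the forward hook
handle.remove()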
test/optim/test_inplace_activations.py (4 changes: 2 additions & 2 deletions)
@@ -13,7 +13,7 @@
 from singd.optim.optimizer import SINGD


-@mark.parametrize("inplace", [True, False], ids=["inplace=True", "inplce=False"])
+@mark.parametrize("inplace", [True, False], ids=["inplace=True", "inplace=False"])
 def test_hooks_support_inplace_activations(inplace: bool):
     """Test that SINGD's hooks support in in-place activations.
@@ -52,7 +52,7 @@ def test_compare_training_using_inplace_activations(reduction: str):

     # _inplace indicates that the trained net has in-place activations

-    # NOTE All parameters of this net are supported by SINGD, no optimizer is involved
+    # NOTE All parameters of this net are supported by SINGD
     model = Sequential(
         Conv2d(1, 3, kernel_size=5, stride=2),
         ReLU(),
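
The parametrization fix above only corrects a test ID; the underlying pattern is to run the same test body with and without in-place activations. A minimal standalone sketch of that pattern follows, deliberately kept SINGD-free (plain forward/backward pass, no optimizer step) so that no SINGD-specific API is assumed.

import torch
from pytest import mark
from torch.nn import Conv2d, ReLU, Sequential


@mark.parametrize("inplace", [True, False], ids=["inplace=True", "inplace=False"])
def test_forward_backward_with_inplace_activations(inplace: bool):
    """Forward/backward must work whether or not the activation is in-place.

    Simplified illustration of the parametrization pattern; the real tests
    additionally install SINGD's hooks and compare training runs.
    """
    torch.manual_seed(0)
    model = Sequential(
        Conv2d(1, 3, kernel_size=5, stride=2),
        ReLU(inplace=inplace),  # toggled by the parametrization
    )
    x = torch.randn(2, 1, 28, 28)
    loss = model(x).sum()
    loss.backward()  # must not raise for either value of `inplace`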
