Fix grad params
vivekmig committed Apr 5, 2021
1 parent 23ebbc8 · commit 1b19bcb
Showing 1 changed file with 3 additions and 2 deletions.
tests/attr/layer/test_layer_gradient_x_activation.py — 5 changes: 3 additions & 2 deletions
@@ -113,8 +113,9 @@ def test_gradient_activation_embedding_no_grad(self) -> None:
         input1 = torch.tensor([2, 5, 0, 1])
         input2 = torch.tensor([3, 0, 0, 2])
         model = BasicEmbeddingModel()
-        # for param in model.parameters():
-        #     param.requires_grad = False
+        for param in model.parameters():
+            param.requires_grad = False
+
         with torch.no_grad():
             layer_act = LayerGradientXActivation(model, model.embedding1)
             self.assertEqual(
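For context, a minimal runnable sketch (not part of the commit; the toy model below is a hypothetical stand-in for Captum's BasicEmbeddingModel) of the setting the uncommented lines exercise: every parameter is frozen with requires_grad = False and the forward pass runs under torch.no_grad(), which is the no-gradient configuration this test targets.

    import torch
    import torch.nn as nn

    # Hypothetical stand-in for BasicEmbeddingModel; the real model lives
    # in Captum's test helpers.
    model = nn.Sequential(nn.Embedding(10, 4), nn.Flatten(), nn.Linear(16, 1))

    # The change in this commit: actually run the freezing loop instead of
    # leaving it commented out, so the test covers frozen parameters.
    for param in model.parameters():
        param.requires_grad = False

    with torch.no_grad():
        out = model(torch.tensor([[2, 5, 0, 1]]))

    # Every parameter is frozen, and no autograd graph was built for `out`.
    assert all(not p.requires_grad for p in model.parameters())
    assert not out.requires_grad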
