From 6ea25a79df328ba0c2e597e4e9c4159c990cac8b Mon Sep 17 00:00:00 2001
From: Sergii Dymchenko
Date: Tue, 18 Jul 2023 11:22:33 -0700
Subject: [PATCH] Fix require_grad typo

Fix require_grad typo (should be requires_grad). Before the fix, the
code doesn't raise any errors, but it also doesn't do what it's
supposed to do: assigning to the misspelled attribute has no effect on
autograd.

Also see https://github.com/pytorch/benchmark/pull/1771
---
 bert_pytorch/model/embedding/position.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bert_pytorch/model/embedding/position.py b/bert_pytorch/model/embedding/position.py
index d55c224..75ce9f8 100644
--- a/bert_pytorch/model/embedding/position.py
+++ b/bert_pytorch/model/embedding/position.py
@@ -10,7 +10,7 @@ def __init__(self, d_model, max_len=512):
 
         # Compute the positional encodings once in log space.
         pe = torch.zeros(max_len, d_model).float()
-        pe.require_grad = False
+        pe.requires_grad = False
 
         position = torch.arange(0, max_len).float().unsqueeze(1)
         div_term = (torch.arange(0, d_model, 2).float() * -(math.log(10000.0) / d_model)).exp()
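
Note (not part of the patch): a minimal standalone sketch of why the typo
is silent. torch.Tensor instances accept arbitrary Python attributes, so
the misspelled assignment just creates a dead attribute instead of
toggling gradient tracking; the tensor shape below is arbitrary.

    import torch

    pe = torch.zeros(512, 768).float()

    # Typo: this silently attaches a new Python attribute named
    # "require_grad" to the tensor object; autograd is unaffected.
    pe.require_grad = False
    print(hasattr(pe, "require_grad"))  # True -- plain attribute, ignored by autograd
    print(pe.requires_grad)             # False (the default, not because of the line above)

    # Correct spelling: this actually toggles gradient tracking.
    pe.requires_grad = True
    print(pe.requires_grad)             # True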