Add with torch.no_grad() to DistilBERT integration test forward pass (#14979)

* refactor: wrap forward pass in a no_grad context

* Update tests/test_modeling_distilbert.py

* fix: rm `no_grad` from non-integration tests

* chore: rm whitespace change
Jake Tae, 2022-01-13 00:42:39 +09:00, committed by GitHub
parent 021f2ea987
commit 97f3beed36


@@ -284,7 +284,8 @@ class DistilBertModelIntergrationTest(unittest.TestCase):
         model = DistilBertModel.from_pretrained("distilbert-base-uncased")
         input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
         attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        output = model(input_ids, attention_mask=attention_mask)[0]
+        with torch.no_grad():
+            output = model(input_ids, attention_mask=attention_mask)[0]
         expected_shape = torch.Size((1, 11, 768))
         self.assertEqual(output.shape, expected_shape)
         expected_slice = torch.tensor(
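
For context, a minimal sketch of why the inference-only forward pass is wrapped in torch.no_grad(): inside the context, autograd does not record operations, so no computation graph is built and the output carries no gradient history, which reduces memory use in tests that never call backward(). The sketch below uses a stand-in torch.nn.Linear model and made-up variable names (not DistilBERT and not the test's actual code) so it runs without downloading pretrained weights.

import torch

# Stand-in model: a plain Linear layer instead of DistilBERT, so the
# sketch is self-contained and needs no pretrained checkpoint.
model = torch.nn.Linear(4, 2)
model.eval()

x = torch.randn(1, 4)

# Without no_grad: autograd records the op, so the output is attached
# to a computation graph and keeps intermediate buffers alive.
y_tracked = model(x)
print(y_tracked.grad_fn is not None)  # True

# With no_grad: the forward pass is untracked; no graph is built,
# matching the pattern this commit applies to the integration test.
with torch.no_grad():
    y_untracked = model(x)
print(y_untracked.grad_fn is None)  # True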