Mirror of https://github.com/huggingface/transformers.git
Add `with torch.no_grad()` to DistilBERT integration test forward pass (#14979)
* refactor: wrap forward pass around no_grad context
* Update tests/test_modeling_distilbert.py
* fix: rm `no_grad` from non-integration tests
* chore: rm whitespace change
parent 021f2ea987
commit 97f3beed36
tests/test_modeling_distilbert.py
@@ -284,7 +284,8 @@ class DistilBertModelIntergrationTest(unittest.TestCase):
         model = DistilBertModel.from_pretrained("distilbert-base-uncased")
         input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
         attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        output = model(input_ids, attention_mask=attention_mask)[0]
+        with torch.no_grad():
+            output = model(input_ids, attention_mask=attention_mask)[0]
         expected_shape = torch.Size((1, 11, 768))
         self.assertEqual(output.shape, expected_shape)
         expected_slice = torch.tensor(
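
For context, a minimal self-contained sketch of the test as it looks after this change, assuming `torch` and `transformers` are installed. The class name is taken from the hunk header; the method name is hypothetical, since the diff does not show it. The `expected_slice` assertion is truncated in the hunk above, so the sketch checks only the output shape:

import unittest

import torch
from transformers import DistilBertModel


class DistilBertModelIntergrationTest(unittest.TestCase):
    # Method name is an assumption; the diff hunk does not include it.
    def test_inference_no_head(self):
        model = DistilBertModel.from_pretrained("distilbert-base-uncased")
        input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
        attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])

        # torch.no_grad() disables autograd bookkeeping for the forward pass:
        # no gradient buffers are allocated, which reduces memory use and
        # runtime in an inference-only integration test.
        with torch.no_grad():
            output = model(input_ids, attention_mask=attention_mask)[0]

        # (1, 11, 768): batch of 1, 11 input tokens, DistilBERT hidden size 768.
        expected_shape = torch.Size((1, 11, 768))
        self.assertEqual(output.shape, expected_shape)

Per the commit message, `no_grad` is applied only here; the non-integration tests exercise backpropagation and so must keep gradients enabled.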