Torchscript test for DistilBERT (#13351)
* Torchscript test for DistilBERT

* Update tests/test_modeling_distilbert.py
parent 73a0381282
commit 680733a7c4
tests/test_modeling_distilbert.py

@@ -12,12 +12,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.


 import os
 import tempfile
 import unittest

 from transformers import DistilBertConfig, is_torch_available
-from transformers.testing_utils import require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, require_torch_gpu, slow, torch_device

 from .test_configuration_common import ConfigTester
 from .test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
@@ -252,6 +252,29 @@ class DistilBertModelTest(ModelTesterMixin, unittest.TestCase):
             model = DistilBertModel.from_pretrained(model_name)
             self.assertIsNotNone(model)

+    @slow
+    @require_torch_gpu
+    def test_torchscript_device_change(self):
+        config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
+        for model_class in self.all_model_classes:
+
+            # DistilBertForMultipleChoice behaves incorrectly in JIT environments.
+            if model_class == DistilBertForMultipleChoice:
+                return
+
+            config.torchscript = True
+            model = model_class(config=config)
+
+            inputs_dict = self._prepare_for_class(inputs_dict, model_class)
+            traced_model = torch.jit.trace(
+                model, (inputs_dict["input_ids"].to("cpu"), inputs_dict["attention_mask"].to("cpu"))
+            )
+
+            with tempfile.TemporaryDirectory() as tmp:
+                torch.jit.save(traced_model, os.path.join(tmp, "traced_model.pt"))
+                loaded = torch.jit.load(os.path.join(tmp, "traced_model.pt"), map_location=torch_device)
+                loaded(inputs_dict["input_ids"].to(torch_device), inputs_dict["attention_mask"].to(torch_device))
+
+
 @require_torch
 class DistilBertModelIntergrationTest(unittest.TestCase):
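
For context, here is a minimal standalone sketch of the round trip the new test exercises: trace a DistilBertModel configured with torchscript=True on CPU, save the traced module, and reload it onto another device. The tiny config values and the file name are illustrative assumptions, not taken from the test suite.

# Minimal sketch (not part of the commit): trace, save, and reload a
# randomly initialized DistilBertModel, mirroring test_torchscript_device_change.
# The small config values and file name below are illustrative assumptions.
import os
import tempfile

import torch
from transformers import DistilBertConfig, DistilBertModel

config = DistilBertConfig(
    vocab_size=99, dim=32, n_layers=2, n_heads=4, hidden_dim=37, torchscript=True
)
model = DistilBertModel(config)
model.eval()

input_ids = torch.randint(0, config.vocab_size, (1, 8))
attention_mask = torch.ones_like(input_ids)

# torchscript=True makes the model return plain tuples, which torch.jit.trace needs.
traced = torch.jit.trace(model, (input_ids.to("cpu"), attention_mask.to("cpu")))

device = "cuda" if torch.cuda.is_available() else "cpu"
with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "traced_model.pt")
    torch.jit.save(traced, path)
    # map_location moves the traced weights to the target device before running.
    loaded = torch.jit.load(path, map_location=device)
    outputs = loaded(input_ids.to(device), attention_mask.to(device))
    print(outputs[0].shape)  # last hidden state: (1, 8, config.dim)

The test itself is gated by @slow and @require_torch_gpu, so it only runs when a CUDA device is available and slow tests are enabled (RUN_SLOW=1 in the transformers test setup).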