From 11c5c7329fc75e617355cbc3b020f5526d5d26e6 Mon Sep 17 00:00:00 2001
From: rightyonghu
Date: Sat, 27 Aug 2022 23:29:07 +0800
Subject: [PATCH] add test

---
 tests/models/ernie/test_modeling_ernie.py | 48 +----------------------
 1 file changed, 1 insertion(+), 47 deletions(-)

diff --git a/tests/models/ernie/test_modeling_ernie.py b/tests/models/ernie/test_modeling_ernie.py
index 68760dc121af5..be9af7d7688d0 100644
--- a/tests/models/ernie/test_modeling_ernie.py
+++ b/tests/models/ernie/test_modeling_ernie.py
@@ -576,50 +576,4 @@ def test_torchscript_device_change(self):
         with tempfile.TemporaryDirectory() as tmp:
             torch.jit.save(traced_model, os.path.join(tmp, "ernie.pt"))
             loaded = torch.jit.load(os.path.join(tmp, "ernie.pt"), map_location=torch_device)
-            loaded(inputs_dict["input_ids"].to(torch_device), inputs_dict["attention_mask"].to(torch_device))
-
-
-@require_torch
-class ErnieModelIntegrationTest(unittest.TestCase):
-    @slow
-    def test_inference_no_head_absolute_embedding(self):
-        model = ErnieModel.from_pretrained("nghuyong/ernie-3.0-base-zh")
-        input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
-        attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        with torch.no_grad():
-            output = model(input_ids, attention_mask=attention_mask)[0]
-        expected_shape = torch.Size((1, 11, 768))
-        self.assertEqual(output.shape, expected_shape)
-        expected_slice = torch.tensor([[[0.4249, 0.1008, 0.7531], [0.3771, 0.1188, 0.7467], [0.4152, 0.1098, 0.7108]]])
-
-        self.assertTrue(torch.allclose(output[:, 1:4, 1:4], expected_slice, atol=1e-4))
-
-    @slow
-    def test_inference_no_head_relative_embedding_key(self):
-        model = ErnieModel.from_pretrained("zhiheng-huang/nghuyong/ernie-3.0-base-zh-embedding-relative-key")
-        input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
-        attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        with torch.no_grad():
-            output = model(input_ids, attention_mask=attention_mask)[0]
-        expected_shape = torch.Size((1, 11, 768))
-        self.assertEqual(output.shape, expected_shape)
-        expected_slice = torch.tensor(
-            [[[0.0756, 0.3142, -0.5128], [0.3761, 0.3462, -0.5477], [0.2052, 0.3760, -0.1240]]]
-        )
-
-        self.assertTrue(torch.allclose(output[:, 1:4, 1:4], expected_slice, atol=1e-4))
-
-    @slow
-    def test_inference_no_head_relative_embedding_key_query(self):
-        model = ErnieModel.from_pretrained("zhiheng-huang/nghuyong/ernie-3.0-base-zh-embedding-relative-key-query")
-        input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
-        attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        with torch.no_grad():
-            output = model(input_ids, attention_mask=attention_mask)[0]
-        expected_shape = torch.Size((1, 11, 768))
-        self.assertEqual(output.shape, expected_shape)
-        expected_slice = torch.tensor(
-            [[[0.6496, 0.3784, 0.8203], [0.8148, 0.5656, 0.2636], [-0.0681, 0.5597, 0.7045]]]
-        )
-
-        self.assertTrue(torch.allclose(output[:, 1:4, 1:4], expected_slice, atol=1e-4))
+            loaded(inputs_dict["input_ids"].to(torch_device), inputs_dict["attention_mask"].to(torch_device))
\ No newline at end of file
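
For reference, a minimal standalone sketch of the checkpoint-verification pattern the removed ErnieModelIntegrationTest used, mirroring the deleted test_inference_no_head_absolute_embedding. Of the three checkpoints the class referenced, only "nghuyong/ernie-3.0-base-zh" is a valid Hub repo ID; the two relative-embedding tests pointed at doubled-namespace names ("zhiheng-huang/nghuyong/..."), which appear to be leftovers from the BERT test file this was copied from. All tensor values below come from the deleted test itself:

    import torch
    from transformers import ErnieModel

    # Load the public checkpoint and run a fixed input through the bare encoder.
    model = ErnieModel.from_pretrained("nghuyong/ernie-3.0-base-zh")
    input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
    attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
    with torch.no_grad():
        output = model(input_ids, attention_mask=attention_mask)[0]

    # The last hidden state should be (batch, seq_len, hidden_size) = (1, 11, 768),
    # and a 3x3 slice is compared against reference values to catch numerical drift.
    assert output.shape == torch.Size((1, 11, 768))
    expected_slice = torch.tensor(
        [[[0.4249, 0.1008, 0.7531], [0.3771, 0.1188, 0.7467], [0.4152, 0.1098, 0.7108]]]
    )
    assert torch.allclose(output[:, 1:4, 1:4], expected_slice, atol=1e-4)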