Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00
fix XPU UT error case brought by RNG difference between XPU and CUDA (#37121)
* fix XPU UT error case brought by RNG difference between XPU and CUDA
Signed-off-by: YAO Matrix <matrix.yao@intel.com>
* enable tests/models/llama/test_modeling_llama.py::LlamaIntegrationTest::test_model_7b_logits and tests/models/llama/test_modeling_llama.py::LlamaIntegrationTest::test_model_7b_logits_bf16 on xpu
Signed-off-by: YAO Matrix <matrix.yao@intel.com>
* Revert "enable tests/models/llama/test_modeling_llama.py::LlamaIntegrationTest::test_model_7b_logits and tests/models/llama/test_modeling_llama.py::LlamaIntegrationTest::test_model_7b_logits_bf16 on xpu"
This reverts commit 3ef83a4f02.
---------
Signed-off-by: YAO Matrix <matrix.yao@intel.com>
This commit is contained in:
parent 897ff9af0e
commit 24e311f42b
@@ -976,7 +976,8 @@ class LogitsProcessorTest(unittest.TestCase):
         input_ids[:, -1] = 10
         scores_wo_bias = scores[:, -1].clone()
         out = watermark(input_ids=input_ids, scores=scores)
-        self.assertTrue((out[:, 1] == scores_wo_bias + watermark.bias).all())
+        greenlist_id = 3 if torch_device == "xpu" else 1
+        self.assertTrue((out[:, greenlist_id] == scores_wo_bias + watermark.bias).all())
 
     @parameterized.expand([(5, 3, 10000), (10, 5, 1000)])
     def test_synthidtext_watermarking_processor_bias_uniformity(self, ngram_len, num_layers, vocab_size):
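For context, here is a minimal sketch of why the expected green-list index is device-dependent. It assumes (this is an assumption about the mechanism, not the test's actual implementation) that the watermark processor picks its green list from a seeded RNG, and that PyTorch RNG streams are backend-specific, so an identical seed can select different token ids on CUDA and XPU. The helper name below is hypothetical.

import torch

# Hypothetical helper, not from the test suite: pick a "green list" of token
# ids from a seeded, device-local RNG stream.
def greenlist_for_device(vocab_size: int, greenlist_size: int, device: str, seed: int = 0) -> torch.Tensor:
    gen = torch.Generator(device=device)
    gen.manual_seed(seed)
    # torch.rand draws from the backend's own RNG implementation, so the
    # resulting ordering (and hence the selected ids) can differ between
    # CUDA and XPU even with the same seed.
    scores = torch.rand(vocab_size, generator=gen, device=device)
    return torch.argsort(scores, descending=True)[:greenlist_size]

# The test therefore hard-codes a per-backend expectation, mirroring the diff:
# greenlist_id = 3 if torch_device == "xpu" else 1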