This commit is contained in:
Quentin Lhoest 2025-06-20 17:42:17 +02:00
parent 005459827e
commit e2ed15c465
6 changed files with 6 additions and 22 deletions

View File

@@ -117,7 +117,7 @@ class LayoutLMv2ImageProcessingTest(ImageProcessingTestMixin, unittest.TestCase)
# with apply_OCR = True
image_processing = image_processing_class()
image = Image.open(ds[0]["file"]).convert("RGB")
image = ds[0]["image"]
encoding = image_processing(image, return_tensors="pt")

View File

@@ -193,11 +193,7 @@ class LayoutLMv2ProcessorIntegrationTests(unittest.TestCase):
from datasets import load_dataset
ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
image_1 = Image.open(ds[0]["file"]).convert("RGB")
image_2 = Image.open(ds[1]["file"]).convert("RGB")
return image_1, image_2
return ds[0]["image"], ds[1]["image"]
@cached_property
def get_tokenizers(self):

View File

@@ -112,7 +112,7 @@ class LayoutLMv3ImageProcessingTest(ImageProcessingTestMixin, unittest.TestCase)
for image_processing_class in self.image_processor_list:
image_processor = image_processing_class()
image = Image.open(ds[0]["file"]).convert("RGB")
image = ds[0]["image"]
encoding = image_processor(image, return_tensors="pt")

View File

@@ -173,11 +173,7 @@ class LayoutLMv3ProcessorIntegrationTests(unittest.TestCase):
from datasets import load_dataset
ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
image_1 = Image.open(ds[0]["file"]).convert("RGB")
image_2 = Image.open(ds[1]["file"]).convert("RGB")
return image_1, image_2
return ds[0]["image"], ds[1]["image"]
@cached_property
def get_tokenizers(self):

View File

@@ -201,11 +201,7 @@ class LayoutXLMProcessorIntegrationTests(unittest.TestCase):
from datasets import load_dataset
ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
image_1 = Image.open(ds[0]["file"]).convert("RGB")
image_2 = Image.open(ds[1]["file"]).convert("RGB")
return image_1, image_2
return ds[0]["image"], ds[1]["image"]
@cached_property
def get_tokenizers(self):

View File

@@ -223,11 +223,7 @@ class UdopProcessorIntegrationTests(unittest.TestCase):
from datasets import load_dataset
ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
image_1 = Image.open(ds[0]["file"]).convert("RGB")
image_2 = Image.open(ds[1]["file"]).convert("RGB")
return image_1, image_2
return ds[0]["image"], ds[1]["image"]
@cached_property
def get_tokenizers(self):