again
This commit is contained in:
parent 005459827e
commit e2ed15c465
@@ -117,7 +117,7 @@ class LayoutLMv2ImageProcessingTest(ImageProcessingTestMixin, unittest.TestCase)
         # with apply_OCR = True
         image_processing = image_processing_class()
 
-        image = Image.open(ds[0]["file"]).convert("RGB")
+        image = ds[0]["image"]
 
         encoding = image_processing(image, return_tensors="pt")
 
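For context, a minimal sketch (not part of this commit) of the pattern the updated integration test exercises: instantiating the LayoutLMv2 image processor with its default apply_ocr=True and running it on a dataset image. It assumes pytesseract and the Tesseract binary are installed, since OCR runs inside the processor, and it names LayoutLMv2ImageProcessor directly rather than the test mixin's image_processing_class.

from datasets import load_dataset
from transformers import LayoutLMv2ImageProcessor

# The DocVQA fixtures decode straight to PIL images via the "image" column.
ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
image = ds[0]["image"]

# Default settings keep apply_ocr=True, so the processor runs Tesseract and
# returns recognized words and normalized boxes alongside pixel_values.
image_processing = LayoutLMv2ImageProcessor()
encoding = image_processing(image, return_tensors="pt")

print(encoding["pixel_values"].shape)  # torch.Size([1, 3, 224, 224]) with the default 224x224 size
print(encoding["words"][0][:5], encoding["boxes"][0][:5])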
@@ -193,11 +193,7 @@ class LayoutLMv2ProcessorIntegrationTests(unittest.TestCase):
         from datasets import load_dataset
 
         ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
-
-        image_1 = Image.open(ds[0]["file"]).convert("RGB")
-        image_2 = Image.open(ds[1]["file"]).convert("RGB")
-
-        return image_1, image_2
+        return ds[0]["image"], ds[1]["image"]
 
     @cached_property
     def get_tokenizers(self):
@@ -112,7 +112,7 @@ class LayoutLMv3ImageProcessingTest(ImageProcessingTestMixin, unittest.TestCase)
         for image_processing_class in self.image_processor_list:
             image_processor = image_processing_class()
 
-            image = Image.open(ds[0]["file"]).convert("RGB")
+            image = ds[0]["image"]
 
             encoding = image_processor(image, return_tensors="pt")
 
@@ -173,11 +173,7 @@ class LayoutLMv3ProcessorIntegrationTests(unittest.TestCase):
         from datasets import load_dataset
 
         ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
-
-        image_1 = Image.open(ds[0]["file"]).convert("RGB")
-        image_2 = Image.open(ds[1]["file"]).convert("RGB")
-
-        return image_1, image_2
+        return ds[0]["image"], ds[1]["image"]
 
     @cached_property
     def get_tokenizers(self):
@@ -201,11 +201,7 @@ class LayoutXLMProcessorIntegrationTests(unittest.TestCase):
         from datasets import load_dataset
 
         ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
-
-        image_1 = Image.open(ds[0]["file"]).convert("RGB")
-        image_2 = Image.open(ds[1]["file"]).convert("RGB")
-
-        return image_1, image_2
+        return ds[0]["image"], ds[1]["image"]
 
     @cached_property
     def get_tokenizers(self):
@@ -223,11 +223,7 @@ class UdopProcessorIntegrationTests(unittest.TestCase):
         from datasets import load_dataset
 
         ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")
-
-        image_1 = Image.open(ds[0]["file"]).convert("RGB")
-        image_2 = Image.open(ds[1]["file"]).convert("RGB")
-
-        return image_1, image_2
+        return ds[0]["image"], ds[1]["image"]
 
     @cached_property
     def get_tokenizers(self):
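The same swap appears in all four test suites above: the hf-internal-testing/fixtures_docvqa fixtures expose the documents through an "image" column that datasets decodes to PIL images, so the helpers return those directly instead of opening files by path. A minimal sketch of the resulting access pattern (not part of the commit, assuming the dataset's "image" feature as used in the diff):

from datasets import load_dataset

ds = load_dataset("hf-internal-testing/fixtures_docvqa", split="test")

# Previously (removed above): image_1 = Image.open(ds[0]["file"]).convert("RGB")
# Now the "image" column already yields decoded PIL.Image.Image objects.
image_1 = ds[0]["image"]
image_2 = ds[1]["image"]

print(type(image_1).__name__, image_1.size, image_1.mode)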