Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
[NLLB-MoE] model_type update for auto mapping (#22470)

Edit the default model type and set the integration-test checkpoint paths to hf-internal-testing.
This commit is contained in:
parent
11426641dc
commit
349e1242d9
@@ -125,7 +125,7 @@ class NllbMoeConfig(PretrainedConfig):
     >>> # Accessing the model configuration
     >>> configuration = model.config
     ```"""
 
-    model_type = "nllb_moe"
+    model_type = "nllb-moe"
     keys_to_ignore_at_inference = ["past_key_values"]
     attribute_map = {"num_attention_heads": "encoder_attention_heads", "hidden_size": "d_model"}
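The `model_type` string is the key the auto classes use to resolve a checkpoint's `config.json` to the right configuration class, so it has to match the hyphenated name registered in the auto mapping ("nllb-moe"). A minimal sketch of that lookup, assuming a transformers release that already includes NLLB-MoE support:

```python
from transformers import AutoConfig, NllbMoeConfig

# The auto mapping is keyed by the hyphenated model_type string.
config = AutoConfig.for_model("nllb-moe")
print(type(config).__name__)  # NllbMoeConfig
print(config.model_type)      # "nllb-moe"

# A freshly constructed config carries the same identifier; this is the value
# written into config.json when a checkpoint is saved.
assert NllbMoeConfig().model_type == "nllb-moe"
```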
@@ -354,14 +354,14 @@ class NllbMoeModelIntegrationTests(unittest.TestCase):
 
     @cached_property
     def tokenizer(self):
-        return NllbTokenizer.from_pretrained("ArthurZ/random-nllb-moe-2-experts")
+        return NllbTokenizer.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts")
 
     @cached_property
     def big_model(self):
         return NllbMoeForConditionalGeneration.from_pretrained("facebook/nllb-moe-54b")
 
     def inference_no_head(self):
-        model = NllbMoeModel.from_pretrained("ArthurZ/random-nllb-moe-2-experts").eval()
+        model = NllbMoeModel.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts").eval()
         with torch.no_grad():
             output = model(**self.model_inputs)
         # fmt: off
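The same pattern can be reproduced outside the test class as a small standalone script. This is a sketch only: it assumes the hf-internal-testing/random-nllb-moe-2-experts checkpoint is reachable and randomly initialised, so the outputs are useful for shape and wiring checks rather than translation quality, and the example sentence is arbitrary.

```python
import torch
from transformers import NllbMoeModel, NllbTokenizer

# Tiny randomly-initialised 2-expert checkpoint used by the CI tests.
repo = "hf-internal-testing/random-nllb-moe-2-experts"

tokenizer = NllbTokenizer.from_pretrained(repo)
model = NllbMoeModel.from_pretrained(repo).eval()

# Encoder-decoder forward pass without the LM head, as in inference_no_head.
inputs = tokenizer(["Hello, world!"], return_tensors="pt")
with torch.no_grad():
    output = model(**inputs, decoder_input_ids=inputs["input_ids"])

print(output.last_hidden_state.shape)  # (batch, sequence_length, d_model)
```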
@@ -382,7 +382,7 @@ class NllbMoeModelIntegrationTests(unittest.TestCase):
         and `transformers` implementation of NLLB-MoE transformers. We only check the logits
         of the second sample of the batch, as it is padded.
         """
-        model = NllbMoeForConditionalGeneration.from_pretrained("ArthurZ/random-nllb-moe-2-experts").eval()
+        model = NllbMoeForConditionalGeneration.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts").eval()
         with torch.no_grad():
             output = model(**self.model_inputs)
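The hunk stops before the actual comparison, but the usual pattern in these integration tests is to slice the logits of the padded sample and compare them against values recorded from a reference run. A hedged sketch of that kind of check; the tensor shapes and expected values below are placeholders, not the ones kept in the repository:

```python
import torch

# Stand-in for output.logits from the forward pass above: (batch=2, seq_len=4, vocab=8).
logits = torch.zeros(2, 4, 8)

# Placeholder for the hard-coded expectations in the test file; the real values
# come from a reference run of the original checkpoint.
EXPECTED_LOGITS = torch.zeros(3, 3)

# Only the second sample (index 1) is checked, since it is the padded one and
# therefore exercises the attention-mask handling.
torch.testing.assert_close(logits[1, :3, :3], EXPECTED_LOGITS, rtol=1e-3, atol=1e-3)
```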