Conversion scripts shouldn't have relative imports (#6991)

Lysandre Debut authored on 2020-09-07 14:31:06 +02:00; committed by GitHub
parent 1650130b0f
commit 77cd0e13d2
15 changed files with 15 additions and 30 deletions
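For context: a relative import such as from .utils import logging only resolves when the module is loaded as part of the transformers package. The conversion scripts are meant to be run directly (python path/to/convert_script.py), in which case Python executes the file as the top-level __main__ module with no parent package, and the relative import fails. A minimal sketch of the failure mode, using a hypothetical package layout rather than the real file paths from this commit:

# pkg/convert_script.py -- hypothetical layout: pkg/ also contains utils.py
from .utils import logging  # resolves only when run as `python -m pkg.convert_script`

# Running the file directly instead:
#   $ python pkg/convert_script.py
#   ImportError: attempted relative import with no known parent package
# (message on recent Python 3; the absolute form `from pkg.utils import logging`
# works in both invocation modes, which is the fix this commit applies)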


@@ -20,8 +20,7 @@ import argparse
 import torch
 from transformers import AlbertConfig, AlbertForPreTraining, load_tf_weights_in_albert
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()
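With the absolute import, the top of each conversion script follows the same package-independent pattern. A short sketch of the resulting header; the get_logger line is an assumption about the surrounding script, not part of the hunk above:

from transformers.utils import logging

logging.set_verbosity_info()           # shown in the hunk: log at INFO during conversion
logger = logging.get_logger(__name__)  # assumed: the standard way transformers modules obtain a logger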


@@ -31,8 +31,7 @@ from transformers import (
     BartTokenizer,
 )
 from transformers.modeling_bart import _make_linear_from_emb
-from .utils import logging
+from transformers.utils import logging
 FAIRSEQ_MODELS = ["bart.large", "bart.large.mnli", "bart.large.cnn", "bart_xsum/model.pt"]


@@ -15,8 +15,7 @@ import tensorflow as tf
 import torch
 from transformers import BertConfig, BertModel
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -20,8 +20,7 @@ import argparse
 import torch
 from transformers import BertConfig, BertForPreTraining, load_tf_weights_in_bert
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -20,8 +20,7 @@ import argparse
 import torch
 from transformers import ElectraConfig, ElectraForMaskedLM, ElectraForPreTraining, load_tf_weights_in_electra
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -20,8 +20,7 @@ import argparse
 import torch
 from transformers import CONFIG_NAME, WEIGHTS_NAME, GPT2Config, GPT2Model, load_tf_weights_in_gpt2
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -3,8 +3,7 @@ import argparse
 import torch
 from transformers import MobileBertConfig, MobileBertForPreTraining, load_tf_weights_in_mobilebert
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -20,8 +20,7 @@ import argparse
 import torch
 from transformers import CONFIG_NAME, WEIGHTS_NAME, OpenAIGPTConfig, OpenAIGPTModel, load_tf_weights_in_openai_gpt
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -78,8 +78,7 @@ from transformers import (
     load_pytorch_checkpoint_in_tf2_model,
 )
 from transformers.file_utils import hf_bucket_url
-from .utils import logging
+from transformers.utils import logging
 if is_torch_available():


@@ -22,8 +22,7 @@ import numpy as np
 import torch
 from transformers import ReformerConfig, ReformerModelWithLMHead
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -26,8 +26,7 @@ from packaging import version
 from transformers.modeling_bert import BertIntermediate, BertLayer, BertOutput, BertSelfAttention, BertSelfOutput
 from transformers.modeling_roberta import RobertaConfig, RobertaForMaskedLM, RobertaForSequenceClassification
-from .utils import logging
+from transformers.utils import logging
 if version.parse(fairseq.__version__) < version.parse("0.9.0"):


@@ -20,8 +20,7 @@ import argparse
 import torch
 from transformers import T5Config, T5Model, load_tf_weights_in_t5
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -31,8 +31,7 @@ from transformers import (
     load_tf_weights_in_transfo_xl,
 )
 from transformers.tokenization_transfo_xl import CORPUS_NAME, VOCAB_FILES_NAMES
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -23,8 +23,7 @@ import torch
 from transformers import CONFIG_NAME, WEIGHTS_NAME
 from transformers.tokenization_xlm import VOCAB_FILES_NAMES
-from .utils import logging
+from transformers.utils import logging
 logging.set_verbosity_info()


@@ -29,8 +29,7 @@ from transformers import (
     XLNetLMHeadModel,
     load_tf_weights_in_xlnet,
 )
-from .utils import logging
+from transformers.utils import logging
 GLUE_TASKS_NUM_LABELS = {