Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-04 05:10:06 +06:00

* Allow dynamic modules to use relative imports
* Work for configs
* Fix last merge conflict
* Save code of registered custom objects
* Map strings to strings
* Fix test
* Add tokenizer
* Rework tests
* Tests
* Ignore fixtures py files for tests
* Tokenizer test + fix collection
* With full path
* Rework integration
* Fix typo
* Remove changes in conftest
* Test for tokenizers
* Add documentation
* Update docs/source/custom_models.mdx

Co-authored-by: Lysandre Debut <lysandre@huggingface.co>

* Add file structure and file content
* Add more doc
* Style
* Update docs/source/custom_models.mdx

Co-authored-by: Suraj Patil <surajp815@gmail.com>

* Address review comments

Co-authored-by: Lysandre Debut <lysandre@huggingface.co>
Co-authored-by: Suraj Patil <surajp815@gmail.com>
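The "Save code of registered custom objects" step means that once a custom class is registered for an auto class, saving the model also copies its source files next to the weights. A minimal sketch of that flow, using the CustomConfig/CustomModel pair from the file below; it assumes custom_configuration.py and custom_modeling.py sit in the working directory, and hidden_size being accepted by CustomConfig is an assumption (PretrainedConfig stores unknown kwargs as attributes):

from custom_configuration import CustomConfig
from custom_modeling import CustomModel

# Register the classes so save_pretrained / push_to_hub also copies their
# source files alongside the weights (the behavior this commit adds).
CustomConfig.register_for_auto_class()
CustomModel.register_for_auto_class("AutoModel")

# hidden_size=32 is an assumed kwarg for this toy config.
model = CustomModel(CustomConfig(hidden_size=32))

# Writes config.json, the weights, and the custom .py files into the folder.
model.save_pretrained("saved-custom-model")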
21 lines · 458 B · Python
import torch

from transformers import PreTrainedModel

# Relative import of the sibling configuration file; resolving this when the
# file is loaded as a dynamic module is what "Allow dynamic modules to use
# relative imports" enables.
from .custom_configuration import CustomConfig


class CustomModel(PreTrainedModel):
    # Tie the model to its custom configuration class.
    config_class = CustomConfig
    base_model_prefix = "custom"

    def __init__(self, config):
        super().__init__(config)
        self.linear = torch.nn.Linear(config.hidden_size, config.hidden_size)

    def forward(self, x):
        return self.linear(x)

    def _init_weights(self, module):
        # Toy model: no custom weight initialization needed.
        pass
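For completeness, a hedged loading sketch: once a model like this has been pushed to the Hub with its code saved, the Auto classes can reload it. The repo name "user/custom-model" is hypothetical; trust_remote_code is required because the repo ships executable code.

from transformers import AutoModel

# trust_remote_code opts in to running the custom_modeling.py stored
# alongside the weights in the (hypothetical) Hub repo.
model = AutoModel.from_pretrained("user/custom-model", trust_remote_code=True)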