mirror of
https://github.com/huggingface/transformers.git
synced 2025-08-02 19:21:31 +06:00
[Longformer] Correct longformer docs (#12809)
* fix_torch_device_generate_test

* remove @

* correct longformer docs

Co-authored-by: Patrick von Platen <patrick@huggingface.co>
This commit is contained in:
parent
13fefdf340
commit
2955d50e0c
@@ -1609,7 +1609,6 @@ class LongformerModel(LongformerPreTrainedModel):
>>> SAMPLE_TEXT = ' '.join(['Hello world! '] * 1000)  # long input document
>>> input_ids = torch.tensor(tokenizer.encode(SAMPLE_TEXT)).unsqueeze(0)  # batch of size 1

>>> # Attention mask values -- 0: no attention, 1: local attention, 2: global attention
>>> attention_mask = torch.ones(input_ids.shape, dtype=torch.long, device=input_ids.device)  # initialize to local attention
>>> global_attention_mask = torch.zeros(input_ids.shape, dtype=torch.long, device=input_ids.device)  # initialize to global attention to be deactivated for all tokens
>>> global_attention_mask[:, [1, 4, 21,]] = 1  # Set global attention to random tokens for the sake of this example
Loading…
Reference in New Issue
Block a user