Mirror of https://github.com/huggingface/transformers.git
Merge pull request #690 from shashwath94/projadpsftmax_fix
Transformer XL ProjectedAdaptiveLogSoftmax output fix
Commit 80684f6f86
@@ -114,10 +114,10 @@ class ProjectedAdaptiveLogSoftmax(nn.Module):
             logit = self._compute_logit(hidden, self.out_layers[0].weight,
                                         self.out_layers[0].bias, self.out_projs[0])
             if target is not None:
-                output = -F.log_softmax(logit, dim=-1) \
+                out = -F.log_softmax(logit, dim=-1) \
                         .gather(1, target.unsqueeze(1)).squeeze(1)
             else:
-                output = F.log_softmax(logit, dim=-1)
+                out = F.log_softmax(logit, dim=-1)
         else:
             # construct weights and biases
             weights, biases = [], []
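The rename matters because the `n_clusters == 0` branch must bind its result to the name the method later returns; assuming the surrounding `forward` ends with `return out` (which is what the renaming suggests), the old `output` binding would have left `out` undefined on this path. Below is a minimal, runnable sketch of the fixed branch; `log_prob_head` is a hypothetical stand-in for this part of `ProjectedAdaptiveLogSoftmax.forward`, not the library API.

import torch
import torch.nn.functional as F

def log_prob_head(logit, target=None):
    # Sketch of the fixed n_clusters == 0 branch: the result is always
    # bound to `out`, the name the real method ultimately returns.
    if target is not None:
        # Per-position negative log-likelihood of the target token.
        out = -F.log_softmax(logit, dim=-1) \
            .gather(1, target.unsqueeze(1)).squeeze(1)
    else:
        # Full log-probability distribution over the vocabulary.
        out = F.log_softmax(logit, dim=-1)
    return out

# Toy shapes: 4 positions, vocabulary of 10.
logit = torch.randn(4, 10)
target = torch.randint(0, 10, (4,))
print(log_prob_head(logit, target).shape)  # torch.Size([4])
print(log_prob_head(logit).shape)          # torch.Size([4, 10])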