
Commit

switch to stable softmax when sampling
lucidrains committed Dec 1, 2021
1 parent 2df5ff0 commit 1ad3ab8
Showing 2 changed files with 3 additions and 2 deletions.
3 changes: 2 additions & 1 deletion dalle_pytorch/dalle_pytorch.py
@@ -10,6 +10,7 @@
 from dalle_pytorch import distributed_utils
 from dalle_pytorch.vae import OpenAIDiscreteVAE, VQGanVAE
 from dalle_pytorch.transformer import Transformer, DivideMax
+from dalle_pytorch.attention import stable_softmax

 # helpers

@@ -441,7 +442,7 @@ def generate_texts(
 logits = logits[:, -1, :]

 filtered_logits = top_k(logits, thres = filter_thres)
-probs = F.softmax(filtered_logits / temperature, dim = -1)
+probs = stable_softmax(filtered_logits / temperature, dim = -1)
 sample = torch.multinomial(probs, 1)

 text_tokens = torch.cat((text_tokens, sample), dim=-1)
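For context, the standard way to make softmax numerically stable is to subtract the row-wise maximum from the logits before exponentiating: softmax is invariant to adding a constant along the reduction dimension, so the probabilities are unchanged, but exp() no longer sees large positive inputs that can overflow, for example after dividing logits by a small temperature. The sketch below illustrates that max-subtraction idea in PyTorch; it is an assumption about the technique, not necessarily the exact stable_softmax implemented in dalle_pytorch.attention, and the usage values are illustrative.

import torch

def stable_softmax(t, dim = -1):
    # softmax(x) == softmax(x - c) for any constant c along `dim`,
    # so subtracting the per-row maximum keeps every exp() input <= 0
    # and avoids overflow for large logits or small temperatures
    t = t - t.amax(dim = dim, keepdim = True).detach()
    return t.softmax(dim = dim)

# illustrative usage mirroring the sampling change above (values are made up)
logits = torch.randn(1, 8192) * 100             # large logits that would overflow a naive exp()
probs = stable_softmax(logits / 0.5, dim = -1)  # temperature of 0.5
sample = torch.multinomial(probs, 1)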
2 changes: 1 addition & 1 deletion setup.py
@@ -4,7 +4,7 @@
 name = 'dalle-pytorch',
 packages = find_packages(),
 include_package_data = True,
-version = '1.1.4',
+version = '1.1.5',
 license='MIT',
 description = 'DALL-E - Pytorch',
 author = 'Phil Wang',