fix post attn layer norm when using embedding factorization
lucidrains committed Nov 4, 2020
1 parent cea7de0 commit e8b6988
Showing 2 changed files with 2 additions and 2 deletions.
reformer_pytorch/reformer_pytorch.py (1 addition, 1 deletion)
```diff
@@ -704,7 +704,7 @@ def __init__(self, num_tokens, dim, depth, max_seq_len, heads = 8, dim_head = No
         self.pos_emb = AxialPositionalEmbedding(emb_dim, axial_position_shape)
 
         self.reformer = Reformer(dim, depth, max_seq_len, heads = heads, dim_head = dim_head, bucket_size = bucket_size, n_hashes = n_hashes, ff_chunks = ff_chunks, attn_chunks = attn_chunks, causal = causal, weight_tie = weight_tie, lsh_dropout = lsh_dropout, ff_mult = ff_mult, ff_activation = ff_activation, ff_glu = ff_glu, ff_dropout = ff_dropout, post_attn_dropout = 0., layer_dropout = layer_dropout, random_rotations_per_head = random_rotations_per_head, twin_attention = twin_attention, use_scale_norm = use_scale_norm, use_rezero = use_rezero, use_full_attn = use_full_attn, full_attn_thres = full_attn_thres, reverse_thres = reverse_thres, num_mem_kv = num_mem_kv, one_value_head = one_value_head, n_local_attn_heads = n_local_attn_heads, pkm_layers = pkm_layers, pkm_num_keys = pkm_num_keys)
-        self.norm = nn.LayerNorm(dim)
+        self.norm = nn.LayerNorm(emb_dim)
 
         if return_embeddings:
             self.out = Identity()
```
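Why the one-line change matters: with embedding factorization, `emb_dim` differs from the model width `dim`, and the fix implies the tensor reaching this final `LayerNorm` has `emb_dim` features, so a norm constructed with `dim` would raise a shape mismatch at runtime. Below is a minimal sketch of the failure mode and the fix; the sizes and tensor are illustrative stand-ins, not the library's actual forward pass.

```python
import torch
import torch.nn as nn

# Illustrative sizes, not taken from the repo: a factorized embedding keeps
# the token embedding narrow (emb_dim) while the attention stack runs at dim.
dim, emb_dim = 512, 128

# Stand-in for the (batch, seq, emb_dim) tensor the final norm receives.
x = torch.randn(2, 16, emb_dim)

norm_before = nn.LayerNorm(dim)      # pre-fix: normalized shape tied to dim
norm_after = nn.LayerNorm(emb_dim)   # post-fix: matches the tensor's last axis

print(norm_after(x).shape)  # torch.Size([2, 16, 128])

try:
    # LayerNorm validates the trailing dimensions against normalized_shape,
    # so a dim-sized norm rejects an emb_dim-sized input.
    norm_before(x)
except RuntimeError as err:
    print(f"pre-fix failure: {err}")
```

When `emb_dim == dim` the two constructions are identical, which is why the bug only surfaced once embedding factorization was actually in use.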
setup.py (1 addition, 1 deletion)
```diff
@@ -3,7 +3,7 @@
 setup(
   name = 'reformer_pytorch',
   packages = find_packages(exclude=['examples', 'pretraining']),
-  version = '1.2.1',
+  version = '1.2.2',
   license='MIT',
   description = 'Reformer, the Efficient Transformer, Pytorch',
   author = 'Phil Wang',
```
