Commit fefd4aa

rename transformer dimension so it does not collide
1 parent 5c4d2e8 commit fefd4aa

File tree

1 file changed (+6 −5 lines)


neuralmonkey/decoders/transformer.py

Lines changed: 6 additions & 5 deletions
@@ -107,9 +107,9 @@ def __init__(self,
 
         self.encoder_states = get_attention_states(self.encoder)
         self.encoder_mask = get_attention_mask(self.encoder)
-        self.dimension = self.encoder_states.get_shape()[2].value
+        self.model_dimension = self.encoder_states.get_shape()[2].value
 
-        if self.embedding_size != self.dimension:
+        if self.embedding_size != self.model_dimension:
             raise ValueError("Model dimension and input embedding size"
                              "do not match")
 
@@ -120,12 +120,12 @@ def __init__(self,
 
     @property
     def output_dimension(self) -> int:
-        return self.dimension
+        return self.model_dimension
 
     def embed_inputs(self, inputs: tf.Tensor) -> tf.Tensor:
         embedded = tf.nn.embedding_lookup(self.embedding_matrix, inputs)
         length = tf.shape(inputs)[1]
-        return embedded + position_signal(self.dimension, length)
+        return embedded + position_signal(self.model_dimension, length)
 
     @tensor
     def embedded_train_inputs(self) -> tf.Tensor:
@@ -216,7 +216,8 @@ def layer(self, level: int, inputs: tf.Tensor,
 
         # Feed-forward output projection + dropout
         ff_output = tf.layers.dense(
-            ff_hidden_drop, self.dimension, name="ff_out_{}".format(level))
+            ff_hidden_drop, self.model_dimension,
+            name="ff_out_{}".format(level))
         ff_output = dropout(ff_output, self.dropout_keep_prob, self.train_mode)
 
         # Residual connections + layer normalization
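
The commit message says the rename is to avoid a name collision. The diff itself does not show what `dimension` collided with, but a plausible reading is that a generic attribute name like `dimension` clashed with an attribute of the same name defined elsewhere in the class hierarchy. The following is a minimal, self-contained sketch of that failure mode; the class names and values are hypothetical illustrations, not the actual Neural Monkey API.

# Hypothetical sketch of the collision the rename guards against.
class ModelPart:                      # stand-in for a shared base class
    def __init__(self) -> None:
        self.dimension = 512          # base class already owns `dimension`


class TransformerDecoder(ModelPart):
    def __init__(self, encoder_state_size: int) -> None:
        super().__init__()
        # Before the commit: this would silently overwrite the base
        # class attribute of the same name.
        # self.dimension = encoder_state_size
        # After the commit: a distinct name keeps both values intact.
        self.model_dimension = encoder_state_size


decoder = TransformerDecoder(encoder_state_size=256)
print(decoder.dimension)        # 512, the base attribute, untouched
print(decoder.model_dimension)  # 256, the decoder's own value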
