Skip to content

Commit e6b7bd7

Browse files
committed
word2vec: store embeddings on self so they can be restored later
1 parent 604e504 commit e6b7bd7

File tree

1 file changed

+3
-2
lines changed

1 file changed

+3
-2
lines changed

032-word2vec/model.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ class Word2vec:
66
embedding_size = 2 # word embedding size
77
num_sampled = 15 # Number of negative examples to sample.
88
vocab_size = 0 # Vocab size created from the data API
9+
embedding = None
910
gpu_dynamic_memory_growth = False
1011
loss = None # TF Loss
1112
optimizer = None # Optimizer
@@ -33,8 +34,8 @@ def compile(self):
3334
# Look up embeddings for inputs.
3435
# The conversion of 10,000 columned matrix into a 200 columned matrix is called word embedding.
3536
with tf.device("/cpu:0"):
36-
embeddings = tf.Variable(tf.random_uniform([self.vocab_size, self.embedding_size], -1.0, 1.0))
37-
embed = tf.nn.embedding_lookup(embeddings, self.X) # lookup table
37+
self.embeddings = tf.Variable(tf.random_uniform([self.vocab_size, self.embedding_size], -1.0, 1.0))
38+
embed = tf.nn.embedding_lookup(self.embeddings, self.X) # lookup table
3839

3940
# Construct the variables for the NCE loss
4041
nce_weights = tf.Variable(tf.random_uniform([self.vocab_size, self.embedding_size], -1.0, 1.0))

0 commit comments

Comments (0)