Skip to content

Commit

Permalink
Merge pull request #1508 from microsoft/bug/vae_keras
Browse files Browse the repository at this point in the history
Fix missing imports in multinomial_vae.py and standard_vae.py: qualify `keras.*` references as `tf.keras.*` (the bare `keras` name was never imported), and move the `recommenders` import below the third-party imports.
  • Loading branch information
miguelgfierro authored Aug 27, 2021
2 parents 3e23511 + 987957e commit dbc49ca
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 8 deletions.
10 changes: 5 additions & 5 deletions recommenders/models/vae/multinomial_vae.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,8 +287,8 @@ def _create_model(self):
self.h = Dense(
self.intermediate_dim,
activation="tanh",
kernel_initializer=keras.initializers.glorot_uniform(seed=self.seed),
bias_initializer=keras.initializers.truncated_normal(
kernel_initializer=tf.keras.initializers.glorot_uniform(seed=self.seed),
bias_initializer=tf.keras.initializers.truncated_normal(
stddev=0.001, seed=self.seed
),
)(self.dropout_encoder)
Expand All @@ -304,8 +304,8 @@ def _create_model(self):
self.h_decoder = Dense(
self.intermediate_dim,
activation="tanh",
kernel_initializer=keras.initializers.glorot_uniform(seed=self.seed),
bias_initializer=keras.initializers.truncated_normal(
kernel_initializer=tf.keras.initializers.glorot_uniform(seed=self.seed),
bias_initializer=tf.keras.initializers.truncated_normal(
stddev=0.001, seed=self.seed
),
)
Expand All @@ -318,7 +318,7 @@ def _create_model(self):
# Training
self.model = Model(self.x, self.x_decoded)
self.model.compile(
optimizer=keras.optimizers.Adam(learning_rate=0.001),
optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
loss=self._get_vae_loss,
)

Expand Down
7 changes: 4 additions & 3 deletions recommenders/models/vae/standard_vae.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,16 @@
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from recommenders.evaluation.python_evaluation import ndcg_at_k

import bottleneck as bn
import tensorflow as tf
from tensorflow.keras.layers import *
from tensorflow.keras.models import Model
from tensorflow.keras.losses import binary_crossentropy
from tensorflow.keras import backend as K
from tensorflow.keras.callbacks import ReduceLROnPlateau, ModelCheckpoint, Callback

from recommenders.evaluation.python_evaluation import ndcg_at_k


class LossHistory(Callback):
"""This class is used for saving the validation loss and the training loss per epoch."""
Expand Down Expand Up @@ -292,7 +293,7 @@ def _create_model(self):
# Training
self.model = Model(self.x, self.x_decoded)
self.model.compile(
optimizer=keras.optimizers.Adam(learning_rate=0.001),
optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
loss=self._get_vae_loss,
)

Expand Down

0 comments on commit dbc49ca

Please sign in to comment.