try #1 to fix CTL
guptapriya committed Jun 3, 2019
1 parent 70704b9 commit f0a8be5
Showing 1 changed file with 4 additions and 6 deletions.
official/recommendation/ncf_keras_main.py: 10 changes (4 additions, 6 deletions)
@@ -68,7 +68,7 @@ def call(self, inputs):
     return inputs[0]


-def _get_train_and_eval_data(producer, params):
+def _get_train_and_eval_data(producer, params):
   """Returns the datasets for training and evaluating."""

   def preprocess_train_input(features, labels):
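
Note: the hunk above sits in the input pipeline that feeds the training loop. As a rough sketch of how such a per-example preprocessing map plugs into tf.data (the dataset wiring below is an assumption for illustration, not the repo's actual producer code):

    import tensorflow as tf

    def preprocess_train_input(features, labels):
      # Placeholder transform; the real function may cast dtypes or attach
      # masks such as rconst.VALID_POINT_MASK before batching.
      return features, labels

    def make_train_dataset(raw_dataset):
      # Apply the transform to every example in parallel.
      return raw_dataset.map(
          preprocess_train_input,
          num_parallel_calls=tf.data.experimental.AUTOTUNE)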
@@ -313,8 +313,7 @@ def step_fn(inputs):
       """Computes loss and applies gradients per replica."""
       features, labels = inputs
       with tf.GradientTape() as tape:
-        softmax_logits = keras_model([features[movielens.USER_COLUMN],
-                                      features[movielens.ITEM_COLUMN]])
+        softmax_logits = keras_model(features)
         loss = loss_object(labels, softmax_logits,
                            sample_weight=features[rconst.VALID_POINT_MASK])
         loss *= (1.0 / (batch_size*strategy.num_replicas_in_sync))
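
Note: this hunk replaces the explicit [user, item] input list with the whole features dict; a Keras functional model accepts a dict when its named inputs match the dict keys. A minimal sketch of the per-replica train-step pattern shown above, assuming tf.distribute.MirroredStrategy and illustrative model and column names:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()
    global_batch_size = 256  # assumed: per-replica batch size * replicas

    with strategy.scope():
      users = tf.keras.Input(shape=(), dtype=tf.int64, name="user_id")
      items = tf.keras.Input(shape=(), dtype=tf.int64, name="item_id")
      joined = tf.keras.layers.Concatenate()([
          tf.keras.layers.Embedding(1000, 8)(users),
          tf.keras.layers.Embedding(2000, 8)(items)])
      keras_model = tf.keras.Model(
          {"user_id": users, "item_id": items},
          tf.keras.layers.Dense(2)(joined))
      optimizer = tf.keras.optimizers.Adam()
      loss_object = tf.keras.losses.SparseCategoricalCrossentropy(
          from_logits=True, reduction=tf.keras.losses.Reduction.NONE)

    @tf.function
    def train_step(dist_inputs):
      def step_fn(inputs):
        features, labels = inputs
        with tf.GradientTape() as tape:
          # Passing the dict works because input names match its keys.
          logits = keras_model(features)
          # Scale by the global batch size so the sum across replicas
          # matches the single-worker mean loss.
          loss = tf.reduce_sum(loss_object(labels, logits)) / global_batch_size
        grads = tape.gradient(loss, keras_model.trainable_variables)
        optimizer.apply_gradients(zip(grads, keras_model.trainable_variables))
        return loss

      per_replica_loss = strategy.run(step_fn, args=(dist_inputs,))
      return strategy.reduce(
          tf.distribute.ReduceOp.SUM, per_replica_loss, axis=None)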
@@ -336,8 +335,7 @@ def eval_step():
     def step_fn(inputs):
       """Computes eval metrics per replica."""
       features, _ = inputs
-      softmax_logits = keras_model([features[movielens.USER_COLUMN],
-                                    features[movielens.ITEM_COLUMN]])
+      softmax_logits = keras_model(features)
       logits = tf.slice(softmax_logits, [0, 0, 1], [-1, -1, -1])
       dup_mask = features[rconst.DUPLICATE_MASK]
       in_top_k, _, metric_weights, _ = neumf_model.compute_top_k_and_ndcg(
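
Note: the eval hunk makes the same dict-passing change; the tf.slice on the following line then drops index 0 of the last axis, i.e. the logit column that is not used for ranking. A self-contained illustration of that slice (the shape [batch, num_candidates, 2] is an assumption):

    import tensorflow as tf

    softmax_logits = tf.constant([[[0.1, 0.9],
                                   [0.7, 0.3],
                                   [0.4, 0.6]]])  # [1, 3, 2], made-up values
    # begin=[0, 0, 1], size=[-1, -1, -1]: keep everything from index 1
    # onward on the last axis.
    logits = tf.slice(softmax_logits, [0, 0, 1], [-1, -1, -1])
    print(logits.shape)  # (1, 3, 1): only the second column survives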
@@ -412,7 +410,7 @@ def step_fn(inputs):
     train_history = history.history
     train_loss = train_history["loss"][-1]

-    stats = build_stats(train_loss, eval_results, time_callback)
+    stats = build_stats(train_loss, eval_results, None) #, time_callback)
     return stats
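
Note: the last hunk sidesteps the timing callback by passing None and keeping the old argument in a comment. A sketch of a stats builder that tolerates the missing callback; the dict fields and the timestamp_log attribute are assumptions, not the repo's actual build_stats:

    def build_stats(train_loss, eval_results, time_callback):
      """Assembles run statistics; timing information is optional."""
      stats = {"loss": float(train_loss)}
      if eval_results:
        stats["eval_results"] = eval_results
      if time_callback is not None:
        # The real implementation would read timing fields off the callback.
        stats["timestamp_log"] = getattr(time_callback, "timestamp_log", None)
      return stats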


