Skip to content

Commit

Permalink
fix use_softmax=False does not work, test=develop
Browse files Browse the repository at this point in the history
  • Loading branch information
chajchaj committed Apr 2, 2021
1 parent e7542a4 commit 06de04f
Showing 1 changed file with 4 additions and 2 deletions.
6 changes: 4 additions & 2 deletions python/paddle/nn/functional/loss.py
Original file line number Diff line number Diff line change
Expand Up @@ -1388,6 +1388,8 @@ def cross_entropy(input,
"should be '-100', but received %s, which is not allowed." %
ignore_index)

softmax_switch = use_softmax

input_dims = len(list(input.shape))
label_dims = len(list(label.shape))
if input_dims - 1 != label_dims and input_dims != label_dims:
Expand All @@ -1400,7 +1402,7 @@ def cross_entropy(input,
_, out = core.ops.softmax_with_cross_entropy(
input, label, 'soft_label', soft_label, 'ignore_index',
ignore_index, 'numeric_stable_mode', True, 'axis', axis,
'use_softmax', use_softmax)
'softmax_switch', softmax_switch)

if weight is not None:

Expand Down Expand Up @@ -1482,7 +1484,7 @@ def cross_entropy(input,
'ignore_index': ignore_index,
'numeric_stable_mode': True,
'axis': axis,
'use_softmax': use_softmax
'softmax_switch': softmax_switch
}
helper = LayerHelper('softmax_with_cross_entropy', **locals())
softmax = helper.create_variable_for_type_inference(dtype=input.dtype)
Expand Down

1 comment on commit 06de04f

@paddle-bot-old
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Congratulations! Your pull request passed all required CI. You can ask reviewer(s) to approve and merge. 🎉

Please sign in to comment.