From 55cb01288602448ea169c2a65dfd04ddcb46a5d0 Mon Sep 17 00:00:00 2001
From: Bart Gawrych
Date: Mon, 20 Jul 2020 12:06:05 +0200
Subject: [PATCH] Fix softmax, logsoftmax backward failed on empty ndarray
 (#18710)

---
 src/operator/nn/log_softmax.cc         | 1 +
 src/operator/nn/softmax.cc             | 1 +
 tests/python/unittest/test_numpy_op.py | 1 +
 3 files changed, 3 insertions(+)

diff --git a/src/operator/nn/log_softmax.cc b/src/operator/nn/log_softmax.cc
index f3ef4abb9f6d..28ae8cf361ec 100644
--- a/src/operator/nn/log_softmax.cc
+++ b/src/operator/nn/log_softmax.cc
@@ -58,6 +58,7 @@ static void LogSoftmaxGradComputeExCPU(const nnvm::NodeAttrs& attrs,
                                        const std::vector<NDArray>& inputs,
                                        const std::vector<OpReqType>& req,
                                        const std::vector<NDArray>& outputs) {
+  if (inputs[0].shape().Size() == 0U) return;
   const SoftmaxParam& param = nnvm::get<SoftmaxParam>(attrs.parsed);
   if (SupportMKLDNNLogSoftmax(param, inputs[1], outputs[0])) {
     MKLDNN_OPCHECK_INIT(false, outputs.size(), inputs, outputs);
diff --git a/src/operator/nn/softmax.cc b/src/operator/nn/softmax.cc
index b95e159f9862..9b28b71560bd 100644
--- a/src/operator/nn/softmax.cc
+++ b/src/operator/nn/softmax.cc
@@ -59,6 +59,7 @@ static void SoftmaxGradComputeExCPU(const nnvm::NodeAttrs& attrs,
                                     const std::vector<NDArray>& inputs,
                                     const std::vector<OpReqType>& req,
                                     const std::vector<NDArray>& outputs) {
+  if (inputs[0].shape().Size() == 0U) return;
   const SoftmaxParam& param = nnvm::get<SoftmaxParam>(attrs.parsed);
   if (SupportMKLDNNSoftmax(param, inputs[1], outputs[0])) {
     MKLDNN_OPCHECK_INIT(false, outputs.size(), inputs, outputs);
diff --git a/tests/python/unittest/test_numpy_op.py b/tests/python/unittest/test_numpy_op.py
index 91f84bb27eb0..97c7d8675495 100644
--- a/tests/python/unittest/test_numpy_op.py
+++ b/tests/python/unittest/test_numpy_op.py
@@ -1612,6 +1612,7 @@ def np_log_softmax(x, axis=-1):
             assert_almost_equal(mx_out.asnumpy(), np_out, rtol=1e-3, atol=1e-5, equal_nan=True)
 
             mx_out.backward()
+            mx_a.grad.wait_to_read()
             assert_almost_equal(mx_a.grad.asnumpy(), _np.zeros(shape), rtol=1e-3, atol=1e-5)
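
For context, the failure this patch guards against can be reproduced with a
short script. This is a sketch, not part of the patch: it assumes a pre-fix
MXNet build with the numpy interface (mx.np / mx.npx) available, and it
mirrors the regression test added above.

    import mxnet as mx
    from mxnet import np, npx
    npx.set_np()

    a = np.random.uniform(size=(3, 0, 4))  # empty ndarray: shape().Size() == 0
    a.attach_grad()
    with mx.autograd.record():
        out = npx.softmax(a, axis=-1)      # forward on an empty input succeeds
    out.backward()                         # backward failed here before the fix
    a.grad.wait_to_read()                  # force sync, as the new test does
    print(a.grad.shape)                    # (3, 0, 4); gradient is all zeros

The fix itself is an early return in both gradient kernels: when the input has
zero elements there is nothing to compute, so the MKL-DNN/fallback dispatch is
skipped entirely. The test adds wait_to_read() so the asynchronous backward
pass is actually forced to run (and fail, pre-fix) inside the test body.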