[PaddlePaddle Hackathon 2] 23. Add the Softmax2D layer API to Paddle #40910

Merged
11 commits, merged on Apr 21, 2022
111 changes: 111 additions & 0 deletions python/paddle/fluid/tests/unittests/test_softmax2d.py
@@ -0,0 +1,111 @@
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from test_softmax_op import ref_softmax


class TestSoftmax2DAPI(unittest.TestCase):
def setUp(self):
self.shape = [2, 6, 5, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
self.axis = -3
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()

def test_static_api(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.fluid.data('X', self.x_np.shape, self.x_np.dtype)
m = paddle.nn.Softmax2D()
out = m(x)
exe = paddle.static.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
out_ref = ref_softmax(self.x_np, self.axis)
self.assertTrue(np.allclose(out_ref, res))

def test_dygraph_api(self):
paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np)
m = paddle.nn.Softmax2D()
out = m(x)
out_ref = ref_softmax(self.x_np, self.axis)
self.assertTrue(np.allclose(out_ref, out.numpy()))
paddle.enable_static()


class TestSoftmax2DShape(TestSoftmax2DAPI):
def setUp(self):
self.shape = [2, 6, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
self.axis = -3
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()


class TestSoftmax2DFloat32(TestSoftmax2DAPI):
def setUp(self):
self.shape = [2, 3, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float32')
self.axis = -3
Contributor:

The axis could cover different values; it does not need to be -3 in every test, e.g. 0 or a positive value.

Contributor Author:

For Softmax2D, the softmax axis must be fixed at -3. It is only written out explicitly here; in practice it never needs to change.

Contributor:

This is mainly because Softmax2D is defined with axis fixed at -3. If it were not set to -3, the numpy reference ref_softmax would no longer implement the Softmax2D behavior and could not be used to verify Softmax2D in these tests.
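
For illustration, a minimal numpy sketch of a softmax fixed at axis=-3 (the helper name softmax2d_ref is hypothetical; the actual ref_softmax imported from test_softmax_op may differ in details):

def softmax2d_ref(x):
    # Softmax over axis=-3, i.e. the channel axis of a CHW or NCHW array,
    # which is the operation Softmax2D is expected to compute.
    x_max = np.max(x, axis=-3, keepdims=True)
    e = np.exp(x - x_max)
    return e / np.sum(e, axis=-3, keepdims=True)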

self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()


class TestSoftmax2DCPU(TestSoftmax2DAPI):
def setUp(self):
self.shape = [2, 6, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
self.axis = -3
self.place = paddle.CPUPlace()

Contributor:

Add a standalone GPU test, plus error tests for inputs whose shape is not 3-D or 4-D (assertRaises(error, func, input) can be used).
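
A rough sketch of what is being asked for, assuming the class names TestSoftmax2DGPU and TestSoftmax2DWrongDim (illustrative, not necessarily what the PR ends up adding):

@unittest.skipIf(not core.is_compiled_with_cuda(),
                 "core is not compiled with CUDA")
class TestSoftmax2DGPU(TestSoftmax2DAPI):
    def setUp(self):
        self.shape = [2, 6, 5, 4]
        self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
        self.axis = -3
        self.place = paddle.CUDAPlace(0)


class TestSoftmax2DWrongDim(unittest.TestCase):
    def test_wrong_ndim(self):
        paddle.disable_static()
        # A 2-D input is neither (C, H, W) nor (B, C, H, W).
        x = paddle.to_tensor(np.random.randn(5, 5))
        m = paddle.nn.Softmax2D()
        self.assertRaises(AssertionError, m, x)
        paddle.enable_static()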


class TestSoftmax2DRepr(unittest.TestCase):
def setUp(self):
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()

def test_extra_repr(self):
paddle.disable_static(self.place)
Contributor:

Modify here accordingly.

m = paddle.nn.Softmax2D(name='test')
self.assertTrue(m.extra_repr() == 'name=test')
paddle.enable_static()


class TestSoftmax2DError(unittest.TestCase):
def setUp(self):
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()

def test_static_error(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.fluid.data('X', [5, 5], 'float32')
m = paddle.nn.Softmax2D()
self.assertRaises(AssertionError, m, x)

def test_dygraph_error(self):
paddle.disable_static(self.place)
x_np = np.random.randn(2, 3, 4, 2, 3)
x = paddle.to_tensor(x_np, dtype='float64')
m = paddle.nn.Softmax2D()
self.assertRaises(AssertionError, m, x)
Contributor:

Why is this an AssertionError?

Contributor Author:

Because the code uses an assert:

assert x.ndim == 3 or x.ndim == 4

Contributor:

The developer used an assert at the Python level to guard against inputs whose dimensionality does not match expectations, so a dimension mismatch raises an AssertionError.
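
Illustrative only, a small dygraph snippet showing how that assert surfaces to the caller (the message comes from the forward implementation further below):

paddle.disable_static()
m = paddle.nn.Softmax2D()
x = paddle.to_tensor(np.random.randn(5, 5))  # 2-D, so the ndim check fails
try:
    m(x)
except AssertionError as e:
    print(e)  # "Softmax2D requires a 3D or 4D tensor as input. Received: 2D."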



if __name__ == '__main__':
unittest.main()
2 changes: 2 additions & 0 deletions python/paddle/nn/__init__.py
@@ -41,6 +41,7 @@
from .layer.activation import Hardsigmoid # noqa: F401
from .layer.activation import LogSigmoid # noqa: F401
from .layer.activation import Softmax # noqa: F401
from .layer.activation import Softmax2D # noqa: F401
from .layer.activation import Softplus # noqa: F401
from .layer.activation import Softshrink # noqa: F401
from .layer.activation import Softsign # noqa: F401
@@ -260,6 +261,7 @@ def weight_norm(*args):
'AdaptiveMaxPool1D',
'TransformerEncoder',
'Softmax',
'Softmax2D',
'ParameterList',
'Conv2D',
'Softshrink',
1 change: 1 addition & 0 deletions python/paddle/nn/layer/__init__.py
@@ -26,6 +26,7 @@
from .activation import Sigmoid # noqa: F401
from .activation import Softmax # noqa: F401
from .activation import LogSoftmax # noqa: F401
from .activation import Softmax2D # noqa: F401
from .common import Bilinear # noqa: F401
from .common import Pad1D # noqa: F401
from .common import Pad2D # noqa: F401
52 changes: 52 additions & 0 deletions python/paddle/nn/layer/activation.py
@@ -1338,3 +1338,55 @@ def forward(self, x):
def extra_repr(self):
name_str = ', name={}'.format(self._name) if self._name else ''
return 'groups={}, axis={}{}'.format(self._groups, self._axis, name_str)


class Softmax2D(Layer):
r"""
Softmax2D Activation.
    Given a Tensor with shape (B, C, H, W) or (C, H, W), Softmax is applied along the channel dimension at each spatial location (h_i, w_j).
    The values across channels at every location therefore sum to one.

Shape:
- Input: :math:`(B, C, H, W)` or :math:`(C, H, W)`
        - Output: :math:`(B, C, H, W)` or :math:`(C, H, W)` (same as input)

    Returns:
        A Tensor with the same shape and dtype as the input, with values in the range [0, 1].

Examples:
.. code-block:: python

import paddle

x = paddle.rand([1, 2, 3, 4])
# [[[[0.42496058 0.1172187 0.14664008 0.8151267 ]
# [0.24430142 0.42052492 0.60372984 0.79307914]
# [0.4539401 0.90458065 0.10235776 0.62009853]]

# [[0.11731581 0.16053623 0.05667042 0.91876775]
# [0.9413854 0.30770817 0.6788164 0.9543593 ]
# [0.4145064 0.75909156 0.11598814 0.73599935]]]]
m = paddle.nn.Softmax2D()
out = m(x)
# [[[[0.5763103 0.48917228 0.5224772 0.4741129 ]
# [0.3324591 0.5281743 0.48123717 0.45976716]
# [0.5098571 0.5363083 0.49659243 0.4710572 ]]

# [[0.42368975 0.51082766 0.47752273 0.5258871 ]
# [0.66754097 0.47182566 0.5187628 0.5402329 ]
# [0.49014282 0.46369177 0.50340754 0.5289428 ]]]]
"""

def __init__(self, name=None):
super(Softmax2D, self).__init__()
self._dtype = None
self._name = name

def forward(self, x):
assert x.ndim == 3 or x.ndim == 4, "Softmax2D requires a 3D or 4D tensor as input. Received: {}D.".format(
x.ndim)
return F.softmax(x, axis=-3, dtype=self._dtype, name=self._name)

def extra_repr(self):
name_str = 'name={}'.format(self._name) if self._name else ''
return name_str
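
As a usage note (a sketch, not part of this diff): Softmax2D is equivalent to the functional softmax with the axis fixed at -3, so each spatial location sums to one across channels.

import paddle
import paddle.nn.functional as F

x = paddle.rand([1, 2, 3, 4])
out = paddle.nn.Softmax2D()(x)

# Matches the functional form with the channel axis fixed at -3.
same = paddle.allclose(out, F.softmax(x, axis=-3))
# Each (h, w) location sums to 1 across the channel dimension.
sums_to_one = paddle.allclose(out.sum(axis=-3), paddle.ones([1, 3, 4]))
print(same, sums_to_one)  # both hold True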