[PaddlePaddle Hackathon 2] No. 23: Add the Softmax2D network-building API to Paddle (#40910)
* Hackathon 23

* fix bug

* fix pylint error

* try

* fix CI-Coverage

* update and add more unittest

* update
Asthestarsfalll committed Apr 21, 2022
1 parent db468d7 commit 920d44d
Showing 4 changed files with 166 additions and 0 deletions.
111 changes: 111 additions & 0 deletions python/paddle/fluid/tests/unittests/test_softmax2d.py
@@ -0,0 +1,111 @@
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from test_softmax_op import ref_softmax


class TestSoftmax2DAPI(unittest.TestCase):
def setUp(self):
self.shape = [2, 6, 5, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
self.axis = -3
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()

def test_static_api(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.fluid.data('X', self.x_np.shape, self.x_np.dtype)
m = paddle.nn.Softmax2D()
out = m(x)
exe = paddle.static.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
out_ref = ref_softmax(self.x_np, self.axis)
self.assertTrue(np.allclose(out_ref, res))

def test_dygraph_api(self):
paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np)
m = paddle.nn.Softmax2D()
out = m(x)
out_ref = ref_softmax(self.x_np, self.axis)
self.assertTrue(np.allclose(out_ref, out.numpy()))
paddle.enable_static()


class TestSoftmax2DShape(TestSoftmax2DAPI):
def setUp(self):
self.shape = [2, 6, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
self.axis = -3
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()


class TestSoftmax2DFloat32(TestSoftmax2DAPI):
def setUp(self):
self.shape = [2, 3, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float32')
self.axis = -3
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()


class TestSoftmax2DCPU(TestSoftmax2DAPI):
def setUp(self):
self.shape = [2, 6, 4]
self.x_np = np.random.uniform(-1, 1, self.shape).astype('float64')
self.axis = -3
self.place = paddle.CPUPlace()


class TestSoftmax2DRepr(unittest.TestCase):
def setUp(self):
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()

def test_extra_repr(self):
paddle.disable_static(self.place)
m = paddle.nn.Softmax2D(name='test')
self.assertTrue(m.extra_repr() == 'name=test')
paddle.enable_static()


class TestSoftmax2DError(unittest.TestCase):
def setUp(self):
self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()

def test_static_error(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.fluid.data('X', [5, 5], 'float32')
m = paddle.nn.Softmax2D()
self.assertRaises(AssertionError, m, x)

def test_dygraph_error(self):
paddle.disable_static(self.place)
x_np = np.random.randn(2, 3, 4, 2, 3)
x = paddle.to_tensor(x_np, dtype='float64')
m = paddle.nn.Softmax2D()
self.assertRaises(AssertionError, m, x)


if __name__ == '__main__':
unittest.main()
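Note: the tests above check Softmax2D against `ref_softmax`, which is imported from `test_softmax_op` and is not part of this diff. As a minimal sketch of what that reference is assumed to compute (the actual helper lives in test_softmax_op.py and may differ in signature), a numerically stable NumPy softmax along a given axis looks like:

import numpy as np

def ref_softmax_sketch(x, axis=-1):
    # Hypothetical stand-in for the imported ref_softmax: a numerically
    # stable softmax along `axis`, which these tests call with axis=-3.
    x = np.asarray(x, dtype='float64')
    x_max = np.max(x, axis=axis, keepdims=True)
    e = np.exp(x - x_max)
    return e / np.sum(e, axis=axis, keepdims=True)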
2 changes: 2 additions & 0 deletions python/paddle/nn/__init__.py
@@ -41,6 +41,7 @@
from .layer.activation import Hardsigmoid # noqa: F401
from .layer.activation import LogSigmoid # noqa: F401
from .layer.activation import Softmax # noqa: F401
from .layer.activation import Softmax2D # noqa: F401
from .layer.activation import Softplus # noqa: F401
from .layer.activation import Softshrink # noqa: F401
from .layer.activation import Softsign # noqa: F401
@@ -260,6 +261,7 @@ def weight_norm(*args):
'AdaptiveMaxPool1D',
'TransformerEncoder',
'Softmax',
'Softmax2D',
'ParameterList',
'Conv2D',
'Softshrink',
1 change: 1 addition & 0 deletions python/paddle/nn/layer/__init__.py
@@ -26,6 +26,7 @@
from .activation import Sigmoid # noqa: F401
from .activation import Softmax # noqa: F401
from .activation import LogSoftmax # noqa: F401
from .activation import Softmax2D # noqa: F401
from .common import Bilinear # noqa: F401
from .common import Pad1D # noqa: F401
from .common import Pad2D # noqa: F401
52 changes: 52 additions & 0 deletions python/paddle/nn/layer/activation.py
@@ -1338,3 +1338,55 @@ def forward(self, x):
def extra_repr(self):
name_str = ', name={}'.format(self._name) if self._name else ''
return 'groups={}, axis={}{}'.format(self._groups, self._axis, name_str)


class Softmax2D(Layer):
r"""
    Softmax2D Activation.

    Given a Tensor with shape (B, C, H, W) or (C, H, W), it applies Softmax over the
    channel dimension, so that at every spatial location (h_i, w_j) the values along
    the channel axis sum to one.

    Shape:
        - Input: :math:`(B, C, H, W)` or :math:`(C, H, W)`
        - Output: :math:`(B, C, H, W)` or :math:`(C, H, W)` (same shape as input)

    Returns:
        A Tensor with the same shape and dtype as the input, with values in the range [0, 1].

Examples:
        .. code-block:: python

import paddle
x = paddle.rand([1, 2, 3, 4])
# [[[[0.42496058 0.1172187 0.14664008 0.8151267 ]
# [0.24430142 0.42052492 0.60372984 0.79307914]
# [0.4539401 0.90458065 0.10235776 0.62009853]]
# [[0.11731581 0.16053623 0.05667042 0.91876775]
# [0.9413854 0.30770817 0.6788164 0.9543593 ]
# [0.4145064 0.75909156 0.11598814 0.73599935]]]]
m = paddle.nn.Softmax2D()
out = m(x)
# [[[[0.5763103 0.48917228 0.5224772 0.4741129 ]
# [0.3324591 0.5281743 0.48123717 0.45976716]
# [0.5098571 0.5363083 0.49659243 0.4710572 ]]
# [[0.42368975 0.51082766 0.47752273 0.5258871 ]
# [0.66754097 0.47182566 0.5187628 0.5402329 ]
# [0.49014282 0.46369177 0.50340754 0.5289428 ]]]]
"""

def __init__(self, name=None):
super(Softmax2D, self).__init__()
self._dtype = None
self._name = name

def forward(self, x):
assert x.ndim == 3 or x.ndim == 4, "Softmax2D requires a 3D or 4D tensor as input. Received: {}D.".format(
x.ndim)
return F.softmax(x, axis=-3, dtype=self._dtype, name=self._name)

def extra_repr(self):
name_str = 'name={}'.format(self._name) if self._name else ''
return name_str
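As a short usage sketch (dygraph mode), the new layer simply forwards to the functional softmax over the channel axis, which is exactly what forward() above does:

import numpy as np
import paddle

paddle.disable_static()
x = paddle.rand([2, 6, 5, 4])  # (B, C, H, W)
out = paddle.nn.Softmax2D()(x)

# forward() calls F.softmax(x, axis=-3), i.e. softmax over the channel axis.
ref = paddle.nn.functional.softmax(x, axis=-3)
assert np.allclose(out.numpy(), ref.numpy())

# The channel values at every spatial location sum to one.
assert np.allclose(out.sum(axis=1).numpy(), np.ones([2, 5, 4]))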
