
Commit

add r,test=document_fix
thunder95 committed May 23, 2022
1 parent 116873b commit 8f626c2
Showing 2 changed files with 2 additions and 2 deletions.
python/paddle/nn/functional/activation.py (1 addition, 1 deletion)
@@ -549,7 +549,7 @@ def prelu(x, weight, data_format="NCHW", name=None):


 def rrelu(x, lower=1. / 8., upper=1. / 3., training=True, name=None):
-    """
+    r"""
     rrelu activation.
     Applies the randomized leaky rectified linear unit function to improve generalization performance,
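
The only change in this hunk is the `r` prefix, which turns the docstring into a raw string so that backslashes (for example in LaTeX-style math inside the docstring) are not treated as escape sequences. As a minimal sketch of the functional API touched here, the call below uses the signature shown in the diff; the sample input values and shapes are assumptions made for illustration only.

# Sketch only: exercises paddle.nn.functional.rrelu with the defaults
# from the signature above. Input values are hypothetical.
import paddle
import paddle.nn.functional as F

x = paddle.to_tensor([[-1.0, 0.5], [2.0, -3.0]])
# In training mode each negative element gets a slope drawn uniformly
# from [lower, upper]; in eval mode the fixed slope (lower + upper) / 2 is used.
out_train = F.rrelu(x, lower=1. / 8., upper=1. / 3., training=True)
out_eval = F.rrelu(x, lower=1. / 8., upper=1. / 3., training=False)
print(out_train.numpy())
print(out_eval.numpy())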
python/paddle/nn/layer/activation.py (1 addition, 1 deletion)
@@ -437,7 +437,7 @@ def extra_repr(self):


 class RReLU(Layer):
-    """
+    r"""
     RReLU activation layer.
     Applies the randomized leaky rectified linear unit function to improve generalization performance,
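
The second hunk applies the same raw-string fix to the layer form of the activation. The sketch below shows how that layer is typically used; the constructor arguments mirror the functional defaults and are an assumption here, since the `__init__` signature is not part of this diff.

# Sketch only: layer API corresponding to the class in the hunk above.
import paddle

rrelu = paddle.nn.RReLU(lower=1. / 8., upper=1. / 3.)  # assumed defaults
x = paddle.rand([2, 3])
y_train = rrelu(x)     # random negative slope while the layer is in train mode
rrelu.eval()
y_eval = rrelu(x)      # deterministic slope (lower + upper) / 2 in eval mode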
