diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index a37ed6f91edf2..7af7ae8ddd7ab 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -549,7 +549,7 @@ def prelu(x, weight, data_format="NCHW", name=None):
 
 
 def rrelu(x, lower=1. / 8., upper=1. / 3., training=True, name=None):
-    """
+    r"""
     rrelu activation.
 
     Applies the randomized leaky rectified liner unit function to improve generalization performance,
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index 32c77104f53fb..1a3768e919042 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -437,7 +437,7 @@ def extra_repr(self):
 
 
 class RReLU(Layer):
-    """
+    r"""
     RReLU activation layer.
 
     Applies the randomized leaky rectified liner unit function to improve generalization performance,
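
Why the r prefix: the only change in both hunks is switching the docstring opener from """ to r""", presumably because these docstrings embed LaTeX-style markup whose backslash sequences would otherwise be treated as Python string escapes and either silently mangle the text or trigger invalid-escape-sequence warnings. A minimal sketch of the difference, using an illustrative fragment that is not taken from the hunks above:

    # Without the raw prefix, "\t" inside a LaTeX fragment becomes a tab character.
    plain = "\text{RReLU}(x) = x"
    raw = r"\text{RReLU}(x) = x"

    print(plain)  # a literal tab followed by "ext{RReLU}(x) = x"
    print(raw)    # "\text{RReLU}(x) = x", exactly as written

With the raw prefix, documentation tooling that parses the docstring receives the backslashes verbatim, so any math markup in the RReLU docstrings can render as intended.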