From 8f626c2713df285697dc2608f29e456bbfc8bb4b Mon Sep 17 00:00:00 2001
From: thunder95 <290844930@qq.com>
Date: Mon, 23 May 2022 03:28:21 +0000
Subject: [PATCH] add r,test=document_fix

---
 python/paddle/nn/functional/activation.py | 2 +-
 python/paddle/nn/layer/activation.py      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index a37ed6f91edf2..7af7ae8ddd7ab 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -549,7 +549,7 @@ def prelu(x, weight, data_format="NCHW", name=None):
 
 
 def rrelu(x, lower=1. / 8., upper=1. / 3., training=True, name=None):
-    """
+    r"""
     rrelu activation.
 
     Applies the randomized leaky rectified liner unit function to improve generalization performance,
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index 32c77104f53fb..1a3768e919042 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -437,7 +437,7 @@ def extra_repr(self):
 
 
 class RReLU(Layer):
-    """
+    r"""
     RReLU activation layer.
 
     Applies the randomized leaky rectified liner unit function to improve generalization performance,
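
Note on the change (not part of the patch): adding the r prefix makes the docstrings raw strings, presumably because the rrelu/RReLU docs contain LaTeX-style math with backslashes. Without the prefix, Python interprets sequences such as \f in \frac as string escapes, which corrupts the rendered documentation and can trigger invalid-escape DeprecationWarnings. A minimal sketch of the effect, using a hypothetical docstring for illustration (not the actual Paddle docstring):

    def plain_doc():
        """Lower bound: \frac{1}{8}"""    # \f is parsed as a form-feed escape

    def raw_doc():
        r"""Lower bound: \frac{1}{8}"""   # raw string keeps the backslash literal

    print(repr(plain_doc.__doc__))  # 'Lower bound: \x0crac{1}{8}' -- math markup is broken
    print(repr(raw_doc.__doc__))    # 'Lower bound: \\frac{1}{8}' -- doc tooling can render the formula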