Commit ebb04e2: change end_lr to end_learning_rate

Asthestarsfalll committed May 13, 2022
1 parent 70f97a8
Showing 2 changed files with 18 additions and 18 deletions.
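For downstream users this is purely a keyword rename; a minimal usage sketch against the updated API (all values illustrative, not taken from this commit):

    import paddle

    # `end_lr` becomes `end_learning_rate`; the semantics are unchanged.
    scheduler = paddle.optimizer.lr.OneCycleLR(
        max_learning_rate=1.0,
        total_steps=100,
        end_learning_rate=0.0001)

    for step in range(100):
        # ... run one training step with this scheduler attached ...
        scheduler.step()             # advance the schedule by one step
        lr_now = scheduler.get_lr()  # learning rate for the upcoming step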
python/paddle/fluid/tests/unittests/test_lr_scheduler.py (9 additions, 9 deletions)
@@ -325,7 +325,7 @@ def one_cycle_lr(epoch_num,
                  max_learning_rate,
                  total_steps,
                  divide_factor=25,
-                 end_lr=0.0001,
+                 end_learning_rate=0.0001,
                  phase_pct=0.3,
                  anneal_strategy='cos',
                  three_phase=False,
@@ -347,7 +347,7 @@ def one_cycle_lr(epoch_num,
             },
             {
                 'start_lr': initial_lr,
-                'end_lr': end_lr,
+                'end_lr': end_learning_rate,
             },
         ]
     else:
@@ -359,7 +359,7 @@ def one_cycle_lr(epoch_num,
             },
             {
                 'start_lr': max_learning_rate,
-                'end_lr': end_lr,
+                'end_lr': end_learning_rate,
             },
         ]
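The reference helper above walks each phase from its 'start_lr' to its 'end_lr'. For context, a sketch of the two interpolation functions such a helper is typically paired with (formulas are the standard one-cycle annealing curves; the names are assumed, not quoted from this file):

    import math

    def annealing_cos(start, end, pct):
        # Cosine interpolation: yields `start` at pct=0 and `end` at pct=1.
        cos_out = math.cos(math.pi * pct) + 1
        return end + (start - end) / 2.0 * cos_out

    def annealing_linear(start, end, pct):
        # Straight-line interpolation between the same endpoints.
        return (end - start) * pct + start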

@@ -539,10 +539,10 @@ def test_scheduler(self):
                 max_learning_rate=-1.5, total_steps=20)
         with self.assertRaises(TypeError):
             paddle.optimizer.lr.OneCycleLR(
-                max_learning_rate=0.1, total_steps=20, end_lr='test')
+                max_learning_rate=0.1, total_steps=20, end_learning_rate='test')
         with self.assertRaises(ValueError):
             paddle.optimizer.lr.OneCycleLR(
-                max_learning_rate=0.1, total_steps=20, end_lr=-1)
+                max_learning_rate=0.1, total_steps=20, end_learning_rate=-1)
         with self.assertRaises(TypeError):
             paddle.optimizer.lr.OneCycleLR(
                 max_learning_rate=0.1, total_steps='test')
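The assertions above pin down the validation behavior under the new keyword; a short hedged demonstration of the same error paths outside the test harness (messages paraphrased from the implementation diff below):

    import paddle

    try:
        paddle.optimizer.lr.OneCycleLR(
            max_learning_rate=0.1, total_steps=20, end_learning_rate='test')
    except TypeError as exc:
        print(exc)  # "'end_learning_rate' must be 'float' or 'int', ..."

    try:
        paddle.optimizer.lr.OneCycleLR(
            max_learning_rate=0.1, total_steps=20, end_learning_rate=-1)
    except ValueError as exc:
        print(exc)  # "'end_learning_rate' must be a positive integer."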
@@ -622,31 +622,31 @@ def test_scheduler(self):
             "max_learning_rate": 0.1,
             "total_steps": 20,
             "divide_factor": 5,
-            "end_lr": 0.0001,
+            "end_learning_rate": 0.0001,
             "anneal_strategy": 'cos',
             "phase_pct": 0.3,
             "three_phase": False,
         }), (one_cycle_lr, paddle.optimizer.lr.OneCycleLR, {
             "max_learning_rate": 0.5,
             "total_steps": 20,
             "divide_factor": 10,
-            "end_lr": 0.001,
+            "end_learning_rate": 0.001,
             "anneal_strategy": 'linear',
             "phase_pct": 0.4,
             "three_phase": False,
         }), (one_cycle_lr, paddle.optimizer.lr.OneCycleLR, {
             "max_learning_rate": 1.0,
             "total_steps": 20,
             "divide_factor": 9,
-            "end_lr": 0.0001,
+            "end_learning_rate": 0.0001,
             "anneal_strategy": 'cos',
             "phase_pct": 0.3,
             "three_phase": True,
         }), (one_cycle_lr, paddle.optimizer.lr.OneCycleLR, {
             "max_learning_rate": 0.3,
             "total_steps": 20,
             "divide_factor": 25,
-            "end_lr": 0.0005,
+            "end_learning_rate": 0.0005,
             "anneal_strategy": 'linear',
             "phase_pct": 0.2,
             "three_phase": True,
python/paddle/optimizer/lr.py (9 additions, 9 deletions)
@@ -1613,7 +1613,7 @@ class OneCycleLR(LRScheduler):
             Functionally, it defines the initial learning rate by ``divide_factor`` .
         total_steps (int): Number of total training steps.
         divide_factor (float): Initial learning rate will be determined by initial_learning_rate = max_learning_rate / divide_factor. Default: 25.
-        end_lr (float, optional): The minimum learning rate during training, it should be much less than initial learning rate.
+        end_learning_rate (float, optional): The minimum learning rate during training, it should be much less than initial learning rate.
         phase_pct (float): The percentage of total steps which used to increasing learning rate. Default: 0.3.
         anneal_strategy (str, optional): Strategy of adjusting learning rate.'cos' for cosine annealing,
             'linear' for linear annealing. Default: 'cos'.
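Concretely: with max_learning_rate=0.5 and the default divide_factor=25, the schedule starts at 0.5 / 25 = 0.02, rises to 0.5 over the first phase_pct of total_steps, then anneals down to end_learning_rate (0.0001 by default), which is why the docstring asks for a value much smaller than the initial rate.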
@@ -1682,7 +1682,7 @@ def __init__(self,
                  max_learning_rate,
                  total_steps,
                  divide_factor=25.,
-                 end_lr=0.0001,
+                 end_learning_rate=0.0001,
                  phase_pct=0.3,
                  anneal_strategy='cos',
                  three_phase=False,
@@ -1696,13 +1696,13 @@ def __init__(self,
         if max_learning_rate < 0:
             raise ValueError("'max_learning_rate' must be a positive integer.")
 
-        # Check type and value of end_lr
-        if not isinstance(end_lr, (float, int)):
+        # Check type and value of end_learning_rate
+        if not isinstance(end_learning_rate, (float, int)):
             raise TypeError(
-                "'end_lr' must be 'float' or 'int', but received {}".format(
-                    type(total_steps)))
-        if end_lr < 0:
-            raise ValueError("'end_lr' must be a positive integer.")
+                "'end_learning_rate' must be 'float' or 'int', but received {}".
+                format(type(total_steps)))
+        if end_learning_rate < 0:
+            raise ValueError("'end_learning_rate' must be a positive integer.")
 
         # Check type and value of total_steps
         if not isinstance(total_steps, int):
@@ -1728,7 +1728,7 @@ def __init__(self,
                 format(type(divide_factor)))
 
         initial_lr = max_learning_rate / float(divide_factor)
-        min_lr = float(end_lr)
+        min_lr = float(end_learning_rate)
 
         if three_phase:
             if phase_pct >= 0.5:
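The two endpoints computed here fix the shape of the whole schedule. A sketch of how the per-phase boundaries follow from them, with the phase layout inferred from the one_cycle_lr test helper in the first file (the helper name is illustrative):

    def phase_endpoints(max_learning_rate, divide_factor=25.,
                        end_learning_rate=0.0001, three_phase=False):
        # Mirrors the arithmetic in the hunk above.
        initial_lr = max_learning_rate / float(divide_factor)
        min_lr = float(end_learning_rate)
        if three_phase:
            # Warm up, symmetric warm-down, then decay to the floor; this is
            # also why phase_pct must stay below 0.5 in three-phase mode.
            return [(initial_lr, max_learning_rate),
                    (max_learning_rate, initial_lr),
                    (initial_lr, min_lr)]
        # Two phases: warm up, then decay straight to the floor.
        return [(initial_lr, max_learning_rate),
                (max_learning_rate, min_lr)]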
