
Commit

Remove deprecated code to handle lr schedulers (#419)
Summary: Pull Request resolved: #419

Reviewed By: vreis

Differential Revision: D20257571

fbshipit-source-id: 36ba3d36ff26ddbf8ba4795b47dfc2301be08486
mannatsingh authored and facebook-github-bot committed Mar 5, 2020
1 parent 8f6b512 commit d10fb97
Showing 2 changed files with 14 additions and 21 deletions.
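For context, this commit removes the deprecated top-level "lr" key from optimizer configs; lr schedules now live under "param_schedulers". A minimal before/after sketch of the config migration, based on the updated tests below (the "sgd" optimizer name and the num_epochs value are illustrative, not taken from this commit):

    # Deprecated format, removed by this commit: the lr schedule was passed
    # directly under a top-level "lr" key.
    old_config = {
        "name": "sgd",     # illustrative optimizer name
        "num_epochs": 15,  # illustrative value
        "lr": {"name": "step", "values": [0.1, 0.01, 0.001]},
    }

    # Current format: each schedule sits under "param_schedulers", keyed by
    # the parameter it controls.
    new_config = {
        "name": "sgd",
        "num_epochs": 15,
        "param_schedulers": {
            "lr": {"name": "step", "values": [0.1, 0.01, 0.001]},
        },
    }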
11 changes: 0 additions & 11 deletions classy_vision/optim/__init__.py
@@ -41,17 +41,6 @@ def build_optimizer(config):
     param_scheduler_config = copy.deepcopy(config.get("param_schedulers", {}))

     # build the param schedulers
-    if "lr" in config and isinstance(config["lr"], dict):
-        message = (
-            'Passing an lr schedule in the config using "lr" is deprecated and '
-            "will be removed in version 0.2.0. See the docs for build_optimizer for "
-            "the recommended format."
-        )
-        warnings.warn(message, DeprecationWarning, stacklevel=2)
-        assert (
-            param_scheduler_config == {}
-        ), 'Cannot pass both "lr" and "param_schedulers" to the config'
-        param_scheduler_config = {"lr": config["lr"]}
     for cfg in param_scheduler_config.values():
         cfg["num_epochs"] = config["num_epochs"]
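With the deprecated branch gone, build_optimizer reads schedules only from "param_schedulers"; a dict passed under a top-level "lr" key will no longer be converted or warned about. A minimal sketch of the surviving logic, with the surrounding build machinery elided (extract_param_scheduler_configs is a hypothetical helper name for illustration):

    import copy

    def extract_param_scheduler_configs(config):
        # Pull the scheduler configs out of the optimizer config and stamp
        # num_epochs onto each one, mirroring the retained lines above.
        param_scheduler_config = copy.deepcopy(config.get("param_schedulers", {}))
        for cfg in param_scheduler_config.values():
            cfg["num_epochs"] = config["num_epochs"]
        return param_scheduler_config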
24 changes: 14 additions & 10 deletions test/generic/optim_test_util.py
@@ -200,7 +200,9 @@ def _test_lr_schedule(optimizer, num_epochs, epochs, targets):
     _test_lr_schedule(opt, num_epochs, epochs, targets)

     # Test step learning schedule
-    config["lr"] = {"name": "step", "values": [0.1, 0.01, 0.001]}
+    config["param_schedulers"] = {
+        "lr": {"name": "step", "values": [0.1, 0.01, 0.001]}
+    }
     opt = build_optimizer(config)
     opt.init_pytorch_optimizer(mock_classy_vision_model)
     targets = [0.1] * 8 + [0.01] * 3 + [0.001] * 4
@@ -209,15 +211,17 @@ def _test_lr_schedule(optimizer, num_epochs, epochs, targets):
     # Test step learning schedule with warmup
     init_lr = 0.01
     warmup_epochs = 0.1
-    config["lr"] = {
-        "name": "composite",
-        "schedulers": [
-            {"name": "linear", "start_value": init_lr, "end_value": 0.1},
-            {"name": "step", "values": [0.1, 0.01, 0.001]},
-        ],
-        "update_interval": "epoch",
-        "interval_scaling": ["rescaled", "fixed"],
-        "lengths": [warmup_epochs / num_epochs, 1 - warmup_epochs / num_epochs],
+    config["param_schedulers"] = {
+        "lr": {
+            "name": "composite",
+            "schedulers": [
+                {"name": "linear", "start_value": init_lr, "end_value": 0.1},
+                {"name": "step", "values": [0.1, 0.01, 0.001]},
+            ],
+            "update_interval": "epoch",
+            "interval_scaling": ["rescaled", "fixed"],
+            "lengths": [warmup_epochs / num_epochs, 1 - warmup_epochs / num_epochs],
+        }
     }

     opt = build_optimizer(config)
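The composite schedule above chains a linear warmup from init_lr to 0.1 with a step schedule, splitting training by the "lengths" fractions. A small worked example of that computation, assuming num_epochs = 15 (an illustrative value; the actual value comes from the test config, which is not shown in this diff):

    num_epochs = 15   # illustrative; defined elsewhere in the test config
    init_lr = 0.01
    warmup_epochs = 0.1
    lengths = [warmup_epochs / num_epochs, 1 - warmup_epochs / num_epochs]
    print(lengths)    # [0.006666..., 0.993333...]
    # The warmup occupies the first ~0.67% of training and the step schedule
    # covers the rest; the two fractions always sum to 1.0.
    assert abs(sum(lengths) - 1.0) < 1e-9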
