diff --git a/classy_vision/optim/__init__.py b/classy_vision/optim/__init__.py
index 8337b64ecb..d149b4350b 100644
--- a/classy_vision/optim/__init__.py
+++ b/classy_vision/optim/__init__.py
@@ -41,17 +41,6 @@ def build_optimizer(config):
     param_scheduler_config = copy.deepcopy(config.get("param_schedulers", {}))
 
     # build the param schedulers
-    if "lr" in config and isinstance(config["lr"], dict):
-        message = (
-            'Passing an lr schedule in the config using "lr" is deprecated and '
-            "will be removed in version 0.2.0. See the docs for build_optimizer for "
-            "the recommended format."
-        )
-        warnings.warn(message, DeprecationWarning, stacklevel=2)
-        assert (
-            param_scheduler_config == {}
-        ), 'Cannot pass both "lr" and "param_schedulers" to the config'
-        param_scheduler_config = {"lr": config["lr"]}
 
     for cfg in param_scheduler_config.values():
         cfg["num_epochs"] = config["num_epochs"]
diff --git a/test/generic/optim_test_util.py b/test/generic/optim_test_util.py
index 60ca92a6cd..f9c992057f 100644
--- a/test/generic/optim_test_util.py
+++ b/test/generic/optim_test_util.py
@@ -200,7 +200,9 @@ def _test_lr_schedule(optimizer, num_epochs, epochs, targets):
         _test_lr_schedule(opt, num_epochs, epochs, targets)
 
         # Test step learning schedule
-        config["lr"] = {"name": "step", "values": [0.1, 0.01, 0.001]}
+        config["param_schedulers"] = {
+            "lr": {"name": "step", "values": [0.1, 0.01, 0.001]}
+        }
         opt = build_optimizer(config)
         opt.init_pytorch_optimizer(mock_classy_vision_model)
         targets = [0.1] * 8 + [0.01] * 3 + [0.001] * 4
@@ -209,15 +211,17 @@ def _test_lr_schedule(optimizer, num_epochs, epochs, targets):
         # Test step learning schedule with warmup
         init_lr = 0.01
         warmup_epochs = 0.1
-        config["lr"] = {
-            "name": "composite",
-            "schedulers": [
-                {"name": "linear", "start_value": init_lr, "end_value": 0.1},
-                {"name": "step", "values": [0.1, 0.01, 0.001]},
-            ],
-            "update_interval": "epoch",
-            "interval_scaling": ["rescaled", "fixed"],
-            "lengths": [warmup_epochs / num_epochs, 1 - warmup_epochs / num_epochs],
+        config["param_schedulers"] = {
+            "lr": {
+                "name": "composite",
+                "schedulers": [
+                    {"name": "linear", "start_value": init_lr, "end_value": 0.1},
+                    {"name": "step", "values": [0.1, 0.01, 0.001]},
+                ],
+                "update_interval": "epoch",
+                "interval_scaling": ["rescaled", "fixed"],
+                "lengths": [warmup_epochs / num_epochs, 1 - warmup_epochs / num_epochs],
+            }
         }
         opt = build_optimizer(config)
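
Note: for reference, a minimal sketch of the config migration this diff enforces. The optimizer name and schedule values below are illustrative, not taken from the change; only the move from a top-level "lr" dict to "param_schedulers" is.

    # deprecated shorthand, no longer accepted after this change
    config = {
        "name": "sgd",  # illustrative optimizer name
        "num_epochs": 15,
        "lr": {"name": "step", "values": [0.1, 0.01, 0.001]},
    }

    # supported format: schedules live under "param_schedulers"
    config = {
        "name": "sgd",
        "num_epochs": 15,
        "param_schedulers": {
            "lr": {"name": "step", "values": [0.1, 0.01, 0.001]},
        },
    }
    optimizer = build_optimizer(config)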