Skip to content
This repository has been archived by the owner on Feb 3, 2021. It is now read-only.

Commit

Permalink
Feature: 0.9.0 deprecated code removal (#645)
Browse files Browse the repository at this point in the history
* remove deprecated code

* remove deprecated tests, yapf test directory

* add import

* remove unused test

* remove deprecated field name in tests

* update test parameter to non deprecated name
  • Loading branch information
jafreck committed Aug 16, 2018
1 parent 9d554c3 commit eef36dc
Show file tree
Hide file tree
Showing 18 changed files with 150 additions and 220 deletions.
35 changes: 1 addition & 34 deletions aztk/models/cluster_configuration.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import aztk.error as error
from aztk.core.models import Model, fields
from aztk.utils import deprecate, deprecated, helpers
from aztk.utils import helpers

from .custom_script import CustomScript
from .file_share import FileShare
Expand Down Expand Up @@ -41,37 +41,8 @@ class ClusterConfiguration(Model):
scheduling_target = fields.Enum(SchedulingTarget, default=None)

def __init__(self, *args, **kwargs):
if 'vm_count' in kwargs:
deprecate("0.9.0", "vm_count is deprecated for ClusterConfiguration.", "Please use size instead.")
kwargs['size'] = kwargs.pop('vm_count')

if 'vm_low_pri_count' in kwargs:
deprecate("vm_low_pri_count is deprecated for ClusterConfiguration.",
"Please use size_low_priority instead.")
kwargs['size_low_priority'] = kwargs.pop('vm_low_pri_count')

super().__init__(*args, **kwargs)

@property
@deprecated("0.9.0")
def vm_count(self):
return self.size

@vm_count.setter
@deprecated("0.9.0")
def vm_count(self, value):
self.size = value

@property
@deprecated("0.9.0")
def vm_low_pri_count(self):
return self.size_low_priority

@vm_low_pri_count.setter
@deprecated("0.9.0")
def vm_low_pri_count(self, value):
self.size_low_priority = value

def mixed_mode(self) -> bool:
"""
Return:
Expand Down Expand Up @@ -103,9 +74,5 @@ def __validate__(self) -> bool:
"You must configure a VNET to use AZTK in mixed mode (dedicated and low priority nodes). Set the VNET's subnet_id in your cluster.yaml or with a parameter (--subnet-id)."
)

if self.custom_scripts:
deprecate("0.9.0", "Custom scripts are DEPRECATED.",
"Use plugins instead. See https://aztk.readthedocs.io/en/v0.7.0/15-plugins.html.")

if self.scheduling_target == SchedulingTarget.Dedicated and self.size == 0:
raise error.InvalidModelError("Scheduling target cannot be Dedicated if dedicated vm size is 0")
4 changes: 1 addition & 3 deletions aztk/models/toolkit.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from aztk.core.models import Model, fields
from aztk.error import InvalidModelError
from aztk.utils import constants, deprecate
from aztk.utils import constants


class ToolkitDefinition:
Expand Down Expand Up @@ -60,8 +60,6 @@ def __validate__(self):
if self.version not in toolkit_def.versions:
raise InvalidModelError("Toolkit '{0}' with version '{1}' is not available. Use one of: {2}".format(
self.software, self.version, toolkit_def.versions))
if self.version == "1.6":
deprecate("0.9.0", "Spark version 1.6 is being deprecated for Aztk.", "Please use 2.1 and above.")

if self.environment:
if self.environment not in toolkit_def.environments:
Expand Down
2 changes: 1 addition & 1 deletion aztk/spark/helpers/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
# ALL FILES IN THIS DIRECTORY ARE DEPRECATED, WILL BE REMOTE IN v0.9.0
# ALL FILES IN THIS DIRECTORY ARE DEPRECATED, WILL BE REMOVED IN v0.10.0
14 changes: 3 additions & 11 deletions aztk_cli/config.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
import os

import yaml

import aztk.spark
from aztk.spark.models import (
SecretsConfiguration,
ClusterConfiguration,
SchedulingTarget,
)
from aztk.utils import deprecate
from aztk.models import Toolkit
from aztk.models.plugins.internal import PluginReference
from aztk.spark.models import (ClusterConfiguration, SchedulingTarget, SecretsConfiguration)


def load_aztk_secrets() -> SecretsConfiguration:
Expand Down Expand Up @@ -46,11 +43,6 @@ def _load_config_file(path: str):


def _merge_secrets_dict(secrets: SecretsConfiguration, secrets_config):
if 'default' in secrets_config:
deprecate("0.9.0", "default key in secrets.yaml is deprecated.",
"Place all child parameters directly at the root")
secrets_config = dict(**secrets_config, **secrets_config.pop('default'))

other = SecretsConfiguration.from_dict(secrets_config)
secrets.merge(other)

Expand Down
4 changes: 0 additions & 4 deletions aztk_cli/spark/endpoints/cluster/cluster_create.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import aztk.spark
from aztk.spark.models import ClusterConfiguration, UserConfiguration
from aztk.utils import deprecate
from aztk_cli import config, log, utils
from aztk_cli.config import load_aztk_spark_config

Expand Down Expand Up @@ -39,9 +38,6 @@ def execute(args: typing.NamedTuple):
# read cluster.yaml configuration file, overwrite values with args
file_config, wait = config.read_cluster_config()
cluster_conf.merge(file_config)
if args.size_low_pri is not None:
deprecate("0.9.0", "--size-low-pri has been deprecated.", "Please use --size-low-priority.")
args.size_low_priority = args.size_low_pri

cluster_conf.merge(
ClusterConfiguration(
Expand Down
9 changes: 5 additions & 4 deletions tests/core/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,8 @@ class SimpleStateModel(Model):

with pytest.raises(
InvalidModelFieldError,
match="SimpleStateModel state unknown is not a valid option. Use one of \\['creating', 'ready', 'deleting'\\]"):
match=
"SimpleStateModel state unknown is not a valid option. Use one of \\['creating', 'ready', 'deleting'\\]"):

obj = SimpleStateModel(state="unknown")
obj.validate()
Expand Down Expand Up @@ -234,15 +235,15 @@ class UserList(Model):
obj = UserList()
obj.validate()

assert isinstance(obj.infos, (list, ))
assert isinstance(obj.infos, (list,))
assert len(obj.infos) == 0

infos = obj.infos
infos.append(UserInfo())
assert len(obj.infos) == 1

obj2 = UserList(infos=None)
assert isinstance(obj2.infos, (list, ))
assert isinstance(obj2.infos, (list,))
assert len(obj2.infos) == 0


Expand All @@ -253,7 +254,7 @@ class UserList(Model):
obj = UserList(infos=[None, None])
obj.validate()

assert isinstance(obj.infos, (list, ))
assert isinstance(obj.infos, (list,))
assert len(obj.infos) == 0


Expand Down
34 changes: 17 additions & 17 deletions tests/integration_tests/spark/sdk/cluster/test_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from aztk.error import AztkError
from aztk.utils import constants
from aztk_cli import config
from tests.integration_tests.spark.sdk.get_client import get_spark_client, get_test_suffix
from tests.integration_tests.spark.sdk.get_client import (get_spark_client, get_test_suffix)

base_cluster_id = get_test_suffix("cluster")
spark_client = get_spark_client()
Expand Down Expand Up @@ -70,8 +70,8 @@ def test_create_cluster():
# TODO: make Cluster Configuration more robust, test each value
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -101,8 +101,8 @@ def test_list_clusters():
test_id = "test-list-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand All @@ -126,8 +126,8 @@ def test_get_remote_login_settings():
test_id = "test-get-remote-login-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand All @@ -154,8 +154,8 @@ def test_submit():
test_id = "test-submit-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -195,8 +195,8 @@ def test_get_application_log():
test_id = "test-get-app-log-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -256,8 +256,8 @@ def test_get_application_status_complete():
test_id = "test-app-status-complete-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -300,8 +300,8 @@ def test_delete_cluster():
test_id = "test-delete-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand All @@ -326,8 +326,8 @@ def test_spark_processes_up():
test_id = "test-spark-processes-up-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@
from aztk.error import AztkError
from aztk.utils import constants
from aztk_cli import config
from tests.integration_tests.spark.sdk.get_client import get_spark_client, get_test_suffix

from tests.integration_tests.spark.sdk.get_client import (get_spark_client, get_test_suffix)

base_cluster_id = get_test_suffix("cluster")
spark_client = get_spark_client()
Expand Down Expand Up @@ -71,8 +70,8 @@ def test_create_cluster():
# TODO: make Cluster Configuration more robust, test each value
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -103,8 +102,8 @@ def test_get_cluster():
test_id = "test-get-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -137,8 +136,8 @@ def test_list_clusters():
test_id = "test-list-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand All @@ -164,8 +163,8 @@ def test_get_remote_login_settings():
test_id = "test-get-remote-login-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -195,8 +194,8 @@ def test_submit():
test_id = "test-submit-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -225,7 +224,7 @@ def test_submit():
with pytest.warns(DeprecationWarning):
spark_client.submit(
cluster_id=cluster_configuration.cluster_id, application=application_configuration, wait=True)

assert True

except (AztkError, BatchErrorException):
Expand All @@ -239,8 +238,8 @@ def test_get_application_log():
test_id = "test-get-app-log-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -303,8 +302,8 @@ def test_get_application_status_complete():
test_id = "test-app-status-complete-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down Expand Up @@ -349,8 +348,8 @@ def test_delete_cluster():
test_id = "test-delete-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand All @@ -376,8 +375,8 @@ def test_spark_processes_up():
test_id = "test-spark-processes-up-"
cluster_configuration = aztk.spark.models.ClusterConfiguration(
cluster_id=test_id + base_cluster_id,
vm_count=2,
vm_low_pri_count=0,
size=2,
size_low_priority=0,
vm_size="standard_f2",
subnet_id=None,
custom_scripts=None,
Expand Down
Loading

0 comments on commit eef36dc

Please sign in to comment.