This repository has been archived by the owner on Feb 3, 2021. It is now read-only.

Allow submitting jobs into a VNET #365

Merged: 3 commits, Feb 5, 2018
7 changes: 7 additions & 0 deletions aztk/client.py
@@ -179,6 +179,12 @@ def __submit_job(self,
             helpers.select_latest_verified_vm_image_with_node_agent_sku(
                 vm_image_model.publisher, vm_image_model.offer, vm_image_model.sku, self.batch_client)
 
+        # set up subnet if necessary
+        network_conf = None
+        if job_configuration.subnet_id:
+            network_conf = batch_models.NetworkConfiguration(
+                subnet_id=job_configuration.subnet_id)
+
         # set up a schedule for a recurring job
         auto_pool_specification = batch_models.AutoPoolSpecification(
             pool_lifetime_option=batch_models.PoolLifetimeOption.job_schedule,
@@ -195,6 +201,7 @@ def __submit_job(self,
                 auto_scale_evaluation_interval=timedelta(minutes=5),
                 start_task=start_task,
                 enable_inter_node_communication=True,
+                network_configuration=network_conf,
                 max_tasks_per_node=1,
                 metadata=[
                     batch_models.MetadataItem(
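For context, a minimal standalone sketch of the pattern in the first hunk, assuming only the azure-batch Python package; the subnet ID below is a made-up placeholder, not a value from this PR:

import azure.batch.models as batch_models

# Hypothetical ARM resource ID of a subnet inside a VNet.
subnet_id = (
    "/subscriptions/00000000-0000-0000-0000-000000000000"
    "/resourceGroups/my-rg/providers/Microsoft.Network"
    "/virtualNetworks/my-vnet/subnets/my-subnet")

# Same conditional as the diff: only build a NetworkConfiguration when a
# subnet was requested; None leaves the pool outside any VNet.
network_conf = None
if subnet_id:
    network_conf = batch_models.NetworkConfiguration(subnet_id=subnet_id)

Because network_configuration is an optional field on the pool specification, passing None in the second hunk behaves the same as omitting the argument, so no conditional is needed at the call site.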
4 changes: 3 additions & 1 deletion aztk/spark/models.py
@@ -178,7 +178,8 @@ def __init__(
             spark_configuration=None,
             docker_repo=None,
             max_dedicated_nodes=None,
-            max_low_pri_nodes=None):
+            max_low_pri_nodes=None,
+            subnet_id=None):
         self.id = id
         self.applications = applications
         self.custom_scripts = custom_scripts
@@ -188,6 +189,7 @@ def __init__(
         self.docker_repo = docker_repo
         self.max_dedicated_nodes = max_dedicated_nodes
         self.max_low_pri_nodes = max_low_pri_nodes
+        self.subnet_id = subnet_id
 
 
 class JobState():
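A brief usage sketch, assuming the constructor above belongs to aztk.spark.models.JobConfiguration (the class name sits just above the visible hunk; the call in cli/spark/endpoints/job/submit.py below passes the same keywords). All values are illustrative:

from aztk.spark import models

job_conf = models.JobConfiguration(
    id="my-spark-job",
    applications=[],
    max_dedicated_nodes=2,
    max_low_pri_nodes=0,
    # New in this PR; defaults to None, so existing callers are unchanged.
    subnet_id="/subscriptions/.../subnets/my-subnet")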
2 changes: 2 additions & 0 deletions cli/config.py
@@ -333,6 +333,7 @@ def __init__(self):
         self.spark_defaults_conf = None
         self.spark_env_sh = None
         self.core_site_xml = None
+        self.subnet_id = None
 
     def _merge_dict(self, config):
         config = config.get('job')
@@ -347,6 +348,7 @@ def _merge_dict(self, config):
             self.max_dedicated_nodes = cluster_configuration.get('size')
             self.max_low_pri_nodes = cluster_configuration.get('size_low_pri')
             self.custom_scripts = cluster_configuration.get('custom_scripts')
+            self.subnet_id = cluster_configuration.get('subnet_id')
 
         self.applications = config.get('applications')
 
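To make the merge concrete, this is roughly the dict shape _merge_dict consumes once job.yaml is parsed; the nesting under cluster_configuration is inferred from the surrounding code rather than shown in this hunk:

# Hypothetical parsed job.yaml (compare config/job.yaml below).
config = {
    "job": {
        "cluster_configuration": {
            "vm_size": "standard_d2_v2",
            "size": 2,
            "size_low_pri": 0,
            "subnet_id": "/subscriptions/.../subnets/my-subnet",
        },
        "applications": [],
    },
}
# _merge_dict drops the top-level "job" key, then copies
# cluster_configuration["subnet_id"] onto self.subnet_id; .get() returns
# None when the key is absent, matching the default set in __init__.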
3 changes: 2 additions & 1 deletion cli/spark/endpoints/job/submit.py
@@ -56,7 +56,8 @@ def execute(args: typing.NamedTuple):
         vm_size=job_conf.vm_size,
         docker_repo=job_conf.docker_repo,
         max_dedicated_nodes=job_conf.max_dedicated_nodes,
-        max_low_pri_nodes=job_conf.max_low_pri_nodes
+        max_low_pri_nodes=job_conf.max_low_pri_nodes,
+        subnet_id=job_conf.subnet_id
     )
 
     #TODO: utils.print_job_conf(job_configuration)
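One design note: the visible change adds no new command-line flag, so the subnet cannot be set from the CLI directly; it flows from job.yaml through the JobConfig merge above into job_conf here. Assuming this endpoint maps to the command name its path suggests, usage stays simply

    aztk spark job submit

with subnet_id picked up from the job configuration file.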
1 change: 1 addition & 0 deletions config/job.yaml
@@ -9,6 +9,7 @@ job:
     vm_size: standard_d2_v2
     size: 2
     size_low_pri: 0
+    subnet_id:
     docker_repo: # defaults to aztk/base:spark2.2.0
     # custom_scripts:
     #   - script: </path/to/script.sh or /path/to/script/directory/>
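The new subnet_id key ships empty, so existing jobs keep running outside a VNet. When set, the value is the full ARM resource ID of an existing subnet; every segment in the ID below is a placeholder:

    subnet_id: /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg/providers/Microsoft.Network/virtualNetworks/my-vnet/subnets/my-subnet

Per Azure Batch's VNet rules, the virtual network must live in the same region as the Batch account, and the subnet needs enough free IP addresses for all nodes in the pool.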