Initial extension for storage preview #138

Merged · 6 commits · Apr 16, 2018
4 changes: 3 additions & 1 deletion .github/CODEOWNERS
@@ -18,4 +18,6 @@

/src/managementpartner/ @jeffrey-ace

-/src/dns/ @muwaqar
+/src/dns/ @muwaqar
+
+/src/storage-preview/ @williexu
2 changes: 1 addition & 1 deletion pylintrc
@@ -11,7 +11,7 @@ disable=missing-docstring,locally-disabled,fixme,cyclic-import,too-many-argument

[TYPECHECK]
# For Azure CLI extensions, we ignore some import errors as they'll be available in the environment of the CLI
-ignored-modules=azure,azure.cli,azure.cli.core,azure.cli.core.commands,knack
+ignored-modules=azure,azure.cli,azure.cli.core,azure.cli.core.commands,knack,msrestazure,argcomplete

[FORMAT]
max-line-length=120
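Context for the `ignored-modules` change: extension code imports `msrestazure` and `argcomplete`, which are present in the CLI's runtime environment but not necessarily in the lint environment, so pylint is told to skip its checks on those modules (per the file's own comment, the import errors). A minimal sketch of the kind of import this tolerates; `parse_resource_id` is a real `msrestazure` helper, used here purely as an illustration:

```python
# With msrestazure listed in ignored-modules, pylint no longer flags this
# module's imports/members when the package is absent at lint time; at CLI
# runtime the import resolves normally.
from msrestazure.tools import parse_resource_id

parts = parse_resource_id('/subscriptions/0000/resourceGroups/rg/providers/'
                          'Microsoft.Storage/storageAccounts/mysa')
print(parts['resource_group'])  # -> 'rg'
```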
3 changes: 2 additions & 1 deletion scripts/ci/test_static.sh
@@ -5,7 +5,8 @@ proc_number=`python -c 'import multiprocessing; print(multiprocessing.cpu_count(

# Run pylint/flake8 on extensions
# - We ignore 'models', 'operations' and files with suffix '_client.py' as they typically come from vendored Azure SDKs
-pylint ./src/*/azext_*/ --ignore=models,operations,service_bus_management_client,subscription_client,managementgroups,managementpartner --ignore-patterns=[a-zA-Z_]+_client.py --rcfile=./pylintrc -j $proc_number
+pylint ./src/*/azext_*/ --ignore=models,operations,service_bus_management_client,subscription_client,managementgroups,\
+managementpartner --ignore-patterns=test_*,[a-zA-Z_]+_client.py --rcfile=./pylintrc -j $proc_number
flake8 --statistics --exclude=models,operations,*_client.py,managementgroups,managementpartner --append-config=./.flake8 ./src/*/azext_*/

# Run pylint/flake8 on CI files
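A note on the new `--ignore-patterns` entry: pylint treats each entry as a regular expression matched against the file's base name (prefix matching, i.e. `re.match` semantics), so `test_*` — literally "test" plus any number of underscores — skips every file whose name starts with `test`. A quick self-contained sketch of that matching, assuming those semantics:

```python
import re

# Each --ignore-patterns entry is compiled as a regex and prefix-matched
# against the base file name; 'test_*' therefore skips test files, and
# '[a-zA-Z_]+_client.py' skips vendored *_client.py SDK files.
patterns = [re.compile(p) for p in ('test_*', '[a-zA-Z_]+_client.py')]

for name in ('test_storage_commands.py', 'storage_management_client.py', 'custom.py'):
    skipped = any(p.match(name) for p in patterns)
    print('{:30} {}'.format(name, 'skipped' if skipped else 'linted'))
# test_storage_commands.py       skipped
# storage_management_client.py   skipped
# custom.py                      linted
```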
187 changes: 187 additions & 0 deletions src/storage-preview/azext_storage_preview/__init__.py
@@ -0,0 +1,187 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from azure.cli.core import AzCommandsLoader
from azure.cli.core.profiles import ResourceType
from azure.cli.core.commands import AzCommandGroup, AzArgumentContext

import azext_storage_preview._help # pylint: disable=unused-import


class StorageCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core.commands import CliCommandType

storage_custom = CliCommandType(operations_tmpl='azext_storage_preview.custom#{}')
super(StorageCommandsLoader, self).__init__(cli_ctx=cli_ctx,
resource_type=ResourceType.DATA_STORAGE,
custom_command_type=storage_custom,
command_group_cls=StorageCommandGroup,
argument_context_cls=StorageArgumentContext)

def load_command_table(self, args):
super(StorageCommandsLoader, self).load_command_table(args)
from .commands import load_command_table
load_command_table(self, args)
return self.command_table

def load_arguments(self, command):
super(StorageCommandsLoader, self).load_arguments(command)
from ._params import load_arguments
load_arguments(self, command)


class StorageArgumentContext(AzArgumentContext):
def register_sas_arguments(self):
from ._validators import ipv4_range_type, get_datetime_type
self.argument('ip', type=ipv4_range_type,
help='Specifies the IP address or range of IP addresses from which to accept requests. Supports '
'only IPv4 style addresses.')
self.argument('expiry', type=get_datetime_type(True),
help='Specifies the UTC datetime (Y-m-d\'T\'H:M\'Z\') at which the SAS becomes invalid. Do not '
'use if a stored access policy is referenced with --id that specifies this value.')
self.argument('start', type=get_datetime_type(True),
help='Specifies the UTC datetime (Y-m-d\'T\'H:M\'Z\') at which the SAS becomes valid. Do not use '
'if a stored access policy is referenced with --id that specifies this value. Defaults to '
'the time of the request.')
self.argument('protocol', options_list=('--https-only',), action='store_const', const='https',
help='Only permit requests made with the HTTPS protocol. If omitted, requests from both the HTTP '
'and HTTPS protocol are permitted.')

def register_content_settings_argument(self, settings_class, update, arg_group=None, guess_from_file=None):
from ._validators import get_content_setting_validator

self.ignore('content_settings')
self.extra('content_type', default=None, help='The content MIME type.', arg_group=arg_group,
validator=get_content_setting_validator(settings_class, update, guess_from_file=guess_from_file))
self.extra('content_encoding', default=None, help='The content encoding type.', arg_group=arg_group)
self.extra('content_language', default=None, help='The content language.', arg_group=arg_group)
self.extra('content_disposition', default=None, arg_group=arg_group,
help='Conveys additional information about how to process the response payload, and can also be '
'used to attach additional metadata.')
self.extra('content_cache_control', default=None, help='The cache control string.', arg_group=arg_group)
self.extra('content_md5', default=None, help='The content\'s MD5 hash.', arg_group=arg_group)

def register_path_argument(self, default_file_param=None, options_list=None):
from ._validators import get_file_path_validator
from .completers import file_path_completer

path_help = 'The path to the file within the file share.'
if default_file_param:
path_help = '{} If the file name is omitted, the source file name will be used.'.format(path_help)
self.extra('path', options_list=options_list or ('--path', '-p'),
required=default_file_param is None, help=path_help,
validator=get_file_path_validator(default_file_param=default_file_param),
completer=file_path_completer)
self.ignore('file_name')
self.ignore('directory_name')

def register_source_uri_arguments(self, validator, blob_only=False):
self.argument('copy_source', options_list=('--source-uri', '-u'), validator=validator, required=False,
arg_group='Copy Source')
self.extra('source_sas', default=None, arg_group='Copy Source',
help='The shared access signature for the source storage account.')
self.extra('source_container', default=None, arg_group='Copy Source',
help='The container name for the source storage account.')
self.extra('source_blob', default=None, arg_group='Copy Source',
help='The blob name for the source storage account.')
self.extra('source_snapshot', default=None, arg_group='Copy Source',
help='The blob snapshot for the source storage account.')
self.extra('source_account_name', default=None, arg_group='Copy Source',
help='The storage account name of the source blob.')
self.extra('source_account_key', default=None, arg_group='Copy Source',
help='The storage account key of the source blob.')
if not blob_only:
self.extra('source_path', default=None, arg_group='Copy Source',
help='The file path for the source storage account.')
self.extra('source_share', default=None, arg_group='Copy Source',
help='The share name for the source storage account.')

def register_common_storage_account_options(self):
from azure.cli.core.commands.parameters import get_three_state_flag, get_enum_type
from ._validators import validate_encryption_services

t_access_tier, t_sku_name, t_encryption_services = self.command_loader.get_models(
'AccessTier', 'SkuName', 'EncryptionServices', resource_type=ResourceType.MGMT_STORAGE)

        self.argument('https_only', help='Allow HTTPS traffic only to the storage service.',
                      arg_type=get_three_state_flag())
self.argument('sku', help='The storage account SKU.', arg_type=get_enum_type(t_sku_name))
self.argument('assign_identity', action='store_true', resource_type=ResourceType.MGMT_STORAGE,
min_api='2017-06-01',
help='Generate and assign a new Storage Account Identity for this storage account for use '
'with key management services like Azure KeyVault.')
        self.argument('access_tier', arg_type=get_enum_type(t_access_tier),
                      help='The access tier used for billing StandardBlob accounts. Cannot be set for StandardLRS, '
                           'StandardGRS, StandardRAGRS, or PremiumLRS account types. It is required for '
                           'StandardBlob accounts during creation.')

if t_encryption_services:
encryption_choices = list(
t_encryption_services._attribute_map.keys()) # pylint: disable=protected-access
self.argument('encryption_services', arg_type=get_enum_type(encryption_choices),
resource_type=ResourceType.MGMT_STORAGE, min_api='2016-12-01', nargs='+',
validator=validate_encryption_services, help='Specifies which service(s) to encrypt.')


class StorageCommandGroup(AzCommandGroup):
def storage_command(self, name, method_name=None, command_type=None, **kwargs):
""" Registers an Azure CLI Storage Data Plane command. These commands always include the four parameters which
can be used to obtain a storage client: account-name, account-key, connection-string, and sas-token. """
if command_type:
command_name = self.command(name, method_name, command_type=command_type, **kwargs)
else:
command_name = self.command(name, method_name, **kwargs)
self._register_data_plane_account_arguments(command_name)

def storage_custom_command(self, name, method_name, **kwargs):
command_name = self.custom_command(name, method_name, **kwargs)
self._register_data_plane_account_arguments(command_name)

def get_handler_suppress_404(self):

# pylint: disable=inconsistent-return-statements
def handler(ex):
from azure.cli.core.profiles import get_sdk

t_error = get_sdk(self.command_loader.cli_ctx,
ResourceType.DATA_STORAGE,
'common._error#AzureMissingResourceHttpError')
if isinstance(ex, t_error):
return None
raise ex

return handler

def _register_data_plane_account_arguments(self, command_name):
""" Add parameters required to create a storage client """
from ._validators import validate_client_parameters
command = self.command_loader.command_table.get(command_name, None)
if not command:
return

group_name = 'Storage Account'
command.add_argument('account_name', '--account-name', required=False, default=None,
arg_group=group_name,
help='Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be '
'used in conjunction with either storage account key or a SAS token. If neither are '
'present, the command will try to query the storage account key using the '
                                  'authenticated Azure account. If a large number of storage commands are '
                                  'executed, the API quota may be hit.')
command.add_argument('account_key', '--account-key', required=False, default=None,
arg_group=group_name,
help='Storage account key. Must be used in conjunction with storage account name. '
'Environment variable: AZURE_STORAGE_KEY')
command.add_argument('connection_string', '--connection-string', required=False, default=None,
validator=validate_client_parameters, arg_group=group_name,
help='Storage account connection string. Environment variable: '
'AZURE_STORAGE_CONNECTION_STRING')
command.add_argument('sas_token', '--sas-token', required=False, default=None,
arg_group=group_name,
help='A Shared Access Signature (SAS). Must be used in conjunction with storage account '
'name. Environment variable: AZURE_STORAGE_SAS_TOKEN')


COMMAND_LOADER_CLS = StorageCommandsLoader
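For orientation: because the loader passes `command_group_cls=StorageCommandGroup`, the object yielded by `command_group` exposes `storage_command`/`storage_custom_command`, which register the command and then bolt on the four account arguments via `_register_data_plane_account_arguments`. A hypothetical sketch of a table entry — the `operations_tmpl` path and operation names are illustrative placeholders, not the PR's actual `commands.py`:

```python
# Hypothetical sketch; names are placeholders, not the real commands.py.
from azure.cli.core.commands import CliCommandType


def load_command_table(self, _):
    from ._client_factory import blob_data_service_factory

    blob_sdk = CliCommandType(
        operations_tmpl='azure.multiapi.storage.blob#BlockBlobService.{}',  # placeholder path
        client_factory=blob_data_service_factory)

    with self.command_group('storage blob', blob_sdk) as g:
        # storage_command (rather than plain command) attaches --account-name,
        # --account-key, --connection-string and --sas-token to the command.
        g.storage_command('list', 'list_blobs')
```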
131 changes: 131 additions & 0 deletions src/storage-preview/azext_storage_preview/_client_factory.py
@@ -0,0 +1,131 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from azure.cli.core.commands.client_factory import get_mgmt_service_client, get_data_service_client
from azure.cli.core.profiles import ResourceType, get_sdk

from .sdkutil import get_table_data_type

NO_CREDENTIALS_ERROR_MESSAGE = """
No credentials specified to access storage service. Please provide any of the following:
(1) account name and key (--account-name and --account-key options or
set AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY environment variables)
(2) connection string (--connection-string option or
set AZURE_STORAGE_CONNECTION_STRING environment variable)
(3) account name and SAS token (--sas-token option used with either the --account-name
option or AZURE_STORAGE_ACCOUNT environment variable)
"""


def get_storage_data_service_client(cli_ctx, service, name=None, key=None, connection_string=None, sas_token=None,
socket_timeout=None):
return get_data_service_client(cli_ctx, service, name, key, connection_string, sas_token,
socket_timeout=socket_timeout,
endpoint_suffix=cli_ctx.cloud.suffixes.storage_endpoint)


def generic_data_service_factory(cli_ctx, service, name=None, key=None, connection_string=None, sas_token=None,
socket_timeout=None):
try:
return get_storage_data_service_client(cli_ctx, service, name, key, connection_string, sas_token,
socket_timeout)
except ValueError as val_exception:
_ERROR_STORAGE_MISSING_INFO = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
'common._error#_ERROR_STORAGE_MISSING_INFO')
message = str(val_exception)
if message == _ERROR_STORAGE_MISSING_INFO:
message = NO_CREDENTIALS_ERROR_MESSAGE
from knack.util import CLIError
raise CLIError(message)


def storage_client_factory(cli_ctx, **_):
return get_mgmt_service_client(cli_ctx, ResourceType.MGMT_STORAGE)


def file_data_service_factory(cli_ctx, kwargs):
t_file_svc = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'file#FileService')
return generic_data_service_factory(cli_ctx, t_file_svc, kwargs.pop('account_name', None),
kwargs.pop('account_key', None),
connection_string=kwargs.pop('connection_string', None),
sas_token=kwargs.pop('sas_token', None))


def page_blob_service_factory(cli_ctx, kwargs):
t_page_blob_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'blob.pageblobservice#PageBlobService')
return generic_data_service_factory(cli_ctx, t_page_blob_service, kwargs.pop('account_name', None),
kwargs.pop('account_key', None),
connection_string=kwargs.pop('connection_string', None),
sas_token=kwargs.pop('sas_token', None))


def blob_data_service_factory(cli_ctx, kwargs):
from .sdkutil import get_blob_service_by_type
blob_type = kwargs.get('blob_type')
blob_service = get_blob_service_by_type(cli_ctx, blob_type) or get_blob_service_by_type(cli_ctx, 'block')

return generic_data_service_factory(cli_ctx, blob_service, kwargs.pop('account_name', None),
kwargs.pop('account_key', None),
connection_string=kwargs.pop('connection_string', None),
sas_token=kwargs.pop('sas_token', None),
socket_timeout=kwargs.pop('socket_timeout', None))


def table_data_service_factory(cli_ctx, kwargs):
return generic_data_service_factory(cli_ctx,
get_table_data_type(cli_ctx, 'table', 'TableService'),
kwargs.pop('account_name', None),
kwargs.pop('account_key', None),
connection_string=kwargs.pop('connection_string', None),
sas_token=kwargs.pop('sas_token', None))


def queue_data_service_factory(cli_ctx, kwargs):
t_queue_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'queue#QueueService')
return generic_data_service_factory(
cli_ctx, t_queue_service,
kwargs.pop('account_name', None),
kwargs.pop('account_key', None),
connection_string=kwargs.pop('connection_string', None),
sas_token=kwargs.pop('sas_token', None))


def cloud_storage_account_service_factory(cli_ctx, kwargs):
t_cloud_storage_account = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'common#CloudStorageAccount')
account_name = kwargs.pop('account_name', None)
account_key = kwargs.pop('account_key', None)
sas_token = kwargs.pop('sas_token', None)
kwargs.pop('connection_string', None)
return t_cloud_storage_account(account_name, account_key, sas_token)


def multi_service_properties_factory(cli_ctx, kwargs):
"""Create multiple data services properties instance based on the services option"""
from .services_wrapper import ServiceProperties

    t_base_blob_service, t_file_service, t_queue_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
                                                                   'blob.baseblobservice#BaseBlobService',
                                                                   'file#FileService', 'queue#QueueService')

t_table_service = get_table_data_type(cli_ctx, 'table', 'TableService')

account_name = kwargs.pop('account_name', None)
account_key = kwargs.pop('account_key', None)
connection_string = kwargs.pop('connection_string', None)
sas_token = kwargs.pop('sas_token', None)
services = kwargs.pop('services', [])

def get_creator(name, service_type):
return lambda: ServiceProperties(cli_ctx, name, service_type, account_name, account_key, connection_string,
sas_token)

creators = {'b': get_creator('blob', t_base_blob_service), 'f': get_creator('file', t_file_service),
'q': get_creator('queue', t_queue_service), 't': get_creator('table', t_table_service)}

return [creators[s]() for s in services]


def cf_sa(cli_ctx, _):
return storage_client_factory(cli_ctx).storage_accounts
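To make the factory contract concrete, a usage sketch under stated assumptions (`cli_ctx` comes from the command loader; every value below is made up): the framework hands the parsed keyword arguments to the factory, which pops the credential keys in place so that only command-specific arguments travel on to the SDK operation.

```python
# Illustrative only: approximates what the framework does with the parsed
# arguments before invoking an SDK operation.
from azext_storage_preview._client_factory import blob_data_service_factory


def make_blob_client(cli_ctx):
    kwargs = {
        'account_name': 'mystorageaccount',  # or None -> env var / key lookup
        'account_key': None,
        'connection_string': None,
        'sas_token': '<sas>',
        'socket_timeout': None,
        'blob_type': 'block',                # read with get(), not popped
        'container_name': 'mycontainer',     # command-specific argument
    }
    client = blob_data_service_factory(cli_ctx, kwargs)
    # The credential keys were popped in place; kwargs now holds only
    # blob_type and container_name for the subsequent SDK call.
    return client
```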