Skip to content

Commit

Permalink
disable some by design bandit warnings (#11495)
Browse files Browse the repository at this point in the history
* disable some by design bandit warnings

* Packaging update of azure-mgmt-datalake-analytics

Co-authored-by: Azure SDK Bot <aspysdk2@microsoft.com>
  • Loading branch information
xiangyan99 and AutorestCI authored Jun 4, 2020
1 parent 3761730 commit 12e2e34
Show file tree
Hide file tree
Showing 21 changed files with 67 additions and 73 deletions.
16 changes: 8 additions & 8 deletions sdk/cosmos/azure-cosmos/test/test_crud.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@ def test_sql_query_crud(self):
self.assertEqual(0, len(databases), 'Unexpected number of query results.')

# query with a string.
databases = list(self.client.query_databases('SELECT * FROM root r WHERE r.id="' + db2.id + '"'))
databases = list(self.client.query_databases('SELECT * FROM root r WHERE r.id="' + db2.id + '"')) #nosec
self.assertEqual(1, len(databases), 'Unexpected number of query results.')
self.client.delete_database(db1.id)
self.client.delete_database(db2.id)
Expand Down Expand Up @@ -507,30 +507,30 @@ def test_partitioned_collection_document_crud_and_query(self):
# query document on the partition key specified in the predicate will pass even without setting enableCrossPartitionQuery or passing in the partitionKey value
documentlist = list(created_collection.query_items(
{
'query': 'SELECT * FROM root r WHERE r.id=\'' + replaced_document.get('id') + '\''
'query': 'SELECT * FROM root r WHERE r.id=\'' + replaced_document.get('id') + '\'' #nosec
}))
self.assertEqual(1, len(documentlist))

# query document on any property other than partitionKey will fail without setting enableCrossPartitionQuery or passing in the partitionKey value
try:
list(created_collection.query_items(
{
'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\''
'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'' #nosec
}))
except Exception:
pass

# cross partition query
documentlist = list(created_collection.query_items(
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'',
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'', #nosec
enable_cross_partition_query=True
))

self.assertEqual(1, len(documentlist))

# query document by providing the partitionKey value
documentlist = list(created_collection.query_items(
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'',
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'', #nosec
partition_key=replaced_document.get('id')
))

Expand Down Expand Up @@ -746,14 +746,14 @@ def test_partitioned_collection_conflict_crud_and_query(self):
# query conflicts on any property other than partitionKey will fail without setting enableCrossPartitionQuery or passing in the partitionKey value
try:
list(created_collection.query_conflicts(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get( #nosec
'resourceType') + '\''
))
except Exception:
pass

conflictlist = list(created_collection.query_conflicts(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'',
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'', #nosec
enable_cross_partition_query=True
))

Expand All @@ -762,7 +762,7 @@ def test_partitioned_collection_conflict_crud_and_query(self):
# query conflicts by providing the partitionKey value
options = {'partitionKey': conflict_definition.get('id')}
conflictlist = list(created_collection.query_conflicts(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'',
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'', #nosec
partition_key=conflict_definition['id']
))

Expand Down
4 changes: 2 additions & 2 deletions sdk/cosmos/azure-cosmos/test/test_globaldb.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,15 +89,15 @@ def setUp(self):
self.client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey)

# Create the test database only when it's not already present
query_iterable = self.client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_database_id + '\'')
query_iterable = self.client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_database_id + '\'') #nosec
it = iter(query_iterable)

self.test_db = next(it, None)
if self.test_db is None:
self.test_db = self.client.CreateDatabase({'id' : Test_globaldb_tests.test_database_id})

# Create the test collection only when it's not already present
query_iterable = self.client.QueryContainers(self.test_db['_self'], 'SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_collection_id + '\'')
query_iterable = self.client.QueryContainers(self.test_db['_self'], 'SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_collection_id + '\'') #nosec
it = iter(query_iterable)

self.test_coll = next(it, None)
Expand Down
2 changes: 1 addition & 1 deletion sdk/cosmos/azure-cosmos/test/test_multi_orderby.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ def test_multi_orderby_queries(self):
where_string = "WHERE root." + self.NUMBER_FIELD + " % 2 = 0" if has_filter else ""
query = "SELECT " + top_string + " [" + select_item_builder + "] " + \
"FROM root " + where_string + " " + \
"ORDER BY " + orderby_item_builder
"ORDER BY " + orderby_item_builder #nosec

expected_ordered_list = self.top(self.sort(self.filter(self.items, has_filter), composite_index, invert), has_top, top_count)

Expand Down
8 changes: 4 additions & 4 deletions sdk/cosmos/azure-cosmos/test/test_orderby.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ def test_orderby_top_query(self):

# an order by query with top, total existing docs more than requested top count
query = {
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count #nosec
}

def get_order_by_key(r):
Expand All @@ -186,7 +186,7 @@ def test_orderby_top_query_less_results_than_top_counts(self):

# an order by query with top, total existing docs less than requested top count
query = {
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count #nosec
}

def get_order_by_key(r):
Expand Down Expand Up @@ -226,7 +226,7 @@ def test_top_query(self):

# a top query, the results will be sorted based on the target partition key range
query = {
'query': 'SELECT top %d * FROM root r' % len(expected_ordered_ids)
'query': 'SELECT top %d * FROM root r' % len(expected_ordered_ids) #nosec
}
self.execute_query_and_validate_results(query, expected_ordered_ids)

Expand Down Expand Up @@ -260,7 +260,7 @@ def test_top_query_as_string(self):
expected_ordered_ids = [d['id'] for d in first_two_ranges_results]

# a top query, the results will be sorted based on the target partition key range
query = 'SELECT top %d * FROM root r' % len(expected_ordered_ids)
query = 'SELECT top %d * FROM root r' % len(expected_ordered_ids) #nosec
self.execute_query_and_validate_results(query, expected_ordered_ids)

def test_parametrized_top_query(self):
Expand Down
18 changes: 9 additions & 9 deletions sdk/cosmos/azure-cosmos/test/test_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,55 +349,55 @@ def test_distinct(self):
padded_docs = self._pad_with_none(documents, distinct_field)

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field),
query='SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True),
is_select=False,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field),
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, pk_field, distinct_field), distinct_field, pk_field, True),
is_select=False,
fields=[distinct_field, pk_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field),
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, pk_field), distinct_field, pk_field, True),
is_select=False,
fields=[distinct_field, pk_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field),
query='SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True),
is_select=False,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c' % (distinct_field),
query='SELECT distinct c.%s from c' % (distinct_field), #nosec
results=self._get_distinct_docs(padded_docs, distinct_field, None, False),
is_select=True,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field),
query='SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field), #nosec
results=self._get_distinct_docs(padded_docs, distinct_field, pk_field, False),
is_select=True,
fields=[distinct_field, pk_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct value c.%s from c' % (distinct_field),
query='SELECT distinct value c.%s from c' % (distinct_field), #nosec
results=self._get_distinct_docs(padded_docs, distinct_field, None, True),
is_select=True,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field),
query='SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field), #nosec
results=[],
is_select=True,
fields=[different_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c' % (different_field),
query='SELECT distinct c.%s from c' % (different_field), #nosec
results=['None'],
is_select=True,
fields=[different_field])
Expand Down
33 changes: 12 additions & 21 deletions sdk/datalake/azure-mgmt-datalake-analytics/README.md
Original file line number Diff line number Diff line change
@@ -1,30 +1,21 @@
## Microsoft Azure SDK for Python
# Microsoft Azure SDK for Python

This is the Microsoft Azure Data Lake Analytics Management Client
Library.
This is the Microsoft Azure Data Lake Analytics Management Client Library.
This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8.
For a more complete view of Azure libraries, see the [GitHub repo](https://github.com/Azure/azure-sdk-for-python/)

Azure Resource Manager (ARM) is the next generation of management APIs
that replace the old Azure Service Management (ASM).

This package has been tested with Python 2.7, 3.5, 3.6 and 3.7.
# Usage

For the older Azure Service Management (ASM) libraries, see
[azure-servicemanagement-legacy](https://pypi.python.org/pypi/azure-servicemanagement-legacy)
library.

For a more complete set of Azure libraries, see the
[azure](https://pypi.python.org/pypi/azure) bundle package.

## Usage

For code examples, see [Data Lake Analytics
Management](https://docs.microsoft.com/python/api/overview/azure/data-lake-analytics)
For code examples, see [Data Lake Analytics Management](https://docs.microsoft.com/python/api/overview/azure/data-lake-analytics)
on docs.microsoft.com.

## Provide Feedback

If you encounter any bugs or have suggestions, please file an issue in
the [Issues](https://github.com/Azure/azure-sdk-for-python/issues)
# Provide Feedback

If you encounter any bugs or have suggestions, please file an issue in the
[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
section of the project.

![image](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-datalake-analytics%2FREADME.png)

![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-datalake-analytics%2FREADME.png)
5 changes: 4 additions & 1 deletion sdk/datalake/azure-mgmt-datalake-analytics/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,9 @@
pass

# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)

Expand Down Expand Up @@ -67,6 +69,7 @@
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ def setUp(self):
)
AS
T(a, b);
END;""".format(self.db_name, self.table_name, self.tvf_name, self.view_name, self.proc_name)
END;""".format(self.db_name, self.table_name, self.tvf_name, self.view_name, self.proc_name) #nosec

# define all the job IDs to be used during execution
if self.is_playback():
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -350,7 +350,7 @@ def __init__(self, **kwargs): # pylint: disable=unused-argument

@staticmethod
def get_content_md5(data):
md5 = hashlib.md5()
md5 = hashlib.md5() #nosec
if isinstance(data, bytes):
md5.update(data)
elif hasattr(data, 'read'):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -350,7 +350,7 @@ def __init__(self, **kwargs): # pylint: disable=unused-argument

@staticmethod
def get_content_md5(data):
md5 = hashlib.md5()
md5 = hashlib.md5() #nosec
if isinstance(data, bytes):
md5.update(data)
elif hasattr(data, 'read'):
Expand Down
4 changes: 2 additions & 2 deletions sdk/identity/azure-identity/tests/test_browser_credential.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,8 +284,8 @@ def test_redirect_server():
thread.start()

# send a request, verify the server exposes the query
url = "http://127.0.0.1:{}/?{}={}".format(port, expected_param, expected_value)
response = urllib.request.urlopen(url)
url = "http://127.0.0.1:{}/?{}={}".format(port, expected_param, expected_value) #nosec
response = urllib.request.urlopen(url) #nosec

assert response.code == 200
assert server.query_params[expected_param] == [expected_value]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ def validate_jwt(request, client_id, pem_bytes):
deserialized_header = json.loads(header.decode("utf-8"))
assert deserialized_header["alg"] == "RS256"
assert deserialized_header["typ"] == "JWT"
assert urlsafeb64_decode(deserialized_header["x5t"]) == cert.fingerprint(hashes.SHA1())
assert urlsafeb64_decode(deserialized_header["x5t"]) == cert.fingerprint(hashes.SHA1()) #nosec

assert claims["aud"] == request.url
assert claims["iss"] == claims["sub"] == client_id
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ def __init__(self, **kwargs): # pylint: disable=unused-argument

@staticmethod
def get_content_md5(data):
md5 = hashlib.md5()
md5 = hashlib.md5() #nosec
if isinstance(data, bytes):
md5.update(data)
elif hasattr(data, 'read'):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,8 @@ def wrap_key(self, key, algorithm='RSA'):
if algorithm == 'RSA':
return self.public_key.encrypt(key,
OAEP(
mgf=MGF1(algorithm=SHA1()),
algorithm=SHA1(),
mgf=MGF1(algorithm=SHA1()), #nosec
algorithm=SHA1(), #nosec
label=None)
)
raise ValueError('Unknown key wrap algorithm.')
Expand All @@ -97,8 +97,8 @@ def unwrap_key(self, key, algorithm):
if algorithm == 'RSA':
return self.private_key.decrypt(key,
OAEP(
mgf=MGF1(algorithm=SHA1()),
algorithm=SHA1(),
mgf=MGF1(algorithm=SHA1()), #nosec
algorithm=SHA1(), #nosec
label=None)
)
raise ValueError('Unknown key wrap algorithm.')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ def wrap_key(self, key, algorithm='RSA'):
if algorithm == 'RSA':
return self.public_key.encrypt(key,
OAEP(
mgf=MGF1(algorithm=SHA1()),
algorithm=SHA1(),
mgf=MGF1(algorithm=SHA1()), #nosec
algorithm=SHA1(), #nosec
label=None)
)

Expand All @@ -76,8 +76,8 @@ def unwrap_key(self, key, algorithm):
if algorithm == 'RSA':
return self.private_key.decrypt(key,
OAEP(
mgf=MGF1(algorithm=SHA1()),
algorithm=SHA1(),
mgf=MGF1(algorithm=SHA1()), #nosec
algorithm=SHA1(), #nosec
label=None)
)

Expand Down
Loading

0 comments on commit 12e2e34

Please sign in to comment.