pylint and mypy
iscai-msft committed Feb 2, 2021
1 parent 4fd6637 commit 3885590
Showing 7 changed files with 32 additions and 22 deletions.
1 change: 0 additions & 1 deletion eng/test_tools.txt
@@ -28,7 +28,6 @@ Jinja2==2.11.2
 pylint==1.8.4; python_version < '3.4'
 pylint==2.5.2; python_version >= '3.4'

-../../../tools/azure-devtools

 # python-dotenv
 python-dotenv==0.15.0
@@ -3,7 +3,6 @@
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
 # ------------------------------------
-import json
 from six.moves.urllib.parse import urlencode
 from azure.core.polling import LROPoller
 from azure.core.polling.base_polling import LROBasePolling, OperationResourcePolling, OperationFailed, BadStatus
@@ -12,7 +12,6 @@
     TextDocumentInput,
     RecognizeEntitiesAction,
     RecognizePiiEntitiesAction,
-    ExtractKeyPhrasesAction,
     AnalyzeBatchActionsType,
 )

@@ -217,14 +217,16 @@ def _num_tasks_in_current_page(returned_tasks_object):
         len(returned_tasks_object.key_phrase_extraction_tasks or [])
     )

-def get_iter_items(doc_id_order, task_order, obj, response_headers, analyze_job_state):
+def get_iter_items(doc_id_order, task_order, response_headers, analyze_job_state):
     iter_items = []
     returned_tasks_object = analyze_job_state.tasks
     for current_task_type in task_order:
         deserialization_callback = _get_deserialization_callback_from_task_type(current_task_type)
         property_name = _get_property_name_from_task_type(current_task_type)
         response_task_to_deserialize = getattr(returned_tasks_object, property_name).pop(0)
-        document_results = deserialization_callback(doc_id_order, response_task_to_deserialize.results, response_headers, lro=True)
+        document_results = deserialization_callback(
+            doc_id_order, response_task_to_deserialize.results, response_headers, lro=True
+        )
         iter_items.append(
             AnalyzeBatchActionsResult(
                 document_results=document_results,
@@ -234,9 +236,9 @@
         )
     return iter_items

-def analyze_extract_page_data(doc_id_order, task_order, obj, response_headers, analyze_job_state):
+def analyze_extract_page_data(doc_id_order, task_order, response_headers, analyze_job_state):
     # return next link, list of
-    iter_items = get_iter_items(doc_id_order, task_order, obj, response_headers, analyze_job_state)
+    iter_items = get_iter_items(doc_id_order, task_order, response_headers, analyze_job_state)
     return analyze_job_state.next_link, iter_items


@@ -269,7 +271,7 @@ def healthcare_paged_result(doc_id_order, health_status_callback, _, obj, respon
 def analyze_paged_result(doc_id_order, task_order, analyze_status_callback, _, obj, response_headers, show_stats=False): # pylint: disable=unused-argument
     return AnalyzeResult(
         functools.partial(lro_get_next_page, analyze_status_callback, obj, show_stats=show_stats),
-        functools.partial(analyze_extract_page_data, doc_id_order, task_order, obj, response_headers),
+        functools.partial(analyze_extract_page_data, doc_id_order, task_order, response_headers),
         statistics=TextDocumentBatchStatistics._from_generated(obj.statistics) \
             if show_stats and obj.statistics is not None else None # pylint: disable=protected-access
     )
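The `obj` argument dropped here was never used inside `get_iter_items` or `analyze_extract_page_data`; because the pager receives these functions through `functools.partial`, the signature and the partial that pre-binds its leading arguments have to change in lockstep. A minimal, self-contained sketch of that binding pattern (the names below are illustrative stand-ins, not the SDK's own):

import functools

def extract_page_data(doc_id_order, response_headers, page):
    # Stand-in for analyze_extract_page_data: the paging machinery passes
    # only `page`; everything before it was pre-bound via functools.partial.
    return page["next_link"], page["items"]

# Bind the leading arguments once; the pager later calls extract_data(page).
extract_data = functools.partial(extract_page_data, ["1", "2"], {"x-request-id": "abc"})

next_link, items = extract_data({"next_link": None, "items": ["a", "b"]})
assert next_link is None and items == ["a", "b"]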
@@ -45,14 +45,16 @@ def healthcare_paged_result(doc_id_order, health_status_callback, response, obj,
         statistics=RequestStatistics._from_generated(obj.results.statistics) if show_stats else None # pylint: disable=protected-access
     )

-async def analyze_extract_page_data_async(doc_id_order, task_order, obj, response_headers, analyze_job_state):
-    iter_items = get_iter_items(doc_id_order, task_order, obj, response_headers, analyze_job_state)
+async def analyze_extract_page_data_async(doc_id_order, task_order, response_headers, analyze_job_state):
+    iter_items = get_iter_items(doc_id_order, task_order, response_headers, analyze_job_state)
     return analyze_job_state.next_link, AsyncList(iter_items)

-def analyze_paged_result(doc_id_order, task_order, analyze_status_callback, response, obj, response_headers, show_stats=False): # pylint: disable=unused-argument
+def analyze_paged_result(
+    doc_id_order, task_order, analyze_status_callback, response, obj, response_headers, show_stats=False # pylint: disable=unused-argument
+):
     return AnalyzeResultAsync(
         functools.partial(lro_get_next_page_async, analyze_status_callback, obj),
-        functools.partial(analyze_extract_page_data_async, doc_id_order, task_order, obj, response_headers),
+        functools.partial(analyze_extract_page_data_async, doc_id_order, task_order, response_headers),
         statistics=TextDocumentBatchStatistics._from_generated(obj.statistics) \
             if show_stats and obj.statistics is not None else None # pylint: disable=protected-access
     )
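The async variant wraps its results in `AsyncList(iter_items)` because the extract-data callback of azure-core's async pager must return something iterable with `async for`. A minimal sketch of that contract, assuming only the public `AsyncItemPaged`/`AsyncList` behavior from `azure.core.async_paging` (the page shapes and values below are hypothetical):

import asyncio
from azure.core.async_paging import AsyncItemPaged, AsyncList

# token -> (next_token, items); a falsy next_token ends the iteration
PAGES = {None: (1, ["a", "b"]), 1: (None, ["c"])}

async def get_next(continuation_token=None):
    return PAGES[continuation_token]

async def extract_data(page):
    next_token, items = page
    # Mirror analyze_extract_page_data_async: (continuation token, AsyncList)
    return next_token, AsyncList(items)

async def main():
    results = [item async for item in AsyncItemPaged(get_next, extract_data)]
    assert results == ["a", "b", "c"]

asyncio.run(main())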
@@ -39,7 +39,6 @@
     TextAnalyticsOperationResourcePolling,
     TextAnalyticsLROPollingMethod,
     AnalyzeBatchActionsLROPollingMethod,
-    AnalyzeBatchActionsLROPoller,
 )

 if TYPE_CHECKING:
@@ -767,13 +766,16 @@ def begin_analyze_batch_actions( # type: ignore
         try:
             analyze_tasks = self._client.models(api_version='v3.1-preview.3').JobManifestTasks(
                 entity_recognition_tasks=[
-                    t.to_generated() for t in [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_ENTITIES]
+                    t.to_generated() for t in
+                    [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_ENTITIES]
                 ],
                 entity_recognition_pii_tasks=[
-                    t.to_generated() for t in [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_PII_ENTITIES]
+                    t.to_generated() for t in
+                    [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_PII_ENTITIES]
                 ],
                 key_phrase_extraction_tasks=[
-                    t.to_generated() for t in [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.EXTRACT_KEY_PHRASES]
+                    t.to_generated() for t in
+                    [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.EXTRACT_KEY_PHRASES]
                 ]
             )
             analyze_body = self._client.models(api_version='v3.1-preview.3').AnalyzeBatchInput(
@@ -783,7 +785,9 @@
             )
             return self._client.begin_analyze(
                 body=analyze_body,
-                cls=kwargs.pop("cls", partial(self._analyze_result_callback, doc_id_order, task_order, show_stats=show_stats)),
+                cls=kwargs.pop("cls", partial(
+                    self._analyze_result_callback, doc_id_order, task_order, show_stats=show_stats
+                )),
                 polling=AnalyzeBatchActionsLROPollingMethod(
                     timeout=polling_interval,
                     lro_algorithms=[
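The `JobManifestTasks` construction above is a partition of the user's `actions` list: each action is routed into the task bucket matching `_determine_action_type(a)`, then converted with `to_generated()`. A compact sketch of the same partitioning idea with hypothetical stand-in types (`ActionType`, the action classes, and `bucket_actions` are illustrative, not SDK names):

from enum import Enum

class ActionType(Enum):
    RECOGNIZE_ENTITIES = "recognize_entities"
    RECOGNIZE_PII_ENTITIES = "recognize_pii_entities"
    EXTRACT_KEY_PHRASES = "extract_key_phrases"

class RecognizeEntitiesAction:
    action_type = ActionType.RECOGNIZE_ENTITIES

class ExtractKeyPhrasesAction:
    action_type = ActionType.EXTRACT_KEY_PHRASES

def bucket_actions(actions):
    # Single pass over `actions`; the diff above reaches the same partition
    # with one filtering comprehension per task kind.
    buckets = {kind: [] for kind in ActionType}
    for action in actions:
        buckets[action.action_type].append(action)
    return buckets

mixed = [RecognizeEntitiesAction(), ExtractKeyPhrasesAction()]
assert len(bucket_actions(mixed)[ActionType.RECOGNIZE_ENTITIES]) == 1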
@@ -694,7 +694,7 @@ def _analyze_result_callback(self, doc_id_order, task_order, raw_response, _, he
     async def begin_analyze_batch_actions( # type: ignore
         self,
         documents, # type: Union[List[str], List[TextDocumentInput], List[Dict[str, str]]]
-        actions, # type: Union[List[RecognizeEntitiesAction], List[RecognizePiiEntitiesAction], List[ExtractKeyPhrasesAction]]
+        actions, # type: List[Union[RecognizeEntitiesAction, RecognizePiiEntitiesAction, ExtractKeyPhrasesAction]]
         **kwargs # type: Any
     ): # type: (...) -> AsyncLROPoller[AsyncItemPaged[AnalyzeBatchActionsResult]]
         """Start a long-running operation to perform a variety of text analysis tasks over a batch of documents.
@@ -762,13 +762,16 @@ async def begin_analyze_batch_actions( # type: ignore
         try:
             analyze_tasks = self._client.models(api_version='v3.1-preview.3').JobManifestTasks(
                 entity_recognition_tasks=[
-                    t.to_generated() for t in [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_ENTITIES]
+                    t.to_generated() for t in
+                    [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_ENTITIES]
                 ],
                 entity_recognition_pii_tasks=[
-                    t.to_generated() for t in [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_PII_ENTITIES]
+                    t.to_generated() for t in
+                    [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.RECOGNIZE_PII_ENTITIES]
                 ],
                 key_phrase_extraction_tasks=[
-                    t.to_generated() for t in [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.EXTRACT_KEY_PHRASES]
+                    t.to_generated() for t in
+                    [a for a in actions if _determine_action_type(a) == AnalyzeBatchActionsType.EXTRACT_KEY_PHRASES]
                 ]
             )
             analyze_body = self._client.models(api_version='v3.1-preview.3').AnalyzeBatchInput(
@@ -778,7 +781,9 @@
             )
             return await self._client.begin_analyze(
                 body=analyze_body,
-                cls=kwargs.pop("cls", partial(self._analyze_result_callback, doc_id_order, task_order, show_stats=show_stats)),
+                cls=kwargs.pop("cls", partial(
+                    self._analyze_result_callback, doc_id_order, task_order, show_stats=show_stats
+                )),
                 polling=AsyncAnalyzeBatchActionsLROPollingMethod(
                     timeout=polling_interval,
                     lro_algorithms=[
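The type-comment change at `@@ -694,7 +694,7 @@` is the mypy half of this commit's title: `Union[List[A], List[B], List[C]]` means "a list containing only A's, or only B's, or only C's", so a mixed batch of actions fails type checking, while `List[Union[A, B, C]]` admits it. A standalone illustration with hypothetical minimal classes (modern annotations standing in for the SDK's Python-2-compatible `# type:` comments):

from typing import List, Union

class RecognizeEntitiesAction: ...
class ExtractKeyPhrasesAction: ...

def old_style(actions: Union[List[RecognizeEntitiesAction], List[ExtractKeyPhrasesAction]]) -> None: ...

def new_style(actions: List[Union[RecognizeEntitiesAction, ExtractKeyPhrasesAction]]) -> None: ...

mixed: List[Union[RecognizeEntitiesAction, ExtractKeyPhrasesAction]] = [
    RecognizeEntitiesAction(),
    ExtractKeyPhrasesAction(),
]

old_style(mixed)  # mypy rejects: `mixed` is not a list of a single action kind
new_style(mixed)  # mypy accepts: each element is one of the union's members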
