Rename Query APIs (Azure#20830)
rakshith91 authored Sep 24, 2021
1 parent f2b2cea commit cce4b29
Showing 24 changed files with 56 additions and 54 deletions.
4 changes: 3 additions & 1 deletion sdk/monitor/azure-monitor-query/CHANGELOG.md
@@ -12,7 +12,9 @@
### Breaking Changes

- `LogsQueryResult` now iterates over the tables directly as a convenience.
-- `query` API now returns a union of `LogsQueryPartialResult` and `LogsQueryResult`.
+- `query` API in logs is renamed to `query_workspace`.
+- `query` API in metrics is renamed to `query_resource`.
+- `query_workspace` API now returns a union of `LogsQueryPartialResult` and `LogsQueryResult`.
- `query_batch` API now returns a union of `LogsQueryPartialResult`, `LogsQueryError` and `LogsQueryResult`.
- `metric_namespace` is renamed to `namespace` and is a keyword-only argument in `list_metric_definitions` API.

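For callers, the renames are mechanical. The sketch below is not part of this commit; it assumes `DefaultAzureCredential` and the `LOG_WORKSPACE_ID` / `METRICS_RESOURCE_URI` environment variables used by the samples further down, with a placeholder query and metric name.

```python
import os
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient, LogsQueryStatus, MetricsQueryClient

credential = DefaultAzureCredential()
logs_client = LogsQueryClient(credential)
metrics_client = MetricsQueryClient(credential)

# Before this commit: logs_client.query(workspace_id, query, timespan=...)
response = logs_client.query_workspace(
    os.environ['LOG_WORKSPACE_ID'],
    "AppRequests | take 5",
    timespan=timedelta(days=1),
)
# query_workspace returns LogsQueryResult or LogsQueryPartialResult.
if response.status == LogsQueryStatus.PARTIAL:
    print(response.partial_error)

# Before this commit: metrics_client.query(resource_uri, metric_names=[...], timespan=...)
metrics_response = metrics_client.query_resource(
    os.environ['METRICS_RESOURCE_URI'],
    metric_names=["Ingress"],
    timespan=timedelta(hours=2),
)
for metric in metrics_response.metrics:
    print(metric.name)
```

Per the changelog entry above, `query_batch` keeps its name; only its return type widens, so batch callers additionally handle the `LogsQueryError` case.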
10 changes: 5 additions & 5 deletions sdk/monitor/azure-monitor-query/README.md
@@ -122,7 +122,7 @@ start_time=datetime(2021, 7, 2)
end_time=datetime.now()

# returns LogsQueryResult
-response = client.query(
+response = client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
query,
timespan=(start_time, end_time)
@@ -230,7 +230,7 @@ from azure.identity import DefaultAzureCredential
credential = DefaultAzureCredential()
client = LogsQueryClient(credential)

-response = client.query(
+response = client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
"range x from 1 to 10000000000 step 1 | count",
server_timeout=1,
@@ -250,7 +250,7 @@ The same logs query can be executed across multiple Log Analytics workspaces. In
For example, the following query executes in three workspaces:

```python
-client.query(
+client.query_workspace(
<workspace_id>,
query,
additional_workspaces=['<workspace 2>', '<workspace 3>']
@@ -282,7 +282,7 @@ client = MetricsQueryClient(credential)
start_time = datetime(2021, 5, 25)
duration = timedelta(days=1)
metrics_uri = os.environ['METRICS_RESOURCE_URI']
-response = client.query(
+response = client.query_resource(
metrics_uri,
metric_names=["PublishSuccessCount"],
timespan=(start_time, duration)
@@ -328,7 +328,7 @@ credential = DefaultAzureCredential()
client = MetricsQueryClient(credential)

metrics_uri = os.environ['METRICS_RESOURCE_URI']
-response = client.query(
+response = client.query_resource(
metrics_uri,
metric_names=["MatchedEventCount"],
aggregations=[MetricAggregationType.COUNT]
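The README hunk above stops at the aggregation call itself. As an illustration only (not part of the diff), the aggregated values can be read back by walking the `MetricsResult`, assuming this library version's `metrics` → `timeseries` → `data` shape:

```python
import os

from azure.identity import DefaultAzureCredential
from azure.monitor.query import MetricAggregationType, MetricsQueryClient

client = MetricsQueryClient(DefaultAzureCredential())
response = client.query_resource(
    os.environ['METRICS_RESOURCE_URI'],
    metric_names=["MatchedEventCount"],
    aggregations=[MetricAggregationType.COUNT],
)

# Each metric holds time series; each series holds data points with the requested aggregations.
for metric in response.metrics:
    for series in metric.timeseries:
        for point in series.data:
            print(metric.name, point.timestamp, point.count)
```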
@@ -58,7 +58,7 @@ def __init__(self, credential, **kwargs):
self._query_op = self._client.query

@distributed_trace
-def query(self, workspace_id, query, **kwargs):
+def query_workspace(self, workspace_id, query, **kwargs):
# type: (str, str, Any) -> Union[LogsQueryResult, LogsQueryPartialResult]
"""Execute an Analytics query.
@@ -56,7 +56,7 @@ def __init__(self, credential, **kwargs):
self._definitions_op = self._client.metric_definitions

@distributed_trace
-def query(self, resource_uri, metric_names, **kwargs):
+def query_resource(self, resource_uri, metric_names, **kwargs):
# type: (str, list, Optional[timedelta], Any) -> MetricsResult
"""Lists the metric values for a resource.
@@ -42,7 +42,7 @@ def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
self._query_op = self._client.query

@distributed_trace_async
-async def query(
+async def query_workspace(
self,
workspace_id: str,
query: str,
@@ -48,7 +48,7 @@ def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
self._definitions_op = self._client.metric_definitions

@distributed_trace_async
-async def query(
+async def query_resource(
self, resource_uri: str, metric_names: List, **kwargs: Any
) -> MetricsResult:
"""Lists the metric values for a resource.
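The async clients in `azure.monitor.query.aio` pick up the same renames, as the two hunks above show. A rough end-to-end sketch under the same assumptions (async credential, same environment variables, placeholder query and metric name; not taken from the diff):

```python
import asyncio
import os
from datetime import timedelta

from azure.identity.aio import DefaultAzureCredential
from azure.monitor.query.aio import LogsQueryClient, MetricsQueryClient


async def main():
    credential = DefaultAzureCredential()

    async with LogsQueryClient(credential) as logs_client:
        # Formerly: await logs_client.query(...)
        logs = await logs_client.query_workspace(
            os.environ['LOG_WORKSPACE_ID'],
            "AppRequests | take 5",
            timespan=timedelta(days=1),
        )
        for table in logs.tables:
            print(table.name)

    async with MetricsQueryClient(credential) as metrics_client:
        # Formerly: await metrics_client.query(...)
        metrics = await metrics_client.query_resource(
            os.environ['METRICS_RESOURCE_URI'],
            metric_names=["Ingress"],
            timespan=timedelta(hours=2),
        )
        print(len(metrics.metrics))

    await credential.close()


asyncio.run(main())
```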
@@ -24,7 +24,7 @@ async def logs_query():

# returns LogsQueryResult
async with client:
-response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)
+response = await client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)

if not response.tables:
print("No results for the query")
@@ -17,7 +17,7 @@ async def query_metrics():

metrics_uri = os.environ['METRICS_RESOURCE_URI']
async with client:
-response = await client.query(
+response = await client.query_resource(
metrics_uri,
metric_names=["Ingress"],
timespan=timedelta(hours=2),
@@ -21,7 +21,7 @@

# returns LogsQueryResult
try:
-response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
+response = client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
if response.status == LogsQueryStatus.PARTIAL:
# handle error here
error = response.partial_error
@@ -17,7 +17,7 @@
summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""

# returns LogsQueryResult
-response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(hours=1))
+response = client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(hours=1))

if not response.tables:
print("No results for the query")
@@ -19,7 +19,7 @@
end_time = datetime.now(UTC())

# returns LogsQueryResult
-response = client.query(
+response = client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
query,
additional_workspaces=[os.environ["SECONDARY_WORKSPACE_ID"]],
@@ -17,7 +17,7 @@
summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""

# returns LogsQueryResult
-response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
+response = client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))

try:
table = response.tables[0]
@@ -17,7 +17,7 @@

# [START send_metrics_query]
metrics_uri = os.environ['METRICS_RESOURCE_URI']
-response = client.query(
+response = client.query_resource(
metrics_uri,
metric_names=["Ingress"],
timespan=timedelta(hours=2),
@@ -11,7 +11,7 @@

client = LogsQueryClient(credential)

-response = client.query(
+response = client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
"range x from 1 to 10000000000 step 1 | count",
server_timeout=1,
@@ -20,7 +20,7 @@ async def test_logs_single_query_fatal_exception():
credential = _credential()
client = LogsQueryClient(credential)
with pytest.raises(HttpResponseError):
-await client.query('bad_workspace_id', 'AppRequests', timespan=None)
+await client.query_workspace('bad_workspace_id', 'AppRequests', timespan=None)

@pytest.mark.live_test_only
@pytest.mark.asyncio
@@ -30,7 +30,7 @@ async def test_logs_single_query_partial_exception_not_allowed():
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
-response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
+response = await client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
assert response.__class__ == LogsQueryPartialResult
assert response.partial_error is not None
assert response.partial_error.code == 'PartialError'
@@ -25,7 +25,7 @@ async def test_logs_auth():
summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""

# returns LogsQueryResult
-response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)
+response = await client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)

assert response is not None
assert response.tables is not None
@@ -41,7 +41,7 @@ async def test_logs_auth_no_timespan():

# returns LogsQueryResult
with pytest.raises(TypeError):
-await client.query(os.environ['LOG_WORKSPACE_ID'], query)
+await client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query)


@pytest.mark.skip("https://github.com/Azure/azure-sdk-for-python/issues/19917")
@@ -50,7 +50,7 @@ async def test_logs_auth_no_timespan():
async def test_logs_server_timeout():
client = LogsQueryClient(_credential())
with pytest.raises(HttpResponseError) as e:
-response = await client.query(
+response = await client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
"range x from 1 to 10000000000 step 1 | count",
timespan=None,
@@ -111,7 +111,7 @@ async def test_logs_single_query_additional_workspaces_async():
query = "union * | where TimeGenerated > ago(100d) | project TenantId | summarize count() by TenantId"

# returns LogsQueryResult
-response = await client.query(
+response = await client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
query,
timespan=None,
@@ -162,7 +162,7 @@ async def test_logs_single_query_with_render():
query = """AppRequests"""

# returns LogsQueryResult
-response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, include_visualization=True)
+response = await client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, include_visualization=True)

assert response.visualization is not None

@@ -174,7 +174,7 @@ async def test_logs_single_query_with_render_and_stats():
query = """AppRequests"""

# returns LogsQueryResult
-response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, include_visualization=True, include_statistics=True)
+response = await client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, include_visualization=True, include_statistics=True)

assert response.visualization is not None
assert response.statistics is not None
@@ -186,7 +186,7 @@ async def test_logs_query_result_iterate_over_tables():

query = "AppRequests; AppRequests | take 5"

-response = await client.query(
+response = await client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
query,
timespan=None,
@@ -210,7 +210,7 @@ async def test_logs_query_result_row_type():

query = "AppRequests | take 5"

-response = await client.query(
+response = await client.query_workspace(
os.environ['LOG_WORKSPACE_ID'],
query,
timespan=None,
@@ -18,7 +18,7 @@ def _credential():
async def test_metrics_auth():
credential = _credential()
client = MetricsQueryClient(credential)
-response = await client.query(
+response = await client.query_resource(
os.environ['METRICS_RESOURCE_URI'],
metric_names=["MatchedEventCount"],
timespan=timedelta(days=1),
@@ -32,7 +32,7 @@ async def test_metrics_granularity():
async def test_metrics_granularity():
credential = _credential()
client = MetricsQueryClient(credential)
-response = await client.query(
+response = await client.query_resource(
os.environ['METRICS_RESOURCE_URI'],
metric_names=["MatchedEventCount"],
timespan=timedelta(days=1),
@@ -46,7 +46,7 @@ def run_sync(self):
Avoid putting any ancillary logic (e.g. generating UUIDs), and put this in the setup/init instead
so that we're only measuring the client API call.
"""
-self.metrics_client.query(
+self.metrics_client.query_resource(
self.metrics_uri,
self.names,
aggregations=self.aggregations
@@ -59,7 +59,7 @@ async def run_async(self):
Avoid putting any ancillary logic (e.g. generating UUIDs), and put this in the setup/init instead
so that we're only measuring the client API call.
"""
-await self.async_metrics_client.query(
+await self.async_metrics_client.query_resource(
self.metrics_uri,
self.names,
aggregations=self.aggregations
@@ -48,7 +48,7 @@ def run_sync(self):
"""
start_time=datetime(2021, 7, 25, 0, 0, 0, tzinfo=timezone.utc)
end_time=datetime(2021, 7, 26, 0, 0, 0, tzinfo=timezone.utc)
-self.logs_client.query(
+self.logs_client.query_workspace(
self.workspace_id,
self.query,
timespan=(start_time, end_time)
@@ -63,7 +63,7 @@ async def run_async(self):
"""
start_time=datetime(2021, 7, 25, 0, 0, 0, tzinfo=timezone.utc)
end_time=datetime(2021, 7, 26, 0, 0, 0, tzinfo=timezone.utc)
-await self.async_logs_client.query(
+await self.async_logs_client.query_workspace(
self.workspace_id,
self.query,
timespan=(start_time, end_time)
4 changes: 2 additions & 2 deletions sdk/monitor/azure-monitor-query/tests/test_exceptions.py
@@ -18,7 +18,7 @@ def test_logs_single_query_fatal_exception():
credential = _credential()
client = LogsQueryClient(credential)
with pytest.raises(HttpResponseError):
-client.query('bad_workspace_id', 'AppRequests', timespan=None)
+client.query_workspace('bad_workspace_id', 'AppRequests', timespan=None)

@pytest.mark.live_test_only
def test_logs_single_query_partial_exception():
@@ -27,7 +27,7 @@ def test_logs_single_query_partial_exception():
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
-response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
+response = client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
assert response.__class__ == LogsQueryPartialResult
assert response.partial_error is not None
assert response.partial_data is not None
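These tests assert on the concrete result classes; application code can instead branch on the `status` field, as the partial-error sample earlier in the diff does. A small sketch combining the fatal and partial cases (illustration only, assuming `DefaultAzureCredential` and the same `LOG_WORKSPACE_ID` variable used throughout):

```python
import os
from datetime import timedelta

from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient, LogsQueryStatus

client = LogsQueryClient(DefaultAzureCredential())

try:
    response = client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        "AppRequests | take 5",
        timespan=timedelta(days=1),
    )
except HttpResponseError as err:
    # Fatal errors (e.g. a bad workspace id) are still raised, as in the tests above.
    print("Query failed:", err)
else:
    if response.status == LogsQueryStatus.PARTIAL:
        # LogsQueryPartialResult: the error plus whatever rows did come back.
        print(response.partial_error)
        tables = response.partial_data
    else:
        # LogsQueryResult: the full result set.
        tables = response.tables
    for table in tables:
        print(table.name, len(table.rows))
```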