Validate metrics using metadata.csv #6027

Merged · 3 commits · Apr 16, 2020
Changes from 2 commits
45 changes: 42 additions & 3 deletions datadog_checks_base/datadog_checks/base/stubs/aggregator.py
@@ -7,10 +7,9 @@

from six import iteritems

-from datadog_checks.base.stubs.common import HistogramBucketStub, MetricStub, ServiceCheckStub
-from datadog_checks.base.stubs.similar import build_similar_elements_msg
-
 from ..utils.common import ensure_unicode, to_native_string
+from .common import HistogramBucketStub, MetricStub, ServiceCheckStub
+from .similar import build_similar_elements_msg


def normalize_tags(tags, sort=False):
@@ -42,6 +41,7 @@ class AggregatorStub(object):
            ('historate', 6),
        )
    )
    METRIC_ENUM_MAP_REV = {v: k for k, v in iteritems(METRIC_ENUM_MAP)}
    GAUGE, RATE, COUNT, MONOTONIC_COUNT, COUNTER, HISTOGRAM, HISTORATE = list(METRIC_ENUM_MAP.values())
    AGGREGATE_TYPES = {COUNT, COUNTER}
    IGNORED_METRICS = {'datadog.agent.profile.memory.check_run_alloc'}
@@ -301,6 +301,45 @@ def assert_all_metrics_covered(self):
        msg += '\nMissing Metrics:{}{}'.format(prefix, prefix.join(sorted(self.not_asserted())))
        assert condition, msg

    def assert_metrics_using_metadata(self, metadata_metrics, check_metric_type=True, exclude=None):
Contributor:

wdyt? since more integrations would have integration tests and not e2e

Suggested change:
-    def assert_metrics_using_metadata(self, metadata_metrics, check_metric_type=True, exclude=None):
+    def assert_metrics_using_metadata(self, metadata_metrics, check_metric_type=False, exclude=None):

Member (author):

@ofek
I think this method has the most value for e2e since checking the type is quite valuable.

Having check_metric_type=True by default will encourage that. WDYT?

"""
Assert metrics using metadata.csv

Checking type: Since we are asserting the in-app metric type (NOT submission type),
asserting the type make sense only for e2e (metrics collected from agent).
For integration tests, set kwarg `check_metric_type=False`.

Usage:

from datadog_checks.dev.utils import get_metadata_metrics
aggregator.assert_metrics_using_metadata(get_metadata_metrics())

"""

exclude = exclude or []
errors = set()
for metric_name, metric_stubs in iteritems(self._metrics):
if metric_name in exclude:
continue
for metric_stub in metric_stubs:

if metric_stub.name not in metadata_metrics:
errors.add("Expect `{}` to be in metadata.csv.".format(metric_stub.name))
continue

if check_metric_type:
expected_metric_type = metadata_metrics[metric_stub.name]['metric_type']
actual_metric_type = AggregatorStub.METRIC_ENUM_MAP_REV[metric_stub.type]

if expected_metric_type != actual_metric_type:
errors.add(
"Expect `{}` to have type `{}` but got `{}`.".format(
metric_stub.name, expected_metric_type, actual_metric_type
)
)

assert not errors, "Metadata assertion errors using metadata.csv: " + "\n\t- ".join([""] + sorted(list(errors)))

def assert_no_duplicate_all(self):
"""
Assert no duplicate metrics and service checks have been submitted.
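An aside, not part of the PR: a rough sketch of how a check's test suite could exercise the new assertion above together with the get_metadata_metrics() helper added below, following the docstring and the review discussion. The dd_agent_check and aggregator fixtures are the standard ones from the datadog_checks.dev pytest plugin; the check and instance fixtures are assumed to be defined in the integration's own conftest.py, and the metric types come from that check's metadata.csv.

import pytest

from datadog_checks.dev.utils import get_metadata_metrics


@pytest.mark.e2e
def test_e2e(dd_agent_check, instance):
    # Metrics collected through the agent carry the in-app type, so the
    # default check_metric_type=True is meaningful here.
    aggregator = dd_agent_check(instance)
    aggregator.assert_metrics_using_metadata(get_metadata_metrics())


def test_integration(aggregator, check, instance):
    # Integration tests only see the submission type, which may differ from the
    # in-app type listed in metadata.csv, hence check_metric_type=False as the
    # docstring recommends.
    check.check(instance)
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_metric_type=False)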
12 changes: 12 additions & 0 deletions datadog_checks_dev/datadog_checks/dev/utils.py
@@ -5,6 +5,7 @@
Utility functions abstracting common operations, specially designed to be used
by Integrations within tests.
"""
import csv
import inspect
import os
import platform
@@ -315,3 +316,14 @@ def get_ip():
    # doesn't even have to be reachable
    s.connect(('10.255.255.255', 1))
    return s.getsockname()[0]


def get_metadata_metrics():
# Only called in tests of a check, so just go back one frame
root = find_check_root(depth=1)
metadata_path = os.path.join(root, 'metadata.csv')
metrics = {}
with open(metadata_path) as f:
for row in csv.DictReader(f):
metrics[row['metric_name']] = row
return metrics
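For illustration only, a standalone sketch of the parsing get_metadata_metrics() performs, using an in-memory CSV in place of a check's metadata.csv (the metric names and the columns beyond metric_name/metric_type are made up):

import csv
import io

sample = io.StringIO(
    "metric_name,metric_type,unit_name,description\n"
    "my_check.connections,gauge,connection,Current number of connections\n"
    "my_check.requests.count,count,request,Requests served\n"
)

# Same shape as get_metadata_metrics(): each full CSV row keyed by metric name,
# which is what assert_metrics_using_metadata() looks up by metric_stub.name.
metrics = {row['metric_name']: row for row in csv.DictReader(sample)}
assert metrics['my_check.connections']['metric_type'] == 'gauge'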