Skip to content

Commit

Permalink
Validate metrics using metadata.csv (#6027)
Browse files Browse the repository at this point in the history
  • Loading branch information
AlexandreYang authored Apr 16, 2020
1 parent 25f06ec commit 52706b0
Show file tree
Hide file tree
Showing 2 changed files with 54 additions and 3 deletions.
45 changes: 42 additions & 3 deletions datadog_checks_base/datadog_checks/base/stubs/aggregator.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,9 @@

from six import iteritems

from datadog_checks.base.stubs.common import HistogramBucketStub, MetricStub, ServiceCheckStub
from datadog_checks.base.stubs.similar import build_similar_elements_msg

from ..utils.common import ensure_unicode, to_native_string
from .common import HistogramBucketStub, MetricStub, ServiceCheckStub
from .similar import build_similar_elements_msg


def normalize_tags(tags, sort=False):
Expand Down Expand Up @@ -42,6 +41,7 @@ class AggregatorStub(object):
('historate', 6),
)
)
METRIC_ENUM_MAP_REV = {v: k for k, v in iteritems(METRIC_ENUM_MAP)}
GAUGE, RATE, COUNT, MONOTONIC_COUNT, COUNTER, HISTOGRAM, HISTORATE = list(METRIC_ENUM_MAP.values())
AGGREGATE_TYPES = {COUNT, COUNTER}
IGNORED_METRICS = {'datadog.agent.profile.memory.check_run_alloc'}
Expand Down Expand Up @@ -301,6 +301,45 @@ def assert_all_metrics_covered(self):
msg += '\nMissing Metrics:{}{}'.format(prefix, prefix.join(sorted(self.not_asserted())))
assert condition, msg

def assert_metrics_using_metadata(self, metadata_metrics, check_metric_type=True, exclude=None):
    """Assert that every submitted metric is declared in metadata.csv.

    Checking type: since we are asserting the in-app metric type (NOT the
    submission type), asserting the type only makes sense for e2e tests
    (metrics collected from the agent). For integration tests, set kwarg
    ``check_metric_type=False``.

    :param metadata_metrics: mapping of metric name -> metadata.csv row, as
        returned by ``datadog_checks.dev.utils.get_metadata_metrics``.
    :param check_metric_type: also verify each metric's in-app type against
        the ``metric_type`` column of metadata.csv.
    :param exclude: iterable of metric names to skip entirely.

    Usage:
        from datadog_checks.dev.utils import get_metadata_metrics
        aggregator.assert_metrics_using_metadata(get_metadata_metrics())
    """
    # Set gives O(1) membership tests; accepts any iterable (or None) from callers.
    exclude = set(exclude or [])
    errors = set()
    for metric_name, metric_stubs in iteritems(self._metrics):
        if metric_name in exclude:
            continue
        for metric_stub in metric_stubs:
            if metric_stub.name not in metadata_metrics:
                errors.add("Expect `{}` to be in metadata.csv.".format(metric_stub.name))
                continue

            if check_metric_type:
                expected_metric_type = metadata_metrics[metric_stub.name]['metric_type']
                # Stub types are stored as enum ints; map back to the name used in metadata.csv.
                actual_metric_type = AggregatorStub.METRIC_ENUM_MAP_REV[metric_stub.type]

                if expected_metric_type != actual_metric_type:
                    errors.add(
                        "Expect `{}` to have type `{}` but got `{}`.".format(
                            metric_stub.name, expected_metric_type, actual_metric_type
                        )
                    )

    # Prefix EVERY error with "\n\t- " so the first error gets a bullet too
    # (joining with the separator alone left the first entry unbulleted).
    assert not errors, "Metadata assertion errors using metadata.csv:" + "".join(
        "\n\t- {}".format(err) for err in sorted(errors)
    )

def assert_no_duplicate_all(self):
"""
Assert no duplicate metrics and service checks have been submitted.
Expand Down
12 changes: 12 additions & 0 deletions datadog_checks_dev/datadog_checks/dev/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
Utilities functions abstracting common operations, specially designed to be used
by Integrations within tests.
"""
import csv
import inspect
import os
import platform
Expand Down Expand Up @@ -315,3 +316,14 @@ def get_ip():
# doesn't even have to be reachable
s.connect(('10.255.255.255', 1))
return s.getsockname()[0]


def get_metadata_metrics():
    """Return the check's metadata.csv contents as a dict keyed by metric name."""
    # This helper is only invoked from a check's own tests, so the check root
    # is exactly one stack frame up.
    check_root = find_check_root(depth=1)
    csv_path = os.path.join(check_root, 'metadata.csv')
    with open(csv_path) as csv_file:
        # Each row is kept whole (as an ordered mapping of column -> value),
        # indexed by its 'metric_name' column.
        return {line['metric_name']: line for line in csv.DictReader(csv_file)}

0 comments on commit 52706b0

Please sign in to comment.