diff --git a/datadog_checks_base/datadog_checks/base/stubs/aggregator.py b/datadog_checks_base/datadog_checks/base/stubs/aggregator.py
index 3f6b5621e538e..6e0eaf898d757 100644
--- a/datadog_checks_base/datadog_checks/base/stubs/aggregator.py
+++ b/datadog_checks_base/datadog_checks/base/stubs/aggregator.py
@@ -7,10 +7,9 @@
 
 from six import iteritems
 
-from datadog_checks.base.stubs.common import HistogramBucketStub, MetricStub, ServiceCheckStub
-from datadog_checks.base.stubs.similar import build_similar_elements_msg
-
 from ..utils.common import ensure_unicode, to_native_string
+from .common import HistogramBucketStub, MetricStub, ServiceCheckStub
+from .similar import build_similar_elements_msg
 
 
 def normalize_tags(tags, sort=False):
@@ -42,6 +41,7 @@ class AggregatorStub(object):
             ('historate', 6),
         )
     )
+    METRIC_ENUM_MAP_REV = {v: k for k, v in iteritems(METRIC_ENUM_MAP)}
     GAUGE, RATE, COUNT, MONOTONIC_COUNT, COUNTER, HISTOGRAM, HISTORATE = list(METRIC_ENUM_MAP.values())
     AGGREGATE_TYPES = {COUNT, COUNTER}
     IGNORED_METRICS = {'datadog.agent.profile.memory.check_run_alloc'}
@@ -301,6 +301,45 @@ def assert_all_metrics_covered(self):
             msg += '\nMissing Metrics:{}{}'.format(prefix, prefix.join(sorted(self.not_asserted())))
         assert condition, msg
 
+    def assert_metrics_using_metadata(self, metadata_metrics, check_metric_type=True, exclude=None):
+        """
+        Assert metrics using metadata.csv.
+
+        Checking type: since we are asserting the in-app metric type (NOT the submission type),
+        asserting the type makes sense only for e2e tests (metrics collected from the agent).
+        For integration tests, set the kwarg `check_metric_type=False`.
+
+        Usage:
+
+            from datadog_checks.dev.utils import get_metadata_metrics
+            aggregator.assert_metrics_using_metadata(get_metadata_metrics())
+
+        """
+
+        exclude = exclude or []
+        errors = set()
+        for metric_name, metric_stubs in iteritems(self._metrics):
+            if metric_name in exclude:
+                continue
+            for metric_stub in metric_stubs:
+
+                if metric_stub.name not in metadata_metrics:
+                    errors.add("Expect `{}` to be in metadata.csv.".format(metric_stub.name))
+                    continue
+
+                if check_metric_type:
+                    expected_metric_type = metadata_metrics[metric_stub.name]['metric_type']
+                    actual_metric_type = AggregatorStub.METRIC_ENUM_MAP_REV[metric_stub.type]
+
+                    if expected_metric_type != actual_metric_type:
+                        errors.add(
+                            "Expect `{}` to have type `{}` but got `{}`.".format(
+                                metric_stub.name, expected_metric_type, actual_metric_type
+                            )
+                        )
+
+        assert not errors, "Metadata assertion errors using metadata.csv:\n" + "\n\t- ".join(sorted(errors))
+
     def assert_no_duplicate_all(self):
         """
         Assert no duplicate metrics and service checks have been submitted.
diff --git a/datadog_checks_dev/datadog_checks/dev/utils.py b/datadog_checks_dev/datadog_checks/dev/utils.py
index 28055f89cfa39..9742dd9cf8abe 100644
--- a/datadog_checks_dev/datadog_checks/dev/utils.py
+++ b/datadog_checks_dev/datadog_checks/dev/utils.py
@@ -5,6 +5,7 @@
 Utilities functions abstracting common operations, specially designed to be used
 by Integrations within tests.
 """
+import csv
 import inspect
 import os
 import platform
@@ -315,3 +316,14 @@ def get_ip():
     # doesn't even have to be reachable
     s.connect(('10.255.255.255', 1))
     return s.getsockname()[0]
+
+
+def get_metadata_metrics():
+    # Only called in tests of a check, so just go back one frame
+    root = find_check_root(depth=1)
+    metadata_path = os.path.join(root, 'metadata.csv')
+    metrics = {}
+    with open(metadata_path) as f:
+        for row in csv.DictReader(f):
+            metrics[row['metric_name']] = row
+    return metrics
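
For context, below is a minimal sketch of how a check's test suite could combine the two new helpers once this patch lands. The test names, the placeholder check/instance fixtures, and the use of the `aggregator` and `dd_agent_check` pytest fixtures are illustrative assumptions, not part of this diff.

# Hypothetical test module living inside a check's tests/ directory (not part of this diff).
from datadog_checks.dev.utils import get_metadata_metrics


def test_e2e(dd_agent_check):
    # Metrics collected from the agent carry the in-app type, so the default
    # type comparison against metadata.csv applies.
    aggregator = dd_agent_check()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics())


def test_integration(aggregator, check, instance):
    # `check` and `instance` are placeholder fixtures for running the check in-process.
    check.check(instance)
    # Integration tests see the submission type rather than the in-app type,
    # so skip the type check as the docstring recommends.
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_metric_type=False)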