diff --git a/CHANGELOG-developer.next.asciidoc b/CHANGELOG-developer.next.asciidoc
index 462408009138..97609354e159 100644
--- a/CHANGELOG-developer.next.asciidoc
+++ b/CHANGELOG-developer.next.asciidoc
@@ -151,6 +151,7 @@ The list below covers the major changes between 7.0.0-rc2 and main only.
 - Add an option to disable event normalization when creating a `beat.Client`. {pull}33657[33657]
 - Add the file path of the instance lock on the error when it is already locked {pull}33788[33788]
 - Add DropFields processor to js API {pull}33458[33458]
+- Add support for different folders when testing data {pull}34467[34467]
 
 ==== Deprecated
 
diff --git a/metricbeat/mb/testing/data/README.md b/metricbeat/mb/testing/data/README.md
index 348f102d234b..9ab389b7c1f3 100644
--- a/metricbeat/mb/testing/data/README.md
+++ b/metricbeat/mb/testing/data/README.md
@@ -18,15 +18,18 @@ An alternative is to just run from metricbeat `mage mockedTests` to achieve the
 
 ### Available flags / environment variables
 
-- `-data`: It will regenerate the _expected_ JSON file with the output of an event an place it within `testdata` folder. For example: `go test . -data`. If using mage, a environment variable `GENERATE` is available to
+- `-data`: It will regenerate the _expected_ JSON file with the output of an event and place it within the `testdata` folder. For example: `go test . -data`. If using mage, an environment variable `GENERATE` is available to achieve the same result.
 - `-module`: Test only the specified module. For example `go test . -module=apache`. If using mage, the `MODULE` environment variable must be set with the name of the _module_ to test.
 
 > You can also combine both flags with `go test . -data -module=apache` to generate files for the Apache module only.
 
 ### Available settings in `config.yml`
 
+- `path`: (string) Path to the directory containing the files to read. Default is "_meta/testdata".
+- `writepath`: (string) Path to the directory where the _expected_ files are going to be written. Default is "_meta/testdata".
+- `datapath`: (string) Path to the directory where the `data.json` file is going to be written. Default is "_meta". The `data.json` file is used to render example events published in our official documentation and can be created when testing the module.
 - `type`: (string) The type of the test to run. At the moment, only `http` is supported.
-- `url`: (string) This is the URL path that the module usually fetches to get metrics. For example, in case of Apache module this url is `/server-status?auto=`
+- `url`: (string) This is the URL path that the module usually fetches to get metrics. For example, in the case of the Apache module this URL is `/server-status?auto=`
 - `suffix`: (string) The suffix that the input file has. By default `json`; other common suffixes are `plain` (string) for plain text files.
 - `omit_documented_fields_check`: (List of strings) Some fields generated by the modules are completely dynamic, so they aren't documented in `fields.yml`. Set a list of fields or paths in your metricset that might not be documented, like `apache.status.*` for all fields within the `apache.status` object, or `apache.status.hostname` just for that specific field. You can even omit all fields using `*`.
 - `remove_fields_from_comparison`: (List of strings) Some fields must be removed for byte-to-byte comparison but they must be printed anyway in the _expected_ JSON files. Write a list of those fields here. For example, `apache.status.hostname` in the Apache module was generating a new port on each run so a comparison wasn't possible. Set one item with `apache.status.hostname` to omit this field when comparing outputs.
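+
+For example, a hypothetical `config.yml` combining these settings (the values below simply mirror the Apache example above and the documented defaults) might look like:
+
+```yaml
+type: http
+url: "/server-status?auto="
+suffix: plain
+path: "_meta/testdata"
+writepath: "_meta/testdata"
+datapath: "_meta"
+remove_fields_from_comparison:
+  - "apache.status.hostname"
+module:
+  namespace: test
+```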
diff --git a/metricbeat/mb/testing/data/data_test.go b/metricbeat/mb/testing/data/data_test.go
index d855b18b752b..028c7a546840 100644
--- a/metricbeat/mb/testing/data/data_test.go
+++ b/metricbeat/mb/testing/data/data_test.go
@@ -49,7 +49,7 @@ func TestAll(t *testing.T) {
 		} else {
 			config := mbtest.ReadDataConfig(t, f)
-			mbtest.TestDataFilesWithConfig(t, moduleName, metricSetName, config)
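+			// the modules path is passed as testPrefix so that the configured paths
+			// resolve under module/<moduleName>/<metricSetName> (see TestDataFilesWithConfig)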
+			mbtest.TestDataFilesWithConfig(t, moduleName, metricSetName, config, getModulesPath())
 		}
 	})
 }
diff --git a/metricbeat/mb/testing/testdata.go b/metricbeat/mb/testing/testdata.go
index c05c7333f656..dcc16f00da4b 100644
--- a/metricbeat/mb/testing/testdata.go
+++ b/metricbeat/mb/testing/testdata.go
@@ -42,8 +42,10 @@ import (
 )
 
 const (
-	expectedExtension = "-expected.json"
-	applicationJson   = "application/json"
+	expectedExtension  = "-expected.json"
+	applicationJson    = "application/json"
+	expectedFolder     = "_meta/testdata"
+	expectedDataFolder = "_meta"
 )
 
 // DataConfig is the configuration for testdata tests
@@ -55,21 +57,27 @@ const (
 // url: "/server-status?auto="
 // suffix: plain
 // omit_documented_fields_check:
-//   - "apache.status.hostname"
+//   - "apache.status.hostname"
+//
 // remove_fields_from_comparison:
 //   - "apache.status.hostname"
 // module:
-//   namespace: test
+//
+//	namespace: test
+//
 // ```
 // A test will be run for each file with the `plain` extension in the same directory
 // where a file with this configuration is placed.
 type DataConfig struct {
-	// Path is the directory containing this configuration
+	// Path is the directory containing the files to read
 	Path string
 
 	// WritePath is the path where to write the generated files
 	WritePath string
 
+	// DataPath is the directory where the data.json file is written
+	DataPath string
+
 	// The type of the test to run, usually `http`.
 	Type string
 
@@ -111,8 +119,9 @@ type DataConfig struct {
 func defaultDataConfig() DataConfig {
 	return DataConfig{
-		Path:        ".",
-		WritePath:   ".",
+		Path:        expectedFolder,
+		WritePath:   expectedFolder,
+		DataPath:    expectedDataFolder,
 		Suffix:      "json",
 		ContentType: applicationJson,
 	}
 }
@@ -122,8 +131,7 @@ func defaultDataConfig() DataConfig {
 func ReadDataConfig(t *testing.T, f string) DataConfig {
 	t.Helper()
 	config := defaultDataConfig()
-	config.Path = filepath.Dir(f)
-	config.WritePath = filepath.Dir(config.Path)
+
 	configFile, err := ioutil.ReadFile(f)
 	if err != nil {
 		t.Fatalf("failed to read '%s': %v", f, err)
 	}
@@ -139,25 +147,42 @@
 // from the usual path
 func TestDataConfig(t *testing.T) DataConfig {
 	t.Helper()
-	return ReadDataConfig(t, "_meta/testdata/config.yml")
+	f := filepath.Join(expectedFolder, "config.yml")
+	return ReadDataConfig(t, f)
 }
 
 // TestDataFiles run tests with config from the usual path (`_meta/testdata`)
 func TestDataFiles(t *testing.T, module, metricSet string) {
 	t.Helper()
 	config := TestDataConfig(t)
-	TestDataFilesWithConfig(t, module, metricSet, config)
+	TestDataFilesWithConfig(t, module, metricSet, config, "")
 }
 
 // TestDataFilesWithConfig run tests for a testdata config
-func TestDataFilesWithConfig(t *testing.T, module, metricSet string, config DataConfig) {
+func TestDataFilesWithConfig(t *testing.T, module, metricSet string, config DataConfig, testPrefix string) {
 	t.Helper()
-	ff, err := filepath.Glob(filepath.Join(config.Path, "*."+config.Suffix))
+
+	// the glob pattern locating the files to read
+	location := filepath.Join(config.Path, "*."+config.Suffix)
+
+	// if this function was called from data_test.go, testPrefix is set instead of the default empty string
+	if testPrefix != "" {
+		// the prefix for read and write files should be ../../../module/moduleName/metricsetName
+		prefix := filepath.Join(testPrefix, module, metricSet)
+		location = filepath.Join(prefix, location)
+
+		// the prefix needs to be prepended to the paths of the expected files and the input files
+		config.WritePath = filepath.Join(prefix, config.WritePath)
+		config.Path = filepath.Join(prefix, config.Path)
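+		// unlike Path and WritePath, DataPath is reset to the default `_meta`
+		// folder under the prefix, so data.json always lands in the module's _meta directory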
+		config.DataPath = filepath.Join(prefix, expectedDataFolder)
+	}
+	ff, err := filepath.Glob(location)
+
 	if err != nil {
 		t.Fatal(err)
 	}
 	if len(ff) == 0 {
-		t.Fatalf("test path with config but without data files: %s", config.Path)
+		t.Fatalf("read test path without data files: %s", config.Path)
 	}
 
 	var files []string
@@ -193,6 +218,26 @@ func TestMetricsetFieldsDocumented(t *testing.T, metricSet mb.MetricSet, events
 }
 
 func runTest(t *testing.T, file string, module, metricSetName string, config DataConfig) {
+	filename := filepath.Base(file)
+	/*
+		If the expected suffix is '.json' we need to exclude the data.json file, since
+		by the end of this function we may create a data.json file if there is a docs file with the config suffix:
+			if strings.HasSuffix(file, "docs."+config.Suffix) {
+				writeDataJSON(t, data[0], filepath.Join(config.DataPath, "data.json"))
+			}
+		Since the expected file name is obtained through filename + expectedExtension, we could end up testing files like:
+			Metrics file: data.json
+			Expected metrics file: data.json-expected.json
+		if the config extension is '.json'.
+		This is not possible, since running `go test . -data` does not produce an expected file for data.json files. This is why
+		we need to exclude this file from the tests.
+	*/
+	if filename == "data.json" {
+		return
+	}
+
+	t.Logf("Testing %s file\n", file)
+
 	// starts a server serving the given file under the given url
 	s := server(t, file, config.URL, config.ContentType)
 	defer s.Close()
@@ -242,22 +287,25 @@ func runTest(t *testing.T, file string, module, metricSetName string, config Dat
 			err, module, metricSetName)
 	}
 
+	expectedFile := filepath.Join(config.WritePath, filename+expectedExtension)
+
 	// Overwrites the golden files if run with -generate
 	if *flags.DataFlag {
 		outputIndented, err := json.MarshalIndent(&data, "", " ")
 		if err != nil {
 			t.Fatal(err)
 		}
-		if err = ioutil.WriteFile(file+expectedExtension, outputIndented, 0644); err != nil {
+		if err = ioutil.WriteFile(expectedFile, outputIndented, 0644); err != nil {
 			t.Fatal(err)
 		}
 	}
 
 	// Read expected file
-	expected, err := ioutil.ReadFile(file + expectedExtension)
+	expected, err := ioutil.ReadFile(expectedFile)
 	if err != nil {
 		t.Fatalf("could not read file: %s", err)
 	}
+	t.Logf("Expected %s file\n", expectedFile)
 
 	expectedMap := []mapstr.M{}
 	if err := json.Unmarshal(expected, &expectedMap); err != nil {
@@ -307,7 +355,7 @@ func runTest(t *testing.T, file string, module, metricSetName string, config Dat
 	}
 
 	if strings.HasSuffix(file, "docs."+config.Suffix) {
-		writeDataJSON(t, data[0], filepath.Join(config.WritePath, "data.json"))
+		writeDataJSON(t, data[0], filepath.Join(config.DataPath, "data.json"))
 	}
 }
 
@@ -369,7 +417,7 @@ func documentedFieldCheck(foundKeys mapstr.M, knownKeys map[string]interface{},
 			splits := strings.Split(foundKey, ".")
 			found := false
 			for pos := 1; pos < len(splits)-1; pos++ {
-				key := strings.Join(splits[0:pos], ".") + ".*." + strings.Join(splits[pos+1:len(splits)], ".")
+				key := strings.Join(splits[0:pos], ".") + ".*." + strings.Join(splits[pos+1:], ".")
 				if _, ok := knownKeys[key]; ok {
 					found = true
 					break
diff --git a/metricbeat/module/openmetrics/collector/_meta/data.json b/metricbeat/module/openmetrics/collector/_meta/data.json
index ebbb4a0efd20..425e2d297c0b 100644
--- a/metricbeat/module/openmetrics/collector/_meta/data.json
+++ b/metricbeat/module/openmetrics/collector/_meta/data.json
@@ -11,10 +11,12 @@
     },
     "openmetrics": {
         "labels": {
-            "job": "openmetrics"
+            "job": "openmetrics",
+            "listener_name": "http"
         },
         "metrics": {
-            "up": 1
+            "net_conntrack_listener_conn_accepted_total": 3,
+            "net_conntrack_listener_conn_closed_total": 0
         }
     },
     "service": {
diff --git a/metricbeat/module/openmetrics/collector/_meta/samelabeltestdata/config.yml b/metricbeat/module/openmetrics/collector/_meta/samelabeltestdata/config.yml
index a8369b90cf22..591952313f27 100644
--- a/metricbeat/module/openmetrics/collector/_meta/samelabeltestdata/config.yml
+++ b/metricbeat/module/openmetrics/collector/_meta/samelabeltestdata/config.yml
@@ -2,6 +2,8 @@ type: http
 url: "/metrics"
 content_type: "application/openmetrics-text"
 suffix: plain
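+# read input files and write expected files here instead of the default _meta/testdata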
"instance": "127.0.0.1:42911", "job": "openmetrics" }, "metrics": { @@ -35,7 +35,7 @@ }, "openmetrics": { "labels": { - "instance": "127.0.0.1:55922", + "instance": "127.0.0.1:42911", "job": "openmetrics", "listener_name": "http" }, @@ -49,4 +49,4 @@ "type": "openmetrics" } } -] +] \ No newline at end of file diff --git a/metricbeat/module/openmetrics/collector/_meta/testdata/docs.plain-expected.json b/metricbeat/module/openmetrics/collector/_meta/testdata/docs.plain-expected.json index 04dd247087df..9f8a858a1ef8 100644 --- a/metricbeat/module/openmetrics/collector/_meta/testdata/docs.plain-expected.json +++ b/metricbeat/module/openmetrics/collector/_meta/testdata/docs.plain-expected.json @@ -10,14 +10,16 @@ "period": 10000 }, "openmetrics": { + "help": "Total number of connections opened to the listener of a given name.", "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics" + "instance": "127.0.0.1:37409", + "job": "openmetrics", + "listener_name": "http" }, "metrics": { - "up": 1 + "net_conntrack_listener_conn_accepted_total": 3 }, - "type":"gauge" + "type": "counter" }, "service": { "address": "127.0.0.1:55555", @@ -35,16 +37,14 @@ "period": 10000 }, "openmetrics": { - "help": "Total number of connections opened to the listener of a given name.", "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics", - "listener_name": "http" + "instance": "127.0.0.1:37409", + "job": "openmetrics" }, "metrics": { - "net_conntrack_listener_conn_accepted_total": 3 + "up": 1 }, - "type":"counter" + "type": "gauge" }, "service": { "address": "127.0.0.1:55555", @@ -64,18 +64,18 @@ "openmetrics": { "help": "Total number of connections closed that were made to the listener of a given name.", "labels": { - "instance": "127.0.0.1:55922", + "instance": "127.0.0.1:37409", "job": "openmetrics", "listener_name": "http" }, "metrics": { "net_conntrack_listener_conn_closed_total": 0 }, - "type":"counter" + "type": "counter" }, "service": { "address": "127.0.0.1:55555", "type": "openmetrics" } } -] +] \ No newline at end of file diff --git a/metricbeat/module/openmetrics/collector/_meta/testdata/openmetrics-features.plain-expected.json b/metricbeat/module/openmetrics/collector/_meta/testdata/openmetrics-features.plain-expected.json index ce7febdc874c..a7bdbd98902c 100644 --- a/metricbeat/module/openmetrics/collector/_meta/testdata/openmetrics-features.plain-expected.json +++ b/metricbeat/module/openmetrics/collector/_meta/testdata/openmetrics-features.plain-expected.json @@ -10,16 +10,16 @@ "period": 10000 }, "openmetrics": { - "help": "When my_counter was last incremented", "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics" + "instance": "127.0.0.1:33419", + "job": "openmetrics", + "name": "metrics collector", + "version": "8.2.7" }, "metrics": { - "my_counter_last_increment_timestamp_milliseconds": 123 + "collector_info": 1 }, - "type":"gauge", - "unit":"milliseconds" + "type": "info" }, "service": { "address": "127.0.0.1:55555", @@ -38,13 +38,14 @@ }, "openmetrics": { "labels": { - "instance": "127.0.0.1:55922", + "category": "shirts", + "instance": "127.0.0.1:33419", "job": "openmetrics" }, "metrics": { - "up": 1 + "enable_category": 1 }, - "type":"gauge" + "type": "stateset" }, "service": { "address": "127.0.0.1:55555", @@ -62,16 +63,15 @@ "period": 10000 }, "openmetrics": { - "help": "Count total disk errors", "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics", - "type": "netapp" + "category": "shoes", + "instance": "127.0.0.1:33419", + "job": 
"openmetrics" }, "metrics": { - "disk_errors_total": 17 + "enable_category": 0 }, - "type":"counter" + "type": "stateset" }, "service": { "address": "127.0.0.1:55555", @@ -89,16 +89,20 @@ "period": 10000 }, "openmetrics": { + "exemplar": { + "cnt_rulefires_deployment_total": 0.67, + "labels": { + "trace_id": "KOO5S4vxi0o" + } + }, "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics", - "name": "open metrics collector", - "version": "6.3.9" + "instance": "127.0.0.1:33419", + "job": "openmetrics" }, "metrics": { - "app_info": 1 + "cnt_rulefires_deployment_total": 66666 }, - "type":"info" + "type": "counter" }, "service": { "address": "127.0.0.1:55555", @@ -117,15 +121,13 @@ }, "openmetrics": { "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics", - "name": "metrics collector", - "version": "8.2.7" + "instance": "127.0.0.1:33419", + "job": "openmetrics" }, "metrics": { - "collector_info": 1 + "up": 1 }, - "type":"info" + "type": "gauge" }, "service": { "address": "127.0.0.1:55555", @@ -144,14 +146,15 @@ }, "openmetrics": { "labels": { - "instance": "127.0.0.1:55922", + "instance": "127.0.0.1:33419", "job": "openmetrics", - "category": "shoes" + "name": "open metrics collector", + "version": "6.3.9" }, "metrics": { - "enable_category": 0 + "app_info": 1 }, - "type":"stateset" + "type": "info" }, "service": { "address": "127.0.0.1:55555", @@ -169,15 +172,16 @@ "period": 10000 }, "openmetrics": { + "help": "When my_counter was last incremented", "labels": { - "instance": "127.0.0.1:55922", - "job": "openmetrics", - "category": "shirts" + "instance": "127.0.0.1:33419", + "job": "openmetrics" }, "metrics": { - "enable_category": 1 + "my_counter_last_increment_timestamp_milliseconds": 123 }, - "type":"stateset" + "type": "gauge", + "unit": "milliseconds" }, "service": { "address": "127.0.0.1:55555", @@ -195,15 +199,16 @@ "period": 10000 }, "openmetrics": { + "help": "Count total disk errors", "labels": { - "instance": "127.0.0.1:55922", + "instance": "127.0.0.1:33419", "job": "openmetrics", - "category": "shades" + "type": "netapp" }, "metrics": { - "enable_category": 0 + "disk_errors_total": 17 }, - "type":"stateset" + "type": "counter" }, "service": { "address": "127.0.0.1:55555", @@ -221,14 +226,25 @@ "period": 10000 }, "openmetrics": { + "exemplar": { + "labels": { + "trace_id": "0d482-ac43e-d9320-debfe" + }, + "process_cpu_seconds_total": 17, + "timestamp": 1622302012000 + }, + "help": "Total user and system CPU time spent in seconds. 
+        "help": "Total user and system CPU time spent in seconds. Exemplar with timestamp and labels.",
         "labels": {
-            "instance": "127.0.0.1:55922",
+            "build": "8.2.7",
+            "entity": "controller",
+            "instance": "127.0.0.1:33419",
             "job": "openmetrics"
         },
         "metrics": {
-            "connection_errors": 42
+            "process_cpu_seconds_total": 11111
         },
-        "type":"unknown"
+        "type": "counter",
+        "unit": "seconds"
     },
     "service": {
         "address": "127.0.0.1:55555",
@@ -246,20 +262,15 @@
         "period": 10000
     },
     "openmetrics": {
-        "exemplar": {
-            "cnt_rulefires_deployment_total":0.67,
-            "labels": {
-                "trace_id":"KOO5S4vxi0o"
-            }
-        },
         "labels": {
-            "instance": "127.0.0.1:55922",
+            "category": "shades",
+            "instance": "127.0.0.1:33419",
             "job": "openmetrics"
         },
         "metrics": {
-            "cnt_rulefires_deployment_total": 66666
+            "enable_category": 0
         },
-        "type":"counter"
+        "type": "stateset"
     },
     "service": {
         "address": "127.0.0.1:55555",
@@ -277,29 +288,18 @@
         "period": 10000
     },
     "openmetrics": {
-        "exemplar": {
-            "labels": {
-                "trace_id": "0d482-ac43e-d9320-debfe"
-            },
-            "process_cpu_seconds_total": 17,
-            "timestamp": 1622302012000
-        },
-        "help": "Total user and system CPU time spent in seconds. Exemplar with timestamp and labels.",
         "labels": {
-            "instance": "127.0.0.1:55922",
-            "job": "openmetrics",
-            "entity": "controller",
-            "build": "8.2.7"
+            "instance": "127.0.0.1:33419",
+            "job": "openmetrics"
         },
         "metrics": {
-            "process_cpu_seconds_total": 11111
+            "connection_errors": 42
         },
-        "type":"counter",
-        "unit":"seconds"
+        "type": "unknown"
     },
     "service": {
         "address": "127.0.0.1:55555",
         "type": "openmetrics"
     }
 }
-]
+]
\ No newline at end of file
diff --git a/metricbeat/module/openmetrics/collector/collector_test.go b/metricbeat/module/openmetrics/collector/collector_test.go
index d2f9ab4ff97f..fa5e0845312f 100644
--- a/metricbeat/module/openmetrics/collector/collector_test.go
+++ b/metricbeat/module/openmetrics/collector/collector_test.go
@@ -44,8 +44,9 @@ func TestData(t *testing.T) {
 
 func TestSameLabels(t *testing.T) {
 	dataConfig := mbtest.ReadDataConfig(t, "_meta/samelabeltestdata/config.yml")
-	mbtest.TestDataFilesWithConfig(t, "openmetrics", "collector", dataConfig)
+	mbtest.TestDataFilesWithConfig(t, "openmetrics", "collector", dataConfig, "")
 }
+
 func TestGetOpenMetricsEventsFromMetricFamily(t *testing.T) {
 	labels := mapstr.M{
 		"handler": "query",