From 13c972b295086647b189913f9e4a6f49aa5438af Mon Sep 17 00:00:00 2001
From: Georgy Moiseev
Date: Mon, 29 Nov 2021 18:41:37 +0300
Subject: [PATCH] Add statistics for all CRUD operations on router

Add statistics module for collecting metrics of CRUD operations on
router. Wrap all CRUD operation calls in a statistics collector.
Statistics may be disabled and re-enabled.

Some internal methods of select/pairs were reworked or extended to
provide statistics info. The `cursor` returned from storage on
select/pairs now contains stats on tuple count and lookup count.

All changes are backward-compatible and should work even with older
versions of crud routers and storages.

Part of #224
---
 CHANGELOG.md                       |   1 +
 README.md                          |  61 ++++
 crud.lua                           |  47 +--
 crud/common/utils.lua              |   4 +
 crud/select.lua                    |  11 +-
 crud/select/compat/select.lua      |  13 +-
 crud/select/compat/select_old.lua  |  15 +
 crud/select/executor.lua           |  26 +-
 crud/select/merger.lua             |  12 +
 crud/stats/label.lua               |  17 ++
 crud/stats/local_registry.lua      | 116 ++++++++
 crud/stats/module.lua              | 195 +++++++++++++
 crud/stats/registry_common.lua     |  53 ++++
 test/integration/stats_test.lua    | 443 +++++++++++++++++++++++++++++
 test/unit/select_executor_test.lua |  28 +-
 test/unit/stats_test.lua           | 318 +++++++++++++++++++++
 16 files changed, 1322 insertions(+), 38 deletions(-)
 create mode 100644 crud/stats/label.lua
 create mode 100644 crud/stats/local_registry.lua
 create mode 100644 crud/stats/module.lua
 create mode 100644 crud/stats/registry_common.lua
 create mode 100644 test/integration/stats_test.lua
 create mode 100644 test/unit/stats_test.lua

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6f606f622..15f0a29c0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 ## [Unreleased]
 
 ### Added
+* Statistics for all CRUD operations on router (#224).
 
 ### Changed
 
diff --git a/README.md b/README.md
index c62a5412a..0145d8887 100644
--- a/README.md
+++ b/README.md
@@ -586,6 +586,67 @@ crud.len('customers')
 ...
 ```
 
+### Statistics
+
+`crud` routers provide statistics on called operations.
+```lua
+-- Returns a table with statistics information.
+crud.stats()
+```
+
+Statistics collection is enabled by default on all routers.
+It can be disabled and re-enabled later.
+```lua
+-- Disables statistics collection and resets all collectors.
+crud.disable_stats()
+
+-- Enables statistics collection and recreates all collectors.
+crud.enable_stats()
+```
+
+Enabling stats on non-router instances is meaningless.
+
+`crud.stats()` contains several sections: `insert` (for `insert` and `insert_object` calls),
+`get`, `replace` (for `replace` and `replace_object` calls), `update`,
+`upsert` (for `upsert` and `upsert_object` calls), `delete`,
+`select` (for `select` and `pairs` calls), `truncate`, `len` and
+`borders` (for `min` and `max` calls).
+
+```lua
+crud.stats()['insert']
+---
+- ok:
+    latency: 0.002
+    count: 19800
+    time: 35.98
+  error:
+    latency: 0.000001
+    count: 4
+    time: 0.000004
+...
+```
+Each section contains separate collectors for successful calls and
+errors (both thrown errors and `nil, err` returns). `count` is the
+total number of requests since instance start or stats restart.
+`latency` is the execution time of the last request.
+`time` is the total execution time of all requests. You can use
+`count` and `time` to compute the average execution time.
+
+Additionally, the `select` section contains `details` collectors.
+```lua
+crud.stats()['select']['details']
+---
+- map_reduces: 4
+  tuples_fetched: 10500
+  tuples_lookup: 2380000
+...
+``` +`map_reduces` is a count of planned map reduces +(including those not executed successfully). `tuples_fetched` +is a count of tuples fetched from storages during execution, +`tuples_lookup` is a count of tuples looked up on storages +while collecting response for call. + ## Cartridge roles `cartridge.roles.crud-storage` is a Tarantool Cartridge role that depends on the diff --git a/crud.lua b/crud.lua index 2777013e8..31b31dee9 100644 --- a/crud.lua +++ b/crud.lua @@ -14,6 +14,7 @@ local len = require('crud.len') local borders = require('crud.borders') local sharding_key = require('crud.common.sharding_key') local utils = require('crud.common.utils') +local stats = require('crud.stats.module') local crud = {} @@ -22,47 +23,47 @@ local crud = {} -- @refer insert.tuple -- @function insert -crud.insert = insert.tuple +crud.insert = stats.wrap(insert.tuple, stats.label.INSERT) -- @refer insert.object -- @function insert_object -crud.insert_object = insert.object +crud.insert_object = stats.wrap(insert.object, stats.label.INSERT) -- @refer get.call -- @function get -crud.get = get.call +crud.get = stats.wrap(get.call, stats.label.GET) -- @refer replace.tuple -- @function replace -crud.replace = replace.tuple +crud.replace = stats.wrap(replace.tuple, stats.label.REPLACE) -- @refer replace.object -- @function replace_object -crud.replace_object = replace.object +crud.replace_object = stats.wrap(replace.object, stats.label.REPLACE) -- @refer update.call -- @function update -crud.update = update.call +crud.update = stats.wrap(update.call, stats.label.UPDATE) -- @refer upsert.tuple -- @function upsert -crud.upsert = upsert.tuple +crud.upsert = stats.wrap(upsert.tuple, stats.label.UPSERT) -- @refer upsert.object -- @function upsert -crud.upsert_object = upsert.object +crud.upsert_object = stats.wrap(upsert.object, stats.label.UPSERT) -- @refer delete.call -- @function delete -crud.delete = delete.call +crud.delete = stats.wrap(delete.call, stats.label.DELETE) -- @refer select.call -- @function select -crud.select = select.call +crud.select = stats.wrap(select.call, stats.label.SELECT) -- @refer select.pairs -- @function pairs -crud.pairs = select.pairs +crud.pairs = stats.wrap_pairs(select.pairs, stats.label.SELECT) -- @refer utils.unflatten_rows -- @function unflatten_rows @@ -70,19 +71,19 @@ crud.unflatten_rows = utils.unflatten_rows -- @refer truncate.call -- @function truncate -crud.truncate = truncate.call +crud.truncate = stats.wrap(truncate.call, stats.label.TRUNCATE) -- @refer len.call -- @function len -crud.len = len.call +crud.len = stats.wrap(len.call, stats.label.LEN) -- @refer borders.min -- @function min -crud.min = borders.min +crud.min = stats.wrap(borders.min, stats.label.BORDERS) -- @refer borders.max -- @function max -crud.max = borders.max +crud.max = stats.wrap(borders.max, stats.label.BORDERS) -- @refer utils.cut_rows -- @function cut_rows @@ -92,6 +93,18 @@ crud.cut_rows = utils.cut_rows -- @function cut_objects crud.cut_objects = utils.cut_objects +-- @refer stats.get +-- @function stats +crud.stats = stats.get + +-- @refer stats.enable +-- @function enable_stats +crud.enable_stats = stats.enable + +-- @refer stats.disable +-- @function disable_stats +crud.disable_stats = stats.disable + --- Initializes crud on node -- -- Exports all functions that are used for calls @@ -118,11 +131,13 @@ function crud.init_storage() end function crud.init_router() - rawset(_G, 'crud', crud) + rawset(_G, 'crud', crud) + stats.enable() end function crud.stop_router() rawset(_G, 'crud', 
nil) + stats.disable() end function crud.stop_storage() diff --git a/crud/common/utils.lua b/crud/common/utils.lua index d7a629417..1975285ed 100644 --- a/crud/common/utils.lua +++ b/crud/common/utils.lua @@ -606,4 +606,8 @@ function utils.merge_options(opts_a, opts_b) return fun.chain(opts_a or {}, opts_b or {}):tomap() end +function utils.pass() + -- Do nothing. +end + return utils diff --git a/crud/select.lua b/crud/select.lua index 2e9010d49..571d186d1 100644 --- a/crud/select.lua +++ b/crud/select.lua @@ -59,7 +59,7 @@ local function select_on_storage(space_name, index_id, conditions, opts) end -- execute select - local tuples, err = select_executor.execute(space, index, filter_func, { + local res, err = select_executor.execute(space, index, filter_func, { scan_value = opts.scan_value, after_tuple = opts.after_tuple, tarantool_iter = opts.tarantool_iter, @@ -70,15 +70,18 @@ local function select_on_storage(space_name, index_id, conditions, opts) end local cursor - if #tuples < opts.limit or opts.limit == 0 then + if res.stats.tuples_fetched < opts.limit or opts.limit == 0 then cursor = {is_end = true} else - cursor = make_cursor(tuples) + cursor = make_cursor(res.tuples) end + -- Pass statistics data from storage with cursor. + cursor.stats = res.stats + -- getting tuples with user defined fields (if `fields` option is specified) -- and fields that are needed for comparison on router (primary key + scan key) - return cursor, schema.filter_tuples_fields(tuples, opts.field_names) + return cursor, schema.filter_tuples_fields(res.tuples, opts.field_names) end function select_module.init() diff --git a/crud/select/compat/select.lua b/crud/select/compat/select.lua index c703615c2..2d8bf5638 100644 --- a/crud/select/compat/select.lua +++ b/crud/select/compat/select.lua @@ -8,6 +8,7 @@ local dev_checks = require('crud.common.dev_checks') local common = require('crud.select.compat.common') local schema = require('crud.common.schema') local sharding_key_module = require('crud.common.sharding_key') +local stats = require('crud.stats.module') local compare_conditions = require('crud.compare.conditions') local select_plan = require('crud.select.plan') @@ -111,6 +112,11 @@ local function build_select_iterator(space_name, user_conditions, opts) if err ~= nil then return nil, err, true end + else + -- Use function call to collect map reduce count instead + -- of passing values outside to wrapper to not break + -- pairs api with excessice context values in return. 
+ stats.inc_map_reduce_count() end local tuples_limit = opts.first @@ -142,7 +148,12 @@ local function build_select_iterator(space_name, user_conditions, opts) local merger = Merger.new(replicasets_to_select, space, plan.index_id, common.SELECT_FUNC_NAME, {space_name, plan.index_id, plan.conditions, select_opts}, - {tarantool_iter = plan.tarantool_iter, field_names = plan.field_names, call_opts = opts.call_opts} + { + tarantool_iter = plan.tarantool_iter, + field_names = plan.field_names, + call_opts = opts.call_opts, + stats_callback = stats.get_fetch_callback(), + } ) -- filter space format by plan.field_names (user defined fields + primary key + scan key) diff --git a/crud/select/compat/select_old.lua b/crud/select/compat/select_old.lua index 33406edf2..55577c84e 100644 --- a/crud/select/compat/select_old.lua +++ b/crud/select/compat/select_old.lua @@ -9,6 +9,7 @@ local sharding = require('crud.common.sharding') local dev_checks = require('crud.common.dev_checks') local schema = require('crud.common.schema') local sharding_key_module = require('crud.common.sharding_key') +local stats = require('crud.stats.module') local compare_conditions = require('crud.compare.conditions') local select_plan = require('crud.select.plan') @@ -30,6 +31,7 @@ local function select_iteration(space_name, plan, opts) }) local call_opts = opts.call_opts + local stats_callback = stats.get_fetch_callback() -- call select on storages local storage_select_opts = { @@ -59,6 +61,14 @@ local function select_iteration(space_name, plan, opts) local tuples = {} for replicaset_uuid, replicaset_results in pairs(results) do + -- Stats extracted with callback here and not passed + -- outside to wrapper because fetch for pairs can be + -- called even after pairs() return from generators. + local cursor = replicaset_results[1] + if cursor.stats ~= nil then + stats_callback(cursor.stats) + end + tuples[replicaset_uuid] = replicaset_results[2] end @@ -137,6 +147,11 @@ local function build_select_iterator(space_name, user_conditions, opts) if err ~= nil then return nil, err, true end + else + -- Use function call to collect map reduce count instead + -- of passing values outside to wrapper to not break + -- pairs api with excessice context values in return. 
+ stats.inc_map_reduce_count() end -- generate tuples comparator diff --git a/crud/select/executor.lua b/crud/select/executor.lua index 6d6f74837..02d481498 100644 --- a/crud/select/executor.lua +++ b/crud/select/executor.lua @@ -69,7 +69,13 @@ function executor.execute(space, index, filter_func, opts) opts = opts or {} if opts.limit == 0 then - return {} + return { + tuples = {}, + stats = { + tuples_fetched = 0, + tuples_lookup = 0, + }, + } end local tuples = {} @@ -94,7 +100,13 @@ function executor.execute(space, index, filter_func, opts) end if tuple == nil then - return {} + return { + tuples = {}, + stats = { + tuples_fetched = 0, + tuples_lookup = 0, + }, + } end end @@ -102,6 +114,7 @@ function executor.execute(space, index, filter_func, opts) gen.state, tuple = gen(gen.param, gen.state) end + local lookup_count = 0 while true do if tuple == nil then break @@ -121,9 +134,16 @@ function executor.execute(space, index, filter_func, opts) end gen.state, tuple = gen(gen.param, gen.state) + lookup_count = lookup_count + 1 end - return tuples + return { + tuples = tuples, + stats = { + tuples_fetched = #tuples, + tuples_lookup = lookup_count, + }, + } end return executor diff --git a/crud/select/merger.lua b/crud/select/merger.lua index fa443b849..232206593 100644 --- a/crud/select/merger.lua +++ b/crud/select/merger.lua @@ -2,6 +2,7 @@ local buffer = require('buffer') local msgpack = require('msgpack') local ffi = require('ffi') local call = require('crud.common.call') +local utils = require('crud.common.utils') local compat = require('crud.common.compat') local merger_lib = compat.require('tuple.merger', 'merger') @@ -93,6 +94,7 @@ local function fetch_chunk(context, state) local replicaset = context.replicaset local vshard_call_name = context.vshard_call_name local timeout = context.timeout or call.DEFAULT_VSHARD_CALL_TIMEOUT + local stats_callback = context.stats_callback local future = state.future -- The source was entirely drained. @@ -109,6 +111,14 @@ local function fetch_chunk(context, state) -- Decode metainfo, leave data to be processed by the merger. local cursor = decode_metainfo(buf) + -- Extract stats info. + -- Stats extracted with callback here and not passed + -- outside to wrapper because fetch for pairs can be + -- called even after pairs() return from generators. + if cursor.stats ~= nil then + stats_callback(cursor.stats) + end + -- Check whether we need the next call. if cursor.is_end then local next_state = {} @@ -137,6 +147,7 @@ local function new(replicasets, space, index_id, func_name, func_args, opts) opts = opts or {} local call_opts = opts.call_opts local mode = call_opts.mode or 'read' + local stats_callback = opts.stats_callback or utils.pass local vshard_call_name = call.get_vshard_call_name(mode, call_opts.prefer_replica, call_opts.balance) -- Request a first data chunk and create merger sources. @@ -157,6 +168,7 @@ local function new(replicasets, space, index_id, func_name, func_args, opts) replicaset = replicaset, vshard_call_name = vshard_call_name, timeout = call_opts.timeout, + stats_callback = stats_callback, } local state = {future = future} local source = merger_lib.new_buffer_source(fetch_chunk, context, state) diff --git a/crud/stats/label.lua b/crud/stats/label.lua new file mode 100644 index 000000000..ee1f98266 --- /dev/null +++ b/crud/stats/label.lua @@ -0,0 +1,17 @@ +return { + -- INSERT identifies both `insert` and `insert_object`. + INSERT = 'insert', + GET = 'get', + -- REPLACE identifies both `replace` and `replace_object`. 
+    REPLACE = 'replace',
+    UPDATE = 'update',
+    -- UPSERT identifies both `upsert` and `upsert_object`.
+    UPSERT = 'upsert',
+    DELETE = 'delete',
+    -- SELECT identifies both `pairs` and `select`.
+    SELECT = 'select',
+    TRUNCATE = 'truncate',
+    LEN = 'len',
+    -- BORDERS identifies both `min` and `max`.
+    BORDERS = 'borders',
+}
diff --git a/crud/stats/local_registry.lua b/crud/stats/local_registry.lua
new file mode 100644
index 000000000..b75c6e102
--- /dev/null
+++ b/crud/stats/local_registry.lua
@@ -0,0 +1,116 @@
+local label = require('crud.stats.label')
+local dev_checks = require('crud.common.dev_checks')
+local registry_common = require('crud.stats.registry_common')
+
+local registry = {}
+local _registry = {}
+
+--- Initialize local metrics registry
+--
+-- @function init
+--
+-- @treturn boolean Returns true.
+--
+function registry.init()
+    for _, op_label in pairs(label) do
+        _registry[op_label] = registry_common.build_collector(op_label)
+    end
+
+    return true
+end
+
+--- Destroy local metrics registry
+--
+-- @function destroy
+--
+-- @treturn boolean Returns true.
+--
+function registry.destroy()
+    _registry = {}
+    return true
+end
+
+--- Get a copy of the local metrics registry
+--
+-- @function get
+--
+-- @treturn table Returns a copy of the metrics registry.
+--
+function registry.get()
+    return table.deepcopy(_registry)
+end
+
+--- Increase request count and update latency info
+--
+-- @function measure
+--
+-- @tparam string op_label
+-- Label of registry collectors.
+-- Use `require('crud.stats.label')` to pick one.
+--
+-- @tparam boolean success
+-- true if no errors on execution, false otherwise.
+--
+-- @tparam number latency
+-- Time of call execution.
+--
+-- @treturn boolean Returns true.
+--
+function registry.measure(op_label, success, latency)
+    dev_checks('string', 'boolean', 'number')
+
+    local collector
+    if success == true then
+        collector = _registry[op_label]['ok']
+    else
+        collector = _registry[op_label]['error']
+    end
+
+    collector.count = collector.count + 1
+    collector.latency = latency
+    collector.time = collector.time + latency
+
+    return true
+end
+
+--- Increase statistics of storage select/pairs calls
+--
+-- @function measure_fetch
+--
+-- @tparam number tuples_fetched
+-- Count of tuples fetched during storage call.
+--
+-- @tparam number tuples_lookup
+-- Count of tuples looked up on storages while collecting response.
+--
+-- @treturn boolean Returns true.
+--
+function registry.measure_fetch(tuples_fetched, tuples_lookup)
+    dev_checks('number', 'number')
+
+    local collector = _registry[label.SELECT].details
+
+    collector.tuples_fetched = collector.tuples_fetched + tuples_fetched
+    collector.tuples_lookup = collector.tuples_lookup + tuples_lookup
+    return true
+end
+
+--- Increase statistics of planned map reduces during select/pairs
+--
+-- @function measure_map_reduces
+--
+-- @tparam number count
+-- Count of map reduces planned.
+--
+-- @treturn boolean Returns true.
+--
+function registry.measure_map_reduces(count)
+    dev_checks('number')
+
+    local collector = _registry[label.SELECT].details
+
+    collector.map_reduces = collector.map_reduces + count
+    return true
+end
+
+return registry
diff --git a/crud/stats/module.lua b/crud/stats/module.lua
new file mode 100644
index 000000000..6d7f70220
--- /dev/null
+++ b/crud/stats/module.lua
@@ -0,0 +1,195 @@
+local clock = require('clock')
+local dev_checks = require('crud.common.dev_checks')
+local utils = require('crud.common.utils')
+
+local stats_registry = require('crud.stats.local_registry')
+
+local stats = {}
+local _is_enabled = false
+
+--- Check if the statistics module is enabled
+--
+-- @function is_enabled
+--
+-- @treturn[1] boolean Returns true or false.
+--
+function stats.is_enabled()
+    return _is_enabled
+end
+
+--- Initializes statistics registry and enables callbacks and wrappers
+--
+-- @function enable
+--
+-- @treturn[1] boolean Returns true.
+--
+function stats.enable()
+    if _is_enabled then
+        return true
+    end
+
+    stats_registry.init()
+    _is_enabled = true
+
+    return true
+end
+
+--- Destroys statistics registry and disables callbacks
+--
+-- @function disable
+--
+-- @treturn[1] boolean Returns true.
+--
+function stats.disable()
+    stats_registry.destroy()
+    _is_enabled = false
+
+    return true
+end
+
+--- Get statistics on CRUD operations
+--
+-- @function get
+--
+-- @treturn[1] table Statistics on CRUD operations.
+-- @treturn[2] table If statistics are disabled, an empty table is returned.
+--
+function stats.get()
+    return stats_registry.get()
+end
+
+--- Wrap CRUD operation call to collect statistics
+--
+-- @function wrap
+--
+-- @tparam function func
+-- Function to wrap. Errors are caught and thrown again.
+--
+-- @tparam string op_label
+-- Label of registry collectors.
+-- Use `require('crud.stats.label')` to pick one.
+--
+-- @return First two return values of the wrapped function.
+--
+function stats.wrap(func, op_label)
+    dev_checks('function', 'string')
+    return function(...)
+        if not stats.is_enabled() then
+            return func(...)
+        end
+
+        local start_time = clock.monotonic()
+        local status, res, err = pcall(func, ...)
+        local finish_time = clock.monotonic()
+
+        local success = status and (err == nil)
+        local latency = finish_time - start_time
+        stats_registry.measure(op_label, success, latency)
+
+        if status == false then
+            error(res)
+        end
+
+        return res, err
+    end
+end
+
+--- Wrap pairs operation call to collect statistics
+--
+-- @function wrap_pairs
+--
+-- @tparam function func
+-- Pairs function to wrap. Errors are caught and thrown again.
+--
+-- @tparam string op_label
+-- Label of registry collectors.
+-- Use `require('crud.stats.label')` to pick one.
+--
+-- @return Returns pairs generator.
+--
+function stats.wrap_pairs(func, op_label)
+    dev_checks('function', 'string')
+    return function(...)
+        if not stats.is_enabled() then
+            return func(...)
+        end
+
+        local start_time = clock.monotonic()
+        local status, gen, param, state = pcall(func, ...)
+        local finish_time = clock.monotonic()
+
+        local success = status
+        local latency = finish_time - start_time
+        stats_registry.measure(op_label, success, latency)
+
+        if status == false then
+            error(gen)
+        end
+
+        return gen, param, state
+    end
+end
+
+--- Callback to collect storage tuples stats (select/pairs)
+--
+-- @function update_fetch_stats
+--
+-- @tparam table storage_stats
+-- Statistics from select storage call.
+--
+-- @tfield number tuples_fetched
+-- Count of tuples fetched during storage call.
+--
+-- @tfield number tuples_lookup
+-- Count of tuples looked up on storages while collecting response.
+--
+-- @treturn boolean Returns true.
+--
+local function update_fetch_stats(storage_stats)
+    dev_checks({ tuples_fetched = 'number', tuples_lookup = 'number' })
+
+    if not stats.is_enabled() then
+        return true
+    end
+
+    stats_registry.measure_fetch(storage_stats.tuples_fetched, storage_stats.tuples_lookup)
+    return true
+end
+
+--- Returns callback to collect storage tuples stats (select/pairs)
+--
+-- @function get_fetch_callback
+--
+-- @treturn[1] function `update_fetch_stats` function to collect tuples stats.
+-- @treturn[2] function Dummy function, if stats are disabled.
+--
+function stats.get_fetch_callback()
+    if not stats.is_enabled() then
+        return utils.pass
+    end
+
+    return update_fetch_stats
+end
+
+--- Increase map reduce call stat by one (select/pairs)
+--
+-- @function inc_map_reduce_count
+--
+-- @treturn boolean Returns true.
+--
+function stats.inc_map_reduce_count()
+    if not stats.is_enabled() then
+        return true
+    end
+
+    stats_registry.measure_map_reduces(1)
+    return true
+end
+
+--- Table with CRUD operation labels
+--
+-- @table label
+--
+stats.label = require('crud.stats.label')
+
+return stats
diff --git a/crud/stats/registry_common.lua b/crud/stats/registry_common.lua
new file mode 100644
index 000000000..a98f376b3
--- /dev/null
+++ b/crud/stats/registry_common.lua
@@ -0,0 +1,53 @@
+local label = require('crud.stats.label')
+local dev_checks = require('crud.common.dev_checks')
+
+local registry_common = {}
+
+--- Build collector for local registry
+--
+-- @function build_collector
+--
+-- @tparam string op_label
+-- Label of registry collectors.
+-- Use `require('crud.stats.label')` to pick one.
+--
+-- @treturn table Returns a collector for success and error request counts and latency.
+-- +function registry_common.build_collector(op_label) + dev_checks('string') + + if op_label == label.SELECT then + return { + ok = { + count = 0, + latency = 0, + time = 0, + }, + error = { + count = 0, + latency = 0, + time = 0, + }, + details = { + tuples_fetched = 0, + tuples_lookup = 0, + map_reduces = 0, + }, + } + end + + return { + ok = { + count = 0, + latency = 0, + time = 0, + }, + error = { + count = 0, + latency = 0, + time = 0, + }, + } +end + +return registry_common \ No newline at end of file diff --git a/test/integration/stats_test.lua b/test/integration/stats_test.lua new file mode 100644 index 000000000..4b61ad077 --- /dev/null +++ b/test/integration/stats_test.lua @@ -0,0 +1,443 @@ +local fio = require('fio') +local clock = require('clock') + +local t = require('luatest') + +local helpers = require('test.helper') + +local g = t.group('stats_integration') + +g.before_all(function(g) + g.cluster = helpers.Cluster:new({ + datadir = fio.tempdir(), + server_command = helpers.entrypoint('srv_select'), + use_vshard = true, + replicasets = helpers.get_test_replicasets(), + }) + + g.cluster:start() +end) + +g.after_all(function(g) helpers.stop_cluster(g.cluster) end) + +g.before_each(function(g) + helpers.truncate_space_on_cluster(g.cluster, 'customers') +end) + +local simple_operation_cases = { + insert = { + func = 'crud.insert', + args = { + 'customers', + { 2, box.NULL, 'Ivan', 'Ivanov', 20, 'Moscow' }, + }, + op_label = 'insert', + }, + insert_object = { + func = 'crud.insert_object', + args = { + 'customers', + { id = 2, name = 'Ivan', last_name = 'Ivanov', age = 20, city = 'Moscow' }, + }, + op_label = 'insert', + }, + get = { + func = 'crud.get', + args = { 'customers', { 2 } }, + op_label = 'get', + }, + replace = { + func = 'crud.replace', + args = { + 'customers', + { 2, box.NULL, 'Ivan', 'Ivanov', 20, 'Moscow' }, + }, + op_label = 'replace', + }, + replace_object = { + func = 'crud.replace_object', + args = { + 'customers', + { id = 2, name = 'Ivan', last_name = 'Ivanov', age = 20, city = 'Moscow' }, + }, + op_label = 'replace', + }, + update = { + prepare = function(g) + helpers.insert_objects(g, 'customers', {{ + id = 2, name = 'Ivan', last_name = 'Ivanov', + age = 20, city = 'Moscow' + }}) + end, + func = 'crud.update', + args = { 'customers', 2, {{'+', 'age', 10}} }, + op_label = 'update', + }, + upsert = { + func = 'crud.upsert', + args = { + 'customers', + { 2, box.NULL, 'Ivan', 'Ivanov', 20, 'Moscow' }, + {{'+', 'age', 1}}, + }, + op_label = 'upsert', + }, + upsert_object = { + func = 'crud.upsert_object', + args = { + 'customers', + { id = 2, name = 'Ivan', last_name = 'Ivanov', age = 20, city = 'Moscow' }, + {{'+', 'age', 1}} + }, + op_label = 'upsert', + }, + delete = { + func = 'crud.delete', + args = { 'customers', { 2 } }, + op_label = 'delete', + }, + truncate = { + func = 'crud.truncate', + args = { 'customers' }, + op_label = 'truncate', + }, + len = { + func = 'crud.len', + args = { 'customers' }, + op_label = 'len', + }, + min = { + func = 'crud.min', + args = { 'customers' }, + op_label = 'borders', + }, + max = { + func = 'crud.max', + args = { 'customers' }, + op_label = 'borders', + }, + insert_error = { + func = 'crud.insert', + args = { 'notcustomers', { } }, + op_label = 'insert', + expect_error = true, + }, + insert_object_error = { + func = 'crud.insert_object', + args = { 'notcustomers', { } }, + op_label = 'insert', + expect_error = true, + }, + get_error = { + func = 'crud.get', + args = { 'notcustomers', { } }, + op_label = 'get', + 
expect_error = true, + }, + replace_error = { + func = 'crud.replace', + args = { 'notcustomers', { } }, + op_label = 'replace', + expect_error = true, + }, + replace_object_error = { + func = 'crud.replace_object', + args = { 'notcustomers', { } }, + op_label = 'replace', + expect_error = true, + }, + update_error = { + func = 'crud.update', + args = { 'notcustomers', 2, {{'+', 'age', 10}} }, + op_label = 'update', + expect_error = true, + }, + upsert_error = { + func = 'crud.upsert', + args = { 'notcustomers', { }, {{'+', 'age', 1}} }, + op_label = 'upsert', + expect_error = true, + }, + upsert_object_error = { + func = 'crud.upsert_object', + args = { 'notcustomers', { }, {{'+', 'age', 1}} }, + op_label = 'upsert', + expect_error = true, + }, + delete_error = { + func = 'crud.delete', + args = { 'notcustomers', { } }, + op_label = 'delete', + expect_error = true, + }, + truncate_error = { + func = 'crud.truncate', + args = { 'notcustomers' }, + op_label = 'truncate', + expect_error = true, + }, + len_error = { + func = 'crud.len', + args = { 'notcustomers' }, + op_label = 'len', + expect_error = true, + }, + min_error = { + func = 'crud.min', + args = { 'notcustomers' }, + op_label = 'borders', + expect_error = true, + }, + max_error = { + func = 'crud.max', + args = { 'notcustomers' }, + op_label = 'borders', + expect_error = true, + }, +} + +for name, case in pairs(simple_operation_cases) do + local test_name = ('test_%s_stats'):format(name) + + if case.prepare ~= nil then + g.before_test(test_name, case.prepare) + end + + g[test_name] = function(g) + local router = g.cluster:server('router').net_box + + -- Collect stats on active servers before call. + local stats_before = router:eval("return require('crud').stats()") + t.assert_type(stats_before, 'table') + + -- Call insert operation. + local before_start = clock.monotonic() + + local _, err = router:call(case.func, case.args) + + local after_finish = clock.monotonic() + + if case.expect_error ~= true then + t.assert_equals(err, nil) + end + + -- Collect stats on active servers after insert + local stats_after = router:eval("return require('crud').stats()") + t.assert_type(stats_after, 'table') + + -- Expecting 'ok' metrics to change on `expect_error == false` + -- and 'error' otherwise. + local changed, unchanged + if case.expect_error == true then + changed = 'error' + unchanged = 'ok' + else + unchanged = 'error' + changed = 'ok' + end + + local changed_before = stats_before[case.op_label][changed] + local changed_after = stats_after[case.op_label][changed] + + t.assert_equals(changed_after.count - changed_before.count, 1, 'Expected count incremented') + + local ok_latency_max = math.max(changed_before.latency, after_finish - before_start) + t.assert_gt(changed_after.latency, 0, 'Changed latency has appropriate value') + t.assert_le(changed_after.latency, ok_latency_max, 'Changed latency has appropriate value') + + local time_diff = changed_after.time - changed_before.time + t.assert_gt(time_diff, 0, 'Total time increase has appropriate value') + t.assert_le(time_diff, after_finish - before_start, 'Total time increase has appropriate value') + + local unchanged_before = stats_before[case.op_label][unchanged] + local unchanged_after = stats_after[case.op_label][unchanged] + t.assert_equals(unchanged_before, unchanged_after, 'Other stats remained the same') + end +end + +local eval = { + select = [[ + local crud = require('crud') + + local conditions = ... 
+ + return crud.select('customers', conditions) + ]], + select_error = [[ + local crud = require('crud') + + return crud.select('notcustomers', {{ '==', 'i', 3 }}) + ]], + pairs = [[ + local crud = require('crud') + + local conditions = ... + + local result = {} + for _, v in crud.pairs('customers', conditions) do + table.insert(result, v) + end + + return result + ]], + pairs_error = [[ + local crud = require('crud') + + local _, err = pcall(crud.pairs, 'notcustomers') + + return nil, err + ]], +} + +local select_cases = { + select_by_primary_index = { + eval = eval.select, + conditions = {{ '==', 'id_index', 3 }}, + map_reduces = 0, + tuples_fetched = 1, + tuples_lookup = 1, + }, + select_space_error = { + eval = eval.select_error, + map_reduces = 0, + tuples_fetched = 0, + tuples_lookup = 0, + expect_error = true, + }, + select_by_secondary_index = { + eval = eval.select, + conditions = {{ '==', 'age_index', 46 }}, + map_reduces = 1, + tuples_fetched = 1, + tuples_lookup = 1, + }, + select_full_scan = { + eval = eval.select, + conditions = {{ '>', 'id_index', 0 }, { '==', 'city', 'Kyoto' }}, + map_reduces = 1, + tuples_fetched = 0, + tuples_lookup = 4, + }, + pairs_by_primary_index = { + eval = eval.pairs, + conditions = {{ '==', 'id_index', 3 }}, + map_reduces = 0, + tuples_fetched = 1, + tuples_lookup = 1, + }, + pairs_space_error = { + eval = eval.pairs_error, + map_reduces = 0, + tuples_fetched = 0, + tuples_lookup = 0, + expect_error = true, + }, + pairs_by_secondary_index = { + eval = eval.pairs, + conditions = {{ '==', 'age_index', 46 }}, + map_reduces = 1, + tuples_fetched = 1, + tuples_lookup = 1, + }, + pairs_full_scan = { + eval = eval.pairs, + conditions = {{ '>', 'id_index', 0 }, { '==', 'city', 'Kyoto' }}, + map_reduces = 1, + tuples_fetched = 0, + tuples_lookup = 4, + }, +} + +for name, case in pairs(select_cases) do + local test_name = ('test_%s_stats'):format(name) + + g.before_test(test_name, function(g) + helpers.insert_objects(g, 'customers', { + -- Storage is s-2. + { + id = 1, name = "Elizabeth", last_name = "Jackson", + age = 12, city = "New York", + }, + -- Storage is s-2. + { + id = 2, name = "Mary", last_name = "Brown", + age = 46, city = "Los Angeles", + }, + -- Storage is s-1. + { + id = 3, name = "David", last_name = "Smith", + age = 33, city = "Los Angeles", + }, + -- Storage is s-2. + { + id = 4, name = "William", last_name = "White", + age = 81, city = "Chicago", + }, + }) + end) + + g[test_name] = function(g) + local router = g.cluster:server('router').net_box + local op_label = 'select' + + -- Collect stats on active servers before call. + local stats_before = router:eval("return require('crud').stats()") + t.assert_type(stats_before, 'table') + + -- Call insert operation. + local before_start = clock.monotonic() + + local _, err = router:eval(case.eval, { case.conditions }) + + local after_finish = clock.monotonic() + + if case.expect_error ~= true then + t.assert_equals(err, nil) + end + + -- Collect stats on active servers after insert + local stats_after = router:eval("return require('crud').stats()") + t.assert_type(stats_after, 'table') + + -- Expecting 'ok' metrics to change on `expect_error == false` + -- and 'error' otherwise. 
+ local changed, unchanged + if case.expect_error == true then + changed = 'error' + unchanged = 'ok' + else + unchanged = 'error' + changed = 'ok' + end + + local changed_before = stats_before[op_label][changed] + local changed_after = stats_after[op_label][changed] + + t.assert_equals(changed_after.count - changed_before.count, 1, 'Expected count incremented') + + local ok_latency_max = math.max(changed_before.latency, after_finish - before_start) + t.assert_gt(changed_after.latency, 0, 'Changed latency has appropriate value') + t.assert_le(changed_after.latency, ok_latency_max, 'Changed latency has appropriate value') + + local time_diff = changed_after.time - changed_before.time + t.assert_gt(time_diff, 0, 'Total time increase has appropriate value') + t.assert_le(time_diff, after_finish - before_start, 'Total time increase has appropriate value') + + local unchanged_before = stats_before[op_label][unchanged] + local unchanged_after = stats_after[op_label][unchanged] + + t.assert_equals(unchanged_before, unchanged_after, 'Other stats remained the same') + + local details_before = stats_before[op_label].details + local details_after = stats_after[op_label].details + + local tuples_fetched_diff = details_after.tuples_fetched - details_before.tuples_fetched + t.assert_equals(tuples_fetched_diff, case.tuples_fetched, 'Expected count of tuples fetched') + + local tuples_lookup_diff = details_after.tuples_lookup - details_before.tuples_lookup + t.assert_equals(tuples_lookup_diff, case.tuples_lookup, 'Expected count of tuples looked up on storage') + + local map_reduces_diff = details_after.map_reduces - details_before.map_reduces + t.assert_equals(map_reduces_diff, case.map_reduces, 'Expected count of map reduces planned') + end +end diff --git a/test/unit/select_executor_test.lua b/test/unit/select_executor_test.lua index a9977cd43..e150881a0 100644 --- a/test/unit/select_executor_test.lua +++ b/test/unit/select_executor_test.lua @@ -105,7 +105,7 @@ g.test_one_condition_no_index = function() tarantool_iter = plan.tarantool_iter, scan_condition_num = plan.scan_condition_num, }) - t.assert_equals(get_ids(results), {2, 3}) + t.assert_equals(get_ids(results.tuples), {2, 3}) -- after tuple 2 local after_tuple = space:frommap(customers[2]):totable() @@ -115,7 +115,7 @@ g.test_one_condition_no_index = function() after_tuple = after_tuple, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {3}) + t.assert_equals(get_ids(results.tuples), {3}) -- after tuple 3 local after_tuple = space:frommap(customers[3]):totable() @@ -125,7 +125,7 @@ g.test_one_condition_no_index = function() after_tuple = after_tuple, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(#results, 0) + t.assert_equals(results.tuples, {}) end g.test_one_condition_with_index = function() @@ -164,7 +164,7 @@ g.test_one_condition_with_index = function() scan_value = plan.scan_value, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {3, 2, 4}) -- in age order + t.assert_equals(get_ids(results.tuples), {3, 2, 4}) -- in age order -- after tuple 3 local after_tuple = space:frommap(customers[3]):totable() @@ -174,7 +174,7 @@ g.test_one_condition_with_index = function() after_tuple = after_tuple, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {2, 4}) -- in age order + t.assert_equals(get_ids(results.tuples), {2, 4}) -- in age order end g.test_multiple_conditions = function() @@ -220,7 +220,7 @@ g.test_multiple_conditions = function() scan_value = 
plan.scan_value, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {5, 2}) -- in age order + t.assert_equals(get_ids(results.tuples), {5, 2}) -- in age order -- after tuple 5 local after_tuple = space:frommap(customers[5]):totable() @@ -230,7 +230,7 @@ g.test_multiple_conditions = function() after_tuple = after_tuple, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {2}) + t.assert_equals(get_ids(results.tuples), {2}) end g.test_composite_index = function() @@ -271,7 +271,7 @@ g.test_composite_index = function() scan_value = plan.scan_value, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {2, 1, 4}) -- in full_name order + t.assert_equals(get_ids(results.tuples), {2, 1, 4}) -- in full_name order -- after tuple 2 local after_tuple = space:frommap(customers[2]):totable() @@ -281,7 +281,7 @@ g.test_composite_index = function() after_tuple = after_tuple, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {1, 4}) + t.assert_equals(get_ids(results.tuples), {1, 4}) end g.test_get_by_id = function() @@ -319,7 +319,7 @@ g.test_get_by_id = function() scan_value = plan.scan_value, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {2}) + t.assert_equals(get_ids(results.tuples), {2}) end g.test_early_exit = function() @@ -360,7 +360,7 @@ g.test_early_exit = function() scan_value = plan.scan_value, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {4, 2}) + t.assert_equals(get_ids(results.tuples), {4, 2}) end g.test_select_all = function() @@ -397,7 +397,7 @@ g.test_select_all = function() scan_value = plan.scan_value, tarantool_iter = plan.tarantool_iter, }) - t.assert_equals(get_ids(results), {1, 2, 3, 4}) + t.assert_equals(get_ids(results.tuples), {1, 2, 3, 4}) end g.test_limit = function() @@ -435,7 +435,7 @@ g.test_limit = function() tarantool_iter = plan.tarantool_iter, limit = 0, }) - t.assert_equals(#results, 0) + t.assert_equals(results.tuples, {}) -- limit 2 local results = select_executor.execute(space, index, filter_func, { @@ -443,5 +443,5 @@ g.test_limit = function() tarantool_iter = plan.tarantool_iter, limit = 2, }) - t.assert_equals(get_ids(results), {1, 2}) + t.assert_equals(get_ids(results.tuples), {1, 2}) end diff --git a/test/unit/stats_test.lua b/test/unit/stats_test.lua new file mode 100644 index 000000000..183a885d0 --- /dev/null +++ b/test/unit/stats_test.lua @@ -0,0 +1,318 @@ +local clock = require('clock') +local fiber = require('fiber') +local stats_module = require('crud.stats.module') + +local t = require('luatest') +local g = t.group('stats_unit') + +g.before_each(function() + stats_module.enable() +end) + +g.after_each(function() + stats_module.disable() +end) + +g.test_get_returns_expected_format = function() + local stats = stats_module.get() + + t.assert_type(stats, 'table') + + local keys = {} + for key, stat in pairs(stats) do + table.insert(keys, key) + + t.assert_type(stat.ok, 'table') + t.assert_type(stat.ok.count, 'number') + t.assert_type(stat.ok.latency, 'number') + t.assert_type(stat.ok.time, 'number') + + t.assert_type(stat.error, 'table') + t.assert_type(stat.error.count, 'number') + t.assert_type(stat.error.latency, 'number') + t.assert_type(stat.error.time, 'number') + + if key == 'select' then + t.assert_type(stat.details, 'table') + t.assert_type(stat.details.tuples_fetched, 'number') + t.assert_type(stat.details.tuples_lookup, 'number') + t.assert_type(stat.details.map_reduces, 'number') + 
end + end + + t.assert_items_equals(keys, stats_module.label, 'All expected labels presents') +end + +local sleep_time = 0.01 +-- Using `fiber.sleep(time)` between two `clock.monotonic()` +-- may return diff less than `time`. +local function sleep_for(time) + local start = clock.monotonic() + while (clock.monotonic() - start) < time do + fiber.sleep(time / 10) + end +end + +local return_true = function() + sleep_for(sleep_time) + return true +end + +local error_table = { err = 'err' } + +local return_err = function() + sleep_for(sleep_time) + return nil, error_table +end + +local pairs_ok = function() + sleep_for(sleep_time) + return pairs({}) +end + +local throw_message = 'custom error' + +local throws_error = function() + sleep_for(sleep_time) + error(throw_message) + return pairs({}) +end + +local measure_cases = { + wrapper_measures_expected_values_on_ok = { + wrapper = stats_module.wrap, + func = return_true, + changed_coll = 'ok', + unchanged_coll = 'error', + }, + wrapper_measures_expected_values_on_error_return = { + wrapper = stats_module.wrap, + func = return_err, + changed_coll = 'error', + unchanged_coll = 'ok', + }, + wrapper_measures_expected_values_on_error_throw = { + wrapper = stats_module.wrap, + func = throws_error, + changed_coll = 'error', + unchanged_coll = 'ok', + pcall = true, + }, + pairs_wrapper_measures_expected_values_on_ok = { + wrapper = stats_module.wrap_pairs, + func = pairs_ok, + changed_coll = 'ok', + unchanged_coll = 'error', + }, + pairs_wrapper_measures_expected_values_on_error = { + wrapper = stats_module.wrap_pairs, + func = throws_error, + changed_coll = 'error', + unchanged_coll = 'ok', + pcall = true, + }, +} + +for name, case in pairs(measure_cases) do + g[('test_%s'):format(name)] = function() + local stats_before = stats_module.get() + + local op_label = stats_module.label.INSERT + local w_func = case.wrapper(case.func, op_label) + + local before_start = clock.monotonic() + if case.pcall == true then + pcall(w_func) + else + w_func() + end + local after_finish = clock.monotonic() + + local stats_after = stats_module.get() + + -- Expected collectors (changed_coll: 'ok' or 'error') have changed. + local changed_before = stats_before[op_label][case.changed_coll] + local changed_after = stats_after[op_label][case.changed_coll] + + t.assert_equals(changed_after.count - changed_before.count, 1, 'Count incremented') + + local latency_min = math.min(changed_before.latency, sleep_time) + t.assert_ge(changed_after.latency, latency_min, 'Latency has appropriate value') + + local latency_max = math.max(changed_before.latency, after_finish - before_start) + t.assert_le(changed_after.latency, latency_max, 'Latency has appropriate value') + + local time_diff = changed_after.time - changed_before.time + t.assert_ge(time_diff, sleep_time, 'Total time increase has appropriate value') + t.assert_le(time_diff, after_finish - before_start, 'Total time increase has appropriate value') + + -- Another collectors (unchanged_coll: 'error' or 'ok') haven't changed. 
+ local unchanged_before = stats_before[op_label][case.unchanged_coll] + local unchanged_after = stats_after[op_label][case.unchanged_coll] + t.assert_equals(unchanged_before, unchanged_after, 'No changes in another collectors') + end +end + +local disable_stats_cases = { + stats_disable_before_wrap_ = { + before_wrap = function() stats_module.disable() end, + after_wrap = function() end, + }, + stats_disable_after_wrap_ = { + before_wrap = function() end, + after_wrap = function() stats_module.disable() end, + }, + [''] = { + before_wrap = function() end, + after_wrap = function() end, + }, +} + +local preserve_return_cases = { + wrapper_preserves_return_values_on_ok = { + func = return_true, + res = true, + err = nil, + }, + wrapper_preserves_return_values_on_error = { + func = return_err, + res = nil, + err = error_table, + }, +} + +local preserve_throw_cases = { + wrapper_preserves_error_throw = { + wrapper = stats_module.wrap, + }, + pairs_wrapper_preserves_error_throw = { + wrapper = stats_module.wrap_pairs, + }, +} + +for name_head, disable_case in pairs(disable_stats_cases) do + for name_tail, return_case in pairs(preserve_return_cases) do + g[('test_%s%s'):format(name_head, name_tail)] = function() + local op_label = stats_module.label.INSERT + + disable_case.before_wrap() + local w_func = stats_module.wrap(return_case.func, op_label) + disable_case.after_wrap() + local res, err = w_func() + + t.assert_equals(res, return_case.res, 'Wrapper preserves first return value') + t.assert_equals(err, return_case.err, 'Wrapper preserves second return value') + end + end + + g[('test_%pairs_wrapper_preserves_return_values'):format(name_head)] = function() + local input = { a = 'a', b = 'b' } + + local op_label = stats_module.label.INSERT + + disable_case.before_wrap() + local w_func = stats_module.wrap_pairs(function() return pairs(input) end, op_label) + disable_case.after_wrap() + + local res = {} + for k, v in w_func() do + res[k] = v + end + + t.assert_equals(input, res, 'Wrapper preserves pairs return values') + end + + for name_tail, throw_case in pairs(preserve_throw_cases) do + g[('test_%s%s'):format(name_head, name_tail)] = function() + local op_label = stats_module.label.INSERT + + disable_case.before_wrap() + local w_func = throw_case.wrapper(throws_error, op_label) + disable_case.after_wrap() + + t.assert_error_msg_contains(throw_message, w_func, 'Wrapper preserves error throw') + end + end +end + +g.test_stats_is_empty_after_disable = function() + stats_module.disable() + + stats_module.wrap(return_true, stats_module.label.INSERT)() + + local stats = stats_module.get() + t.assert_equals(stats, {}) +end + +g.test_stats_reenable_does_not_reset_stats = function() + -- Prepare non-default stats + local op_label = stats_module.label.INSERT + stats_module.wrap(return_true, op_label)() + + local stats_before = stats_module.get() + t.assert_equals(stats_before[op_label].ok.count, 1, 'Non-zero stats prepared') + + stats_module.enable() + + local stats_after = stats_module.get() + + t.assert_equals(stats_after, stats_before, 'Stats have not been reset') +end + +g.test_stats_fetch_callback = function() + local stats_before = stats_module.get() + + local storage_cursor_stats = { tuples_fetched = 5, tuples_lookup = 25 } + stats_module.get_fetch_callback()(storage_cursor_stats) + + local stats_after = stats_module.get() + + local details_before = stats_before.select.details + local details_after = stats_after.select.details + + local tuples_fetched_diff = details_after.tuples_fetched - 
details_before.tuples_fetched + local tuples_lookup_diff = details_after.tuples_lookup - details_before.tuples_lookup + t.assert_equals(tuples_fetched_diff, 5, 'tuples_fetched is inremented by expected value') + t.assert_equals(tuples_lookup_diff, 25, 'tuples_lookup is inremented by expected value') +end + +g.test_disable_stats_before_fetch_callback_get_do_not_break_call = function() + stats_module.disable() + + local storage_cursor_stats = { tuples_fetched = 5, tuples_lookup = 25 } + stats_module.get_fetch_callback()(storage_cursor_stats) + + t.success('No unexpected errors') +end + +g.test_disable_stats_after_fetch_callback_get_do_not_break_call = function() + local callback = stats_module.get_fetch_callback() + stats_module.disable() + + local storage_cursor_stats = { tuples_fetched = 5, tuples_lookup = 25 } + callback(storage_cursor_stats) + + t.success('No unexpected errors') +end + +g.test_stats_map_reduce_increment = function() + local stats_before = stats_module.get() + + stats_module.inc_map_reduce_count() + + local stats_after = stats_module.get() + + local details_before = stats_before.select.details + local details_after = stats_after.select.details + + local map_reduces_diff = details_after.map_reduces - details_before.map_reduces + t.assert_equals(map_reduces_diff, 1, 'map_reduces is inremented by 1') +end + +g.test_disable_stats_do_not_break_inc_map_reduce_count_call = function() + stats_module.disable() + + stats_module.inc_map_reduce_count() + + t.success('No unexpected errors') +end
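
The README section above notes that `count` and `time` can be combined to compute an average execution time. A minimal sketch of that computation, assuming a router where this patch is applied and statistics collection is enabled; the `average_latencies` helper is illustrative and not part of the patch:

```lua
-- Sketch only: computes average execution time per operation from
-- `crud.stats()` output (ok.time / ok.count), as suggested in the README.
local crud = require('crud')

local function average_latencies()
    local averages = {}
    for op, section in pairs(crud.stats()) do
        -- Every section has an `ok` collector; `select` additionally has `details`.
        if section.ok ~= nil and section.ok.count > 0 then
            -- Total execution time divided by total request count.
            averages[op] = section.ok.time / section.ok.count
        end
    end
    return averages
end

for op, avg in pairs(average_latencies()) do
    print(('%s: average latency %.6f s'):format(op, avg))
end
```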