[Rollup] Move toBuilders() methods out of rollup config objects (#32585)
tlrx committed Aug 27, 2018
1 parent ec9563e commit b0411f2
Showing 8 changed files with 236 additions and 197 deletions.
@@ -20,16 +20,11 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

@@ -182,19 +177,6 @@ public Rounding createRounding() {
return createRounding(interval.toString(), timeZone);
}

/**
* This returns a set of aggregation builders which represent the configured
* set of date histograms. Used by the rollup indexer to iterate over historical data
*/
public List<CompositeValuesSourceBuilder<?>> toBuilders() {
DateHistogramValuesSourceBuilder vsBuilder =
new DateHistogramValuesSourceBuilder(RollupField.formatIndexerAggName(field, DateHistogramAggregationBuilder.NAME));
vsBuilder.dateHistogramInterval(interval);
vsBuilder.field(field);
vsBuilder.timeZone(toDateTimeZone(timeZone));
return Collections.singletonList(vsBuilder);
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

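The date histogram variant of toBuilders() is deleted above, and the file that picks this logic up is not among the diffs shown here. Going by the commit title, the same builder construction now happens outside the config object, presumably wherever the rollup indexer assembles its composite aggregation. A minimal sketch of what such a helper could look like, reusing the calls and imports this hunk drops; the helper name createValueSourceBuilders, the config class name DateHistogramGroupConfig, and the getField()/getInterval()/getTimeZone() accessors are assumptions for illustration, not taken from this diff:

    // Hypothetical static helper on the caller's side; names and accessors are assumed.
    static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final DateHistogramGroupConfig dateHistogram) {
        // Same indexer agg naming helper the removed method used
        String name = RollupField.formatIndexerAggName(dateHistogram.getField(), DateHistogramAggregationBuilder.NAME);
        DateHistogramValuesSourceBuilder builder = new DateHistogramValuesSourceBuilder(name);
        builder.dateHistogramInterval(dateHistogram.getInterval());
        builder.field(dateHistogram.getField());
        // The removed method converted the configured time zone String through a private toDateTimeZone() helper;
        // DateTimeZone.forID() is used here instead to keep the sketch self-contained.
        builder.timeZone(DateTimeZone.forID(dateHistogram.getTimeZone()));
        return Collections.singletonList(builder);
    }
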
@@ -16,18 +16,13 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.xpack.core.rollup.RollupField;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

@@ -85,25 +80,6 @@ public String[] getFields() {
return fields;
}

/**
* This returns a set of aggregation builders which represent the configured
* set of histograms. Used by the rollup indexer to iterate over historical data
*/
public List<CompositeValuesSourceBuilder<?>> toBuilders() {
if (fields.length == 0) {
return Collections.emptyList();
}

return Arrays.stream(fields).map(f -> {
HistogramValuesSourceBuilder vsBuilder
= new HistogramValuesSourceBuilder(RollupField.formatIndexerAggName(f, HistogramAggregationBuilder.NAME));
vsBuilder.interval(interval);
vsBuilder.field(f);
vsBuilder.missingBucket(true);
return vsBuilder;
}).collect(Collectors.toList());
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

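As with the date histogram hunk above, here is a hedged sketch of where the removed histogram builder construction might now live; getFields() and the HistogramValuesSourceBuilder calls come from the removed method, while the helper name, the HistogramGroupConfig class name, and the getInterval() accessor are assumptions:

    // Hypothetical replacement for the removed toBuilders(); an empty fields array simply yields an empty list.
    static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final HistogramGroupConfig histogram) {
        return Arrays.stream(histogram.getFields()).map(field -> {
            HistogramValuesSourceBuilder builder =
                new HistogramValuesSourceBuilder(RollupField.formatIndexerAggName(field, HistogramAggregationBuilder.NAME));
            builder.interval(histogram.getInterval());
            builder.field(field);
            builder.missingBucket(true); // keep buckets for documents that are missing the field, as before
            return builder;
        }).collect(Collectors.toList());
    }
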
@@ -16,18 +16,9 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.xpack.core.rollup.RollupField;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -53,11 +44,11 @@
public class MetricConfig implements Writeable, ToXContentObject {

// TODO: replace these with an enum
private static final ParseField MIN = new ParseField("min");
private static final ParseField MAX = new ParseField("max");
private static final ParseField SUM = new ParseField("sum");
private static final ParseField AVG = new ParseField("avg");
private static final ParseField VALUE_COUNT = new ParseField("value_count");
public static final ParseField MIN = new ParseField("min");
public static final ParseField MAX = new ParseField("max");
public static final ParseField SUM = new ParseField("sum");
public static final ParseField AVG = new ParseField("avg");
public static final ParseField VALUE_COUNT = new ParseField("value_count");

static final String NAME = "metrics";
private static final String FIELD = "field";
@@ -111,46 +102,6 @@ public List<String> getMetrics() {
return metrics;
}

/**
* This returns a set of aggregation builders which represent the configured
* set of metrics. Used by the rollup indexer to iterate over historical data
*/
public List<ValuesSourceAggregationBuilder.LeafOnly> toBuilders() {
if (metrics.size() == 0) {
return Collections.emptyList();
}

List<ValuesSourceAggregationBuilder.LeafOnly> aggs = new ArrayList<>(metrics.size());
for (String metric : metrics) {
ValuesSourceAggregationBuilder.LeafOnly newBuilder;
if (metric.equals(MIN.getPreferredName())) {
newBuilder = new MinAggregationBuilder(RollupField.formatFieldName(field, MinAggregationBuilder.NAME, RollupField.VALUE));
} else if (metric.equals(MAX.getPreferredName())) {
newBuilder = new MaxAggregationBuilder(RollupField.formatFieldName(field, MaxAggregationBuilder.NAME, RollupField.VALUE));
} else if (metric.equals(AVG.getPreferredName())) {
// Avgs are sum + count
newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.VALUE));
ValuesSourceAggregationBuilder.LeafOnly countBuilder
= new ValueCountAggregationBuilder(
RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD), ValueType.NUMERIC);
countBuilder.field(field);
aggs.add(countBuilder);
} else if (metric.equals(SUM.getPreferredName())) {
newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE));
} else if (metric.equals(VALUE_COUNT.getPreferredName())) {
// TODO allow non-numeric value_counts.
// Hardcoding this is fine for now since the job validation guarantees that all metric fields are numerics
newBuilder = new ValueCountAggregationBuilder(
RollupField.formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE), ValueType.NUMERIC);
} else {
throw new IllegalArgumentException("Unsupported metric type [" + metric + "]");
}
newBuilder.field(field);
aggs.add(newBuilder);
}
return aggs;
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

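Note that the min/max/sum/avg/value_count ParseFields switch from private to public in this hunk, which is what lets code outside MetricConfig keep matching metric names once toBuilders() is gone. A sketch of what the relocated construction could look like; getMetrics() and the now-public constants are in this diff, while the helper name createAggregationBuilders and the getField() accessor are assumptions:

    // Hypothetical external helper mirroring the removed MetricConfig toBuilders() body.
    static List<ValuesSourceAggregationBuilder.LeafOnly> createAggregationBuilders(final MetricConfig metricConfig) {
        final String field = metricConfig.getField(); // accessor assumed, not visible in this diff
        final List<String> metrics = metricConfig.getMetrics();
        List<ValuesSourceAggregationBuilder.LeafOnly> aggs = new ArrayList<>(metrics.size());
        for (String metric : metrics) {
            ValuesSourceAggregationBuilder.LeafOnly newBuilder;
            if (metric.equals(MetricConfig.MIN.getPreferredName())) {
                newBuilder = new MinAggregationBuilder(RollupField.formatFieldName(field, MinAggregationBuilder.NAME, RollupField.VALUE));
            } else if (metric.equals(MetricConfig.MAX.getPreferredName())) {
                newBuilder = new MaxAggregationBuilder(RollupField.formatFieldName(field, MaxAggregationBuilder.NAME, RollupField.VALUE));
            } else if (metric.equals(MetricConfig.AVG.getPreferredName())) {
                // An average is rolled up as a sum plus a value count
                newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.VALUE));
                ValuesSourceAggregationBuilder.LeafOnly countBuilder = new ValueCountAggregationBuilder(
                    RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD), ValueType.NUMERIC);
                countBuilder.field(field);
                aggs.add(countBuilder);
            } else if (metric.equals(MetricConfig.SUM.getPreferredName())) {
                newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE));
            } else if (metric.equals(MetricConfig.VALUE_COUNT.getPreferredName())) {
                // Value counts remain numeric-only, matching the TODO in the removed method
                newBuilder = new ValueCountAggregationBuilder(
                    RollupField.formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE), ValueType.NUMERIC);
            } else {
                throw new IllegalArgumentException("Unsupported metric type [" + metric + "]");
            }
            newBuilder.field(field);
            aggs.add(newBuilder);
        }
        return aggs;
    }
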
@@ -18,16 +18,11 @@
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.xpack.core.rollup.RollupField;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

@@ -79,20 +74,6 @@ public String[] getFields() {
return fields;
}

/**
* This returns a set of aggregation builders which represent the configured
* set of terms. Used by the rollup indexer to iterate over historical data
*/
public List<CompositeValuesSourceBuilder<?>> toBuilders() {
return Arrays.stream(fields).map(f -> {
TermsValuesSourceBuilder vsBuilder
= new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME));
vsBuilder.field(f);
vsBuilder.missingBucket(true);
return vsBuilder;
}).collect(Collectors.toList());
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

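The terms variant follows the same pattern; getFields() and the TermsValuesSourceBuilder calls are taken from the removed method, and only the helper's name and new location are assumed:

    // Hypothetical replacement for the removed TermsGroupConfig toBuilders().
    static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final TermsGroupConfig terms) {
        return Arrays.stream(terms.getFields()).map(field -> {
            TermsValuesSourceBuilder builder =
                new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(field, TermsAggregationBuilder.NAME));
            builder.field(field);
            builder.missingBucket(true); // still tolerate documents that lack the terms field
            return builder;
        }).collect(Collectors.toList());
    }
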
@@ -9,19 +9,16 @@
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.test.AbstractSerializingTestCase;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCase<TermsGroupConfig> {

@@ -77,62 +74,4 @@ public void testValidateFieldWrongType() {
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a terms group must be a [numeric] or " +
"[keyword/text] type, but found [geo_point] for field [my_field]"));
}

public void testValidateFieldMatchingNotAggregatable() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
when(fieldCaps.isAggregatable()).thenReturn(false);
responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps));

TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
}

public void testValidateMatchingField() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
String type = getRandomType();

// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
when(fieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("my_field", Collections.singletonMap(type, fieldCaps));

TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
if (e.validationErrors().size() != 0) {
fail(e.getMessage());
}

List<CompositeValuesSourceBuilder<?>> builders = config.toBuilders();
assertThat(builders.size(), equalTo(1));
}

private String getRandomType() {
int n = randomIntBetween(0,8);
if (n == 0) {
return "keyword";
} else if (n == 1) {
return "text";
} else if (n == 2) {
return "long";
} else if (n == 3) {
return "integer";
} else if (n == 4) {
return "short";
} else if (n == 5) {
return "float";
} else if (n == 6) {
return "double";
} else if (n == 7) {
return "scaled_float";
} else if (n == 8) {
return "half_float";
}
return "long";
}
}
(3 more changed files not shown in this view)
