[ML] Consistent pattern for strict/lenient parser names (#32399)
Previously we had two patterns for naming strict
and lenient parsers.

Some classes had CONFIG_PARSER and METADATA_PARSER,
and used an enum to pass the parser type to nested
parsers.

Other classes had STRICT_PARSER and LENIENT_PARSER
and used ternary operators to pass the parser type
to nested parsers.

This change makes all ML classes use the second of
the patterns described above.
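
To illustrate, here is a condensed sketch of the two patterns, based on the
DatafeedConfig changes in this commit (abridged, not a verbatim excerpt).
The first pattern declared each parser by name, collected them in an EnumMap
keyed by MlParserType, and looped over the enum to declare every field once
per parser type:

    // first pattern: enum-keyed map of parsers, fields declared in a loop
    public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("datafeed_config", true, Builder::new);
    public static final ObjectParser<Builder, Void> CONFIG_PARSER = new ObjectParser<>("datafeed_config", false, Builder::new);
    public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);

    static {
        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
        for (MlParserType parserType : MlParserType.values()) {
            ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
            parser.declareString(Builder::setId, ID);
            // nested parsers were looked up through the same enum
            parser.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSERS.get(parserType), CHUNKING_CONFIG);
        }
    }

The second pattern, which all ML classes now use, builds both parsers from a
single factory method and passes the leniency flag down to nested parsers
with a ternary operator:

    // second pattern: one factory method, true = lenient (metadata), false = strict (config)
    public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
    public static final ObjectParser<Builder, Void> STRICT_PARSER = createParser(false);

    private static ObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
        ObjectParser<Builder, Void> parser = new ObjectParser<>("datafeed_config", ignoreUnknownFields, Builder::new);
        parser.declareString(Builder::setId, ID);
        // nested parsers get the same leniency via a ternary instead of an enum lookup
        parser.declareObject(Builder::setChunkingConfig,
                ignoreUnknownFields ? ChunkingConfig.LENIENT_PARSER : ChunkingConfig.STRICT_PARSER, CHUNKING_CONFIG);
        return parser;
    }

This makes the MlParserType enum and the per-type loop unnecessary, and each
field is now declared in exactly one place.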
droberts195 committed Jul 26, 2018
1 parent 92d7802 commit 13d1316
Showing 36 changed files with 320 additions and 417 deletions.
@@ -370,7 +370,7 @@ public List<NamedXContentRegistry.Entry> getNamedXContent() {
return Arrays.asList(
// ML - Custom metadata
new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField("ml"),
- parser -> MlMetadata.METADATA_PARSER.parse(parser, null).build()),
+ parser -> MlMetadata.LENIENT_PARSER.parse(parser, null).build()),
// ML - Persistent action requests
new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(StartDatafeedAction.TASK_NAME),
StartDatafeedAction.DatafeedParams::fromXContent),
@@ -61,12 +61,12 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {

public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), Collections.emptySortedMap());
// This parser follows the pattern that metadata is parsed leniently (to allow for enhancements)
- public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new);
+ public static final ObjectParser<Builder, Void> LENIENT_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new);

static {
- METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.METADATA_PARSER.apply(p, c).build(), JOBS_FIELD);
- METADATA_PARSER.declareObjectArray(Builder::putDatafeeds,
- (p, c) -> DatafeedConfig.METADATA_PARSER.apply(p, c).build(), DATAFEEDS_FIELD);
+ LENIENT_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.LENIENT_PARSER.apply(p, c).build(), JOBS_FIELD);
+ LENIENT_PARSER.declareObjectArray(Builder::putDatafeeds,
+ (p, c) -> DatafeedConfig.LENIENT_PARSER.apply(p, c).build(), DATAFEEDS_FIELD);
}

private final SortedMap<String, Job> jobs;

This file was deleted.

@@ -44,7 +44,7 @@ public Response newResponse() {
public static class Request extends AcknowledgedRequest<Request> implements ToXContentObject {

public static Request parseRequest(String datafeedId, XContentParser parser) {
- DatafeedConfig.Builder datafeed = DatafeedConfig.CONFIG_PARSER.apply(parser, null);
+ DatafeedConfig.Builder datafeed = DatafeedConfig.STRICT_PARSER.apply(parser, null);
datafeed.setId(datafeedId);
return new Request(datafeed.build());
}
@@ -47,7 +47,7 @@ public Response newResponse() {
public static class Request extends AcknowledgedRequest<Request> implements ToXContentObject {

public static Request parseRequest(String jobId, XContentParser parser) {
- Job.Builder jobBuilder = Job.CONFIG_PARSER.apply(parser, null);
+ Job.Builder jobBuilder = Job.STRICT_PARSER.apply(parser, null);
if (jobBuilder.getId() == null) {
jobBuilder.setId(jobId);
} else if (!Strings.isNullOrEmpty(jobId) && !jobId.equals(jobBuilder.getId())) {
@@ -54,7 +54,7 @@ public static class Request extends ActionRequest implements ToXContentObject {
private Detector detector;

public static Request parseRequest(XContentParser parser) {
- Detector detector = Detector.CONFIG_PARSER.apply(parser, null).build();
+ Detector detector = Detector.STRICT_PARSER.apply(parser, null).build();
return new Request(detector);
}

@@ -55,7 +55,7 @@ public static class Request extends ActionRequest {
private Job job;

public static Request parseRequest(XContentParser parser) {
- Job.Builder job = Job.CONFIG_PARSER.apply(parser, null);
+ Job.Builder job = Job.STRICT_PARSER.apply(parser, null);
// When jobs are PUT their ID must be supplied in the URL - assume this will
// be valid unless an invalid job ID is specified in the JSON to be validated
job.setId(job.getId() != null ? job.getId() : "ok");
@@ -16,13 +16,10 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.xpack.core.ml.MlParserType;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;

import java.io.IOException;
- import java.util.EnumMap;
import java.util.Locale;
- import java.util.Map;
import java.util.Objects;

/**
@@ -34,32 +31,27 @@ public class ChunkingConfig implements ToXContentObject, Writeable {
public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span");

// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
- public static final ConstructingObjectParser<ChunkingConfig, Void> METADATA_PARSER = new ConstructingObjectParser<>(
- "chunking_config", true, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]));
- public static final ConstructingObjectParser<ChunkingConfig, Void> CONFIG_PARSER = new ConstructingObjectParser<>(
- "chunking_config", false, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]));
- public static final Map<MlParserType, ConstructingObjectParser<ChunkingConfig, Void>> PARSERS =
- new EnumMap<>(MlParserType.class);
-
- static {
- PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
- PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
- for (MlParserType parserType : MlParserType.values()) {
- ConstructingObjectParser<ChunkingConfig, Void> parser = PARSERS.get(parserType);
- assert parser != null;
- parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
- if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return Mode.fromString(p.text());
- }
- throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
- }, MODE_FIELD, ValueType.STRING);
- parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> {
- if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName());
- }
- throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
- }, TIME_SPAN_FIELD, ValueType.STRING);
- }
+ public static final ConstructingObjectParser<ChunkingConfig, Void> LENIENT_PARSER = createParser(true);
+ public static final ConstructingObjectParser<ChunkingConfig, Void> STRICT_PARSER = createParser(false);
+
+ private static ConstructingObjectParser<ChunkingConfig, Void> createParser(boolean ignoreUnknownFields) {
+ ConstructingObjectParser<ChunkingConfig, Void> parser = new ConstructingObjectParser<>(
+ "chunking_config", ignoreUnknownFields, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]));
+
+ parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
+ if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+ return Mode.fromString(p.text());
+ }
+ throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+ }, MODE_FIELD, ValueType.STRING);
+ parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> {
+ if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+ return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName());
+ }
+ throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+ }, TIME_SPAN_FIELD, ValueType.STRING);
+
+ return parser;
}

private final Mode mode;
@@ -25,7 +25,6 @@
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
- import org.elasticsearch.xpack.core.ml.MlParserType;
import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
@@ -38,7 +37,6 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
- import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -87,44 +85,46 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
public static final ParseField HEADERS = new ParseField("headers");

// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
- public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("datafeed_config", true, Builder::new);
- public static final ObjectParser<Builder, Void> CONFIG_PARSER = new ObjectParser<>("datafeed_config", false, Builder::new);
- public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);
-
- static {
- PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
- PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
- for (MlParserType parserType : MlParserType.values()) {
- ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
- assert parser != null;
- parser.declareString(Builder::setId, ID);
- parser.declareString(Builder::setJobId, Job.ID);
- parser.declareStringArray(Builder::setIndices, INDEXES);
- parser.declareStringArray(Builder::setIndices, INDICES);
- parser.declareStringArray(Builder::setTypes, TYPES);
- parser.declareString((builder, val) ->
- builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
- parser.declareString((builder, val) ->
- builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY);
- parser.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY);
- parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS);
- parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGS);
- parser.declareObject(Builder::setScriptFields, (p, c) -> {
- List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>();
- while (p.nextToken() != XContentParser.Token.END_OBJECT) {
- parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p));
- }
- parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
- return parsedScriptFields;
- }, SCRIPT_FIELDS);
- parser.declareInt(Builder::setScrollSize, SCROLL_SIZE);
- // TODO this is to read former _source field. Remove in v7.0.0
- parser.declareBoolean((builder, value) -> {}, SOURCE);
- parser.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSERS.get(parserType), CHUNKING_CONFIG);
- }
- // Headers are only parsed by the metadata parser, so headers supplied in the _body_ of a REST request will be rejected.
- // (For config headers are explicitly transferred from the auth headers by code in the put/update datafeed actions.)
- METADATA_PARSER.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS);
+ public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
+ public static final ObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
+
+ private static ObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
+ ObjectParser<Builder, Void> parser = new ObjectParser<>("datafeed_config", ignoreUnknownFields, Builder::new);
+
+ parser.declareString(Builder::setId, ID);
+ parser.declareString(Builder::setJobId, Job.ID);
+ parser.declareStringArray(Builder::setIndices, INDEXES);
+ parser.declareStringArray(Builder::setIndices, INDICES);
+ parser.declareStringArray(Builder::setTypes, TYPES);
+ parser.declareString((builder, val) ->
+ builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
+ parser.declareString((builder, val) ->
+ builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY);
+ parser.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY);
+ parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS);
+ parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGS);
+ parser.declareObject(Builder::setScriptFields, (p, c) -> {
+ List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>();
+ while (p.nextToken() != XContentParser.Token.END_OBJECT) {
+ parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p));
+ }
+ parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
+ return parsedScriptFields;
+ }, SCRIPT_FIELDS);
+ parser.declareInt(Builder::setScrollSize, SCROLL_SIZE);
+ // TODO this is to read former _source field. Remove in v7.0.0
+ parser.declareBoolean((builder, value) -> {
+ }, SOURCE);
+ parser.declareObject(Builder::setChunkingConfig, ignoreUnknownFields ? ChunkingConfig.LENIENT_PARSER : ChunkingConfig.STRICT_PARSER,
+ CHUNKING_CONFIG);
+
+ if (ignoreUnknownFields) {
+ // Headers are not parsed by the strict (config) parser, so headers supplied in the _body_ of a REST request will be rejected.
+ // (For config, headers are explicitly transferred from the auth headers by code in the put/update datafeed actions.)
+ parser.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS);
+ }
+
+ return parser;
}

private final String id;
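For illustration (a sketch, not code from this commit: "parser" stands for an XContentParser positioned on a datafeed config object, as in the hunks above), call sites now pick a parser by intent rather than by enum constant:

    // REST layer (e.g. PutDatafeedAction above): unknown fields in the request body are rejected
    DatafeedConfig.Builder fromRequest = DatafeedConfig.STRICT_PARSER.apply(parser, null);

    // cluster state metadata (e.g. MlMetadata above): parsed leniently to allow for future enhancements
    DatafeedConfig.Builder fromClusterState = DatafeedConfig.LENIENT_PARSER.apply(parser, null);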
@@ -68,7 +68,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
return parsedScriptFields;
}, DatafeedConfig.SCRIPT_FIELDS);
PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE);
- PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.CONFIG_PARSER, DatafeedConfig.CHUNKING_CONFIG);
+ PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.STRICT_PARSER, DatafeedConfig.CHUNKING_CONFIG);
}

private final String id;