Rename Mapper#name to Mapper#fullPath
This addresses a long-standing TODO that has caused quite a few bugs over time: the mapper name does not include its full path, while the MappedFieldType name does.

We have already renamed Mapper.Builder#name to leafName (elastic#109971) and Mapper#simpleName to leafName (elastic#110030). This commit renames Mapper#name to fullPath for clarity.
This required some adjustments in FieldAliasMapper to avoid confusion between its existing path method and fullPath; path is now renamed to targetPath.
ObjectMapper already had a fullPath method that simply returned name, making it an effective duplicate, so it could be removed.
javanna committed Jun 21, 2024
1 parent 54e7b4d commit bcba8fb
Showing 71 changed files with 354 additions and 312 deletions.
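
For illustration only, here is a minimal sketch of the naming convention described in the commit message above; the class, the FieldNode record, and the user.address.city field are hypothetical and not part of this commit or of the Elasticsearch Mapper API. It models leafName() as a mapper's name within its parent object and fullPath() as the dot-separated path from the mapping root, i.e. what MappedFieldType#name already returns.

// Hypothetical sketch of leafName() vs fullPath(); not the real Mapper API.
public class MapperNamingSketch {

    // A field whose parent object path is "user.address" and whose leaf name is "city".
    record FieldNode(String parentPath, String leafName) {
        // Dot-separated path from the mapping root, analogous to Mapper#fullPath
        // and to what MappedFieldType#name has always returned.
        String fullPath() {
            return parentPath.isEmpty() ? leafName() : parentPath + "." + leafName();
        }
    }

    public static void main(String[] args) {
        FieldNode city = new FieldNode("user.address", "city");
        System.out.println(city.leafName()); // prints "city" (the leaf name within the parent object)
        System.out.println(city.fullPath()); // prints "user.address.city" (the full path from the root)
    }
}
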
@@ -217,7 +217,7 @@ private List<String> findRoutingPaths(String indexName, Settings allSettings, Li
private static void extractPath(List<String> routingPaths, Mapper mapper) {
if (mapper instanceof KeywordFieldMapper keywordFieldMapper) {
if (keywordFieldMapper.fieldType().isDimension()) {
- routingPaths.add(mapper.name());
+ routingPaths.add(mapper.fullPath());
}
}
}
@@ -621,7 +621,7 @@ public FieldMapper.Builder getMergeBuilder() {
protected void checkIncomingMergeType(FieldMapper mergeWith) {
if (mergeWith instanceof LegacyGeoShapeFieldMapper == false && CONTENT_TYPE.equals(mergeWith.typeName())) {
throw new IllegalArgumentException(
- "mapper [" + name() + "] of type [geo_shape] cannot change strategy from [recursive] to [BKD]"
+ "mapper [" + fullPath() + "] of type [geo_shape] cannot change strategy from [recursive] to [BKD]"
);
}
super.checkIncomingMergeType(mergeWith);
@@ -444,7 +444,7 @@ protected SyntheticSourceMode syntheticSourceMode() {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
- "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+ "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
return new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), leafName(), null) {
@@ -205,17 +205,17 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
value = context.parser().floatValue();
}

- if (context.doc().getByKey(name()) != null) {
+ if (context.doc().getByKey(fullPath()) != null) {
throw new IllegalArgumentException(
- "[rank_feature] fields do not support indexing multiple values for the same field [" + name() + "] in the same document"
+ "[rank_feature] fields do not support indexing multiple values for the same field [" + fullPath() + "] in the same document"
);
}

if (positiveScoreImpact == false) {
value = 1 / value;
}

- context.doc().addWithKey(name(), new FeatureField(NAME, name(), value));
+ context.doc().addWithKey(fullPath(), new FeatureField(NAME, fullPath(), value));
}

private static Float objectToFloat(Object value) {
@@ -174,7 +174,7 @@ public void parse(DocumentParserContext context) throws IOException {
} else if (token == Token.VALUE_NULL) {
// ignore feature, this is consistent with numeric fields
} else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
- final String key = name() + "." + feature;
+ final String key = fullPath() + "." + feature;
float value = context.parser().floatValue(true);
if (context.doc().getByKey(key) != null) {
throw new IllegalArgumentException(
@@ -187,7 +187,7 @@ public void parse(DocumentParserContext context) throws IOException {
if (positiveScoreImpact == false) {
value = 1 / value;
}
- context.doc().addWithKey(key, new FeatureField(name(), feature, value));
+ context.doc().addWithKey(key, new FeatureField(fullPath(), feature, value));
} else {
throw new IllegalArgumentException(
"[rank_features] fields take hashes that map a feature to a strictly positive "
@@ -531,7 +531,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
context.addIgnoredField(mappedFieldType.name());
if (isSourceSynthetic) {
// Save a copy of the field so synthetic source can load it
- context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+ context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
}
return;
} else {
@@ -559,7 +559,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
context.addIgnoredField(mappedFieldType.name());
if (isSourceSynthetic) {
// Save a copy of the field so synthetic source can load it
- context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser()));
+ context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
}
return;
} else {
@@ -721,15 +721,19 @@ protected SyntheticSourceMode syntheticSourceMode() {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (hasDocValues == false) {
throw new IllegalArgumentException(
- "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values"
+ "field ["
+     + fullPath()
+     + "] of type ["
+     + typeName()
+     + "] doesn't support synthetic source because it doesn't have doc values"
);
}
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
- "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+ "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
- return new SortedNumericDocValuesSyntheticFieldLoader(name(), leafName(), ignoreMalformed.value()) {
+ return new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) {
@Override
protected void writeValue(XContentBuilder b, long value) throws IOException {
b.value(decodeForSyntheticSource(value, scalingFactor));
@@ -163,7 +163,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
if (value == null) {
tokenCount = nullValue;
} else {
- tokenCount = countPositions(analyzer, name(), value, enablePositionIncrements);
+ tokenCount = countPositions(analyzer, fullPath(), value, enablePositionIncrements);
}

NumberFieldMapper.NumberType.INTEGER.addFields(context.doc(), fieldType().name(), tokenCount, index, hasDocValues, store);
@@ -690,7 +690,7 @@ private static void assertSearchAsYouTypeFieldMapper(SearchAsYouTypeFieldMapper
assertSearchAsYouTypeFieldType(mapper, mapper.fieldType(), maxShingleSize, analyzerName, mapper.prefixField().fieldType());

assertThat(mapper.prefixField(), notNullValue());
- assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.name()));
+ assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.fullPath()));
assertPrefixFieldType(mapper.prefixField(), mapper.indexAnalyzers(), maxShingleSize, analyzerName);

for (int shingleSize = 2; shingleSize <= maxShingleSize; shingleSize++) {
@@ -709,20 +709,20 @@ private static void assertSearchAsYouTypeFieldMapper(SearchAsYouTypeFieldMapper
assertThat(mapper.shingleFields().length, equalTo(numberOfShingleSubfields));

final Set<String> fieldsUsingSourcePath = new HashSet<>();
- mapper.sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.name()));
+ mapper.sourcePathUsedBy().forEachRemaining(mapper1 -> fieldsUsingSourcePath.add(mapper1.fullPath()));
int multiFields = 0;
for (FieldMapper ignored : mapper.multiFields()) {
multiFields++;
}
assertThat(fieldsUsingSourcePath.size(), equalTo(numberOfShingleSubfields + 1 + multiFields));

final Set<String> expectedFieldsUsingSourcePath = new HashSet<>();
- expectedFieldsUsingSourcePath.add(mapper.prefixField().name());
+ expectedFieldsUsingSourcePath.add(mapper.prefixField().fullPath());
for (ShingleFieldMapper shingleFieldMapper : mapper.shingleFields()) {
- expectedFieldsUsingSourcePath.add(shingleFieldMapper.name());
+ expectedFieldsUsingSourcePath.add(shingleFieldMapper.fullPath());
}
for (FieldMapper multiField : mapper.multiFields()) {
- expectedFieldsUsingSourcePath.add(multiField.name());
+ expectedFieldsUsingSourcePath.add(multiField.fullPath());
}
assertThat(fieldsUsingSourcePath, equalTo(expectedFieldsUsingSourcePath));
}
@@ -128,7 +128,7 @@ public ParentJoinFieldMapper build(MapperBuilderContext context) {
relations.get()
.stream()
.map(relation -> new ParentIdFieldMapper(leafName() + "#" + relation.parent(), eagerGlobalOrdinals.get()))
- .forEach(mapper -> parentIdFields.put(mapper.name(), mapper));
+ .forEach(mapper -> parentIdFields.put(mapper.fullPath(), mapper));
Joiner joiner = new Joiner(leafName(), relations.get());
return new ParentJoinFieldMapper(
leafName(),
@@ -264,37 +264,41 @@ public void parse(DocumentParserContext context) throws IOException {
} else if ("parent".equals(currentFieldName)) {
parent = context.parser().text();
} else {
- throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
+ throw new IllegalArgumentException(
+     "unknown field name [" + currentFieldName + "] in join field [" + fullPath() + "]"
+ );
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("parent".equals(currentFieldName)) {
parent = context.parser().numberValue().toString();
} else {
- throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
+ throw new IllegalArgumentException(
+     "unknown field name [" + currentFieldName + "] in join field [" + fullPath() + "]"
+ );
}
}
}
} else if (token == XContentParser.Token.VALUE_STRING) {
name = context.parser().text();
parent = null;
} else {
- throw new IllegalStateException("[" + name() + "] expected START_OBJECT or VALUE_STRING but was: " + token);
+ throw new IllegalStateException("[" + fullPath() + "] expected START_OBJECT or VALUE_STRING but was: " + token);
}

if (name == null) {
- throw new IllegalArgumentException("null join name in field [" + name() + "]");
+ throw new IllegalArgumentException("null join name in field [" + fullPath() + "]");
}

if (fieldType().joiner.knownRelation(name) == false) {
- throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + name() + "]");
+ throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + fullPath() + "]");
}
if (fieldType().joiner.childTypeExists(name)) {
// Index the document as a child
if (parent == null) {
- throw new IllegalArgumentException("[parent] is missing for join field [" + name() + "]");
+ throw new IllegalArgumentException("[parent] is missing for join field [" + fullPath() + "]");
}
if (context.routing() == null) {
- throw new IllegalArgumentException("[routing] is missing for join field [" + name() + "]");
+ throw new IllegalArgumentException("[routing] is missing for join field [" + fullPath() + "]");
}
String fieldName = fieldType().joiner.parentJoinField(name);
parentIdFields.get(fieldName).indexValue(context, parent);
@@ -430,13 +430,13 @@ public void testSubFields() throws IOException {

Iterator<Mapper> it = mapper.iterator();
FieldMapper next = (FieldMapper) it.next();
- assertThat(next.name(), equalTo("join_field#parent"));
+ assertThat(next.fullPath(), equalTo("join_field#parent"));
assertTrue(next.fieldType().isSearchable());
assertTrue(next.fieldType().isAggregatable());

assertTrue(it.hasNext());
next = (FieldMapper) it.next();
- assertThat(next.name(), equalTo("join_field#child"));
+ assertThat(next.fullPath(), equalTo("join_field#child"));
assertTrue(next.fieldType().isSearchable());
assertTrue(next.fieldType().isAggregatable());

@@ -407,7 +407,7 @@ protected boolean supportsParsingObject() {
@Override
public void parse(DocumentParserContext context) throws IOException {
SearchExecutionContext executionContext = this.searchExecutionContext.get();
- if (context.doc().getField(queryBuilderField.name()) != null) {
+ if (context.doc().getField(queryBuilderField.fullPath()) != null) {
// If a percolator query has been defined in an array object then multiple percolator queries
// could be provided. In order to prevent this we fail if we try to parse more than one query
// for the current document.
@@ -493,27 +493,27 @@ void processQuery(Query query, DocumentParserContext context) {
builder.append(new BytesRef(extraction.field()));
builder.append(FIELD_VALUE_SEPARATOR);
builder.append(extraction.bytes());
- doc.add(new StringField(queryTermsField.name(), builder.toBytesRef(), Field.Store.NO));
+ doc.add(new StringField(queryTermsField.fullPath(), builder.toBytesRef(), Field.Store.NO));
} else if (extraction.range != null) {
byte[] min = extraction.range.lowerPoint;
byte[] max = extraction.range.upperPoint;
- doc.add(new BinaryRange(rangeFieldMapper.name(), encodeRange(extraction.range.fieldName, min, max)));
+ doc.add(new BinaryRange(rangeFieldMapper.fullPath(), encodeRange(extraction.range.fieldName, min, max)));
}
}

if (result.matchAllDocs) {
- doc.add(new StringField(extractionResultField.name(), EXTRACTION_FAILED, Field.Store.NO));
+ doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_FAILED, Field.Store.NO));
if (result.verified) {
- doc.add(new StringField(extractionResultField.name(), EXTRACTION_COMPLETE, Field.Store.NO));
+ doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
}
} else if (result.verified) {
- doc.add(new StringField(extractionResultField.name(), EXTRACTION_COMPLETE, Field.Store.NO));
+ doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
} else {
- doc.add(new StringField(extractionResultField.name(), EXTRACTION_PARTIAL, Field.Store.NO));
+ doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_PARTIAL, Field.Store.NO));
}

context.addToFieldNames(fieldType().name());
- doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
+ doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.fullPath(), result.minimumShouldMatch));
}

static SearchExecutionContext configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) {
@@ -87,7 +87,7 @@ public void testStoringQueryBuilders() throws IOException {
when(searchExecutionContext.getWriteableRegistry()).thenReturn(writableRegistry());
when(searchExecutionContext.getParserConfig()).thenReturn(parserConfig());
when(searchExecutionContext.getForField(fieldMapper.fieldType(), fielddataOperation)).thenReturn(
- new BytesBinaryIndexFieldData(fieldMapper.name(), CoreValuesSourceType.KEYWORD)
+ new BytesBinaryIndexFieldData(fieldMapper.fullPath(), CoreValuesSourceType.KEYWORD)
);
when(searchExecutionContext.getFieldType(Mockito.anyString())).thenAnswer(invocation -> {
final String fieldName = (String) invocation.getArguments()[0];
@@ -526,7 +526,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
}

if (value.length() > ignoreAbove) {
- context.addIgnoredField(name());
+ context.addIgnoredField(fullPath());
return;
}

@@ -580,11 +580,11 @@ protected SyntheticSourceMode syntheticSourceMode() {
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
if (copyTo.copyToFields().isEmpty() != true) {
throw new IllegalArgumentException(
- "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
+ "field [" + fullPath() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to"
);
}
if (fieldType.stored()) {
- return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+ return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
@Override
protected void write(XContentBuilder b, Object value) throws IOException {
b.value((String) value);
@@ -602,7 +602,7 @@ protected void write(XContentBuilder b, Object value) throws IOException {
Locale.ROOT,
"field [%s] of type [%s] doesn't support synthetic source unless it is stored or has a sub-field of"
+ " type [keyword] with doc values or stored and without a normalizer",
- name(),
+ fullPath(),
typeName()
)
);
@@ -116,7 +116,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
final long hash = MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, new MurmurHash3.Hash128()).h1;
context.doc().add(new SortedNumericDocValuesField(fieldType().name(), hash));
if (fieldType().isStored()) {
- context.doc().add(new StoredField(name(), hash));
+ context.doc().add(new StoredField(fullPath(), hash));
}
}
}
@@ -90,7 +90,7 @@ public void postParse(DocumentParserContext context) {
return;
}
final int value = context.sourceToParse().source().length();
- NumberType.INTEGER.addFields(context.doc(), name(), value, true, true, true);
+ NumberType.INTEGER.addFields(context.doc(), fullPath(), value, true, true, true);
}

@Override
@@ -859,7 +859,7 @@ protected String contentType() {

@Override
public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
- return new StringStoredFieldFieldLoader(name(), leafName(), null) {
+ return new StringStoredFieldFieldLoader(fullPath(), leafName(), null) {
@Override
protected void write(XContentBuilder b, Object value) throws IOException {
BytesRef ref = (BytesRef) value;