Skip to content

Commit

Permalink
Mappings: Remove dead code after previous refactorings
Browse files Browse the repository at this point in the history
This is mostly removing code that handled deletion of types, which was
removed in elastic#8877.
  • Loading branch information
rjernst committed Apr 19, 2015
1 parent dc17427 commit 554182e
Show file tree
Hide file tree
Showing 5 changed files with 6 additions and 101 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -489,14 +489,14 @@ public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listen
// lock to avoid concurrency issues with mapping updates coming from the API
synchronized(this) {
// simulate on the first time to check if the mapping update is applicable
MergeContext mergeContext = newMmergeContext(new MergeFlags().simulate(true));
MergeContext mergeContext = newMergeContext(new MergeFlags().simulate(true));
rootObjectMapper.merge(update, mergeContext);
if (mergeContext.hasConflicts()) {
throw new MapperParsingException("Could not apply generated dynamic mappings: " + Arrays.toString(mergeContext.buildConflicts()));
} else {
// then apply it for real
mappingsModified = true;
mergeContext = newMmergeContext(new MergeFlags().simulate(false));
mergeContext = newMergeContext(new MergeFlags().simulate(false));
rootObjectMapper.merge(update, mergeContext);
}
}
Expand Down Expand Up @@ -665,7 +665,7 @@ public void traverse(ObjectMapperListener listener) {
rootObjectMapper.traverse(listener);
}

private MergeContext newMmergeContext(MergeFlags mergeFlags) {
private MergeContext newMergeContext(MergeFlags mergeFlags) {
return new MergeContext(mergeFlags) {

List<String> conflicts = new ArrayList<>();
Expand Down Expand Up @@ -699,7 +699,7 @@ public String[] buildConflicts() {
}

public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
final MergeContext mergeContext = newMmergeContext(mergeFlags);
final MergeContext mergeContext = newMergeContext(mergeFlags);
assert rootMappers.size() == mergeWith.rootMappers.size();

rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,19 +45,6 @@ private static CopyOnWriteHashMap<String, FieldMappers> add(CopyOnWriteHashMap<S
return map.copyAndPut(key, mappers);
}

/**
 * Returns a copy of {@code map} with {@code mapper} removed from the {@code FieldMappers}
 * bucket stored under {@code key}. Drops the key entirely when its bucket becomes empty.
 * Returns {@code map} itself (no copy) when the key is absent — copy-on-write semantics.
 */
private static CopyOnWriteHashMap<String, FieldMappers> remove(CopyOnWriteHashMap<String, FieldMappers> map, String key, FieldMapper<?> mapper) {
FieldMappers mappers = map.get(key);
// key not present: nothing to remove, hand back the same instance
if (mappers == null) {
return map;
}
mappers = mappers.remove(mapper);
if (mappers.isEmpty()) {
// last mapper under this key was removed -> drop the key itself
return map.copyAndRemove(key);
} else {
return map.copyAndPut(key, mappers);
}
}

private static class MappersLookup {

final CopyOnWriteHashMap<String, FieldMappers> name, indexName, fullName;
Expand All @@ -80,22 +67,7 @@ MappersLookup addNewMappers(Iterable<? extends FieldMapper<?>> mappers) {
}
return new MappersLookup(name, indexName, fullName);
}

/**
 * Returns a new {@code MappersLookup} with the given mappers removed from all three
 * indices (name, index name, full name). Non-{@code FieldMapper} elements in the
 * iterable are silently skipped, which lets callers pass heterogeneous mapper
 * collections without pre-filtering.
 */
MappersLookup removeMappers(Iterable<?> mappers) {
// work on local copies; each remove() returns a (possibly shared) new map
CopyOnWriteHashMap<String, FieldMappers> name = this.name;
CopyOnWriteHashMap<String, FieldMappers> indexName = this.indexName;
CopyOnWriteHashMap<String, FieldMappers> fullName = this.fullName;
for (Object o : mappers) {
if (!(o instanceof FieldMapper)) {
continue;
}
FieldMapper<?> mapper = (FieldMapper<?>) o;
// the same mapper is keyed three ways; keep all three views in sync
name = remove(name, mapper.names().name(), mapper);
indexName = remove(indexName, mapper.names().indexName(), mapper);
fullName = remove(fullName, mapper.names().fullName(), mapper);
}
return new MappersLookup(name, indexName, fullName);
}

}

private final CopyOnWriteHashSet<FieldMapper<?>> mappers;
Expand All @@ -121,18 +93,6 @@ public FieldMappersLookup copyAndAddAll(Collection<? extends FieldMapper<?>> new
return new FieldMappersLookup(mappers.copyAndAddAll(newMappers), lookup.addNewMappers(newMappers));
}

/**
 * Return a new instance that contains this instance minus the provided mappers.
 * If none of the provided mappers were present, returns {@code this} unchanged
 * (no allocation).
 */
public FieldMappersLookup copyAndRemoveAll(Collection<?> mappersToRemove) {
final CopyOnWriteHashSet<FieldMapper<?>> newMappers = mappers.copyAndRemoveAll(mappersToRemove);
// identity comparison: copyAndRemoveAll returns the same instance when nothing changed
if (newMappers != mappers) {
return new FieldMappersLookup(newMappers, lookup.removeMappers(mappersToRemove));
} else {
return this;
}
}

/**
* Returns the field mappers based on the mapper index name.
*/
Expand Down
39 changes: 1 addition & 38 deletions src/main/java/org/elasticsearch/index/mapper/MapperService.java
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ public class MapperService extends AbstractIndexComponent {

public static final String DEFAULT_MAPPING = "_default_";
private static ObjectOpenHashSet<String> META_FIELDS = ObjectOpenHashSet.from(
"_uid", "_id", "_type", "_all", "_analyzer", "_parent", "_routing", "_index",
"_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
"_size", "_timestamp", "_ttl"
);
private final AnalysisService analysisService;
Expand All @@ -104,7 +104,6 @@ public class MapperService extends AbstractIndexComponent {
private volatile String defaultMappingSource;
private volatile String defaultPercolatorMappingSource;


private volatile Map<String, DocumentMapper> mappers = ImmutableMap.of();

private final Object typeMutex = new Object();
Expand Down Expand Up @@ -394,42 +393,6 @@ private void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
}
}

/**
 * Removes the document mapper for the given type, if registered: closes it,
 * unregisters it from the type map, strips its object/field mappers from the
 * service-wide lookups, and notifies type listeners. No-op for unknown types.
 * Synchronized on {@code typeMutex} to serialize with concurrent type updates.
 */
public void remove(String type) {
synchronized (typeMutex) {
DocumentMapper docMapper = mappers.get(type);
// unknown type: nothing to do
if (docMapper == null) {
return;
}
docMapper.close();
// replace the immutable map rather than mutating it in place
mappers = newMapBuilder(mappers).remove(type).map();
removeObjectAndFieldMappers(docMapper);
// notify listeners only after the mapper is fully unregistered
for (DocumentTypeListener typeListener : typeListeners) {
typeListener.afterRemove(docMapper);
}
}
}

/**
 * Strips all field mappers and object mappers belonging to {@code docMapper}
 * from the service-wide lookup structures. Synchronized on {@code mappersMutex}
 * (separate from {@code typeMutex}) to guard the shared lookup state.
 */
private void removeObjectAndFieldMappers(DocumentMapper docMapper) {
synchronized (mappersMutex) {
fieldMappers = fieldMappers.copyAndRemoveAll(docMapper.mappers());

// rebuild the full-path -> object-mappers index without this mapper's entries
ImmutableOpenMap.Builder<String, ObjectMappers> fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers);
for (ObjectMapper mapper : docMapper.objectMappers().values()) {
ObjectMappers mappers = fullPathObjectMappers.get(mapper.fullPath());
if (mappers != null) {
mappers = mappers.remove(mapper);
if (mappers.isEmpty()) {
// no mappers left for this path -> drop the path entry entirely
fullPathObjectMappers.remove(mapper.fullPath());
} else {
fullPathObjectMappers.put(mapper.fullPath(), mappers);
}
}
}

// publish the rebuilt map in one volatile write
this.fullPathObjectMappers = fullPathObjectMappers.build();
}
}

public DocumentMapper parse(String mappingType, CompressedString mappingSource, boolean applyDefault) throws MapperParsingException {
String defaultMappingSource;
if (PercolatorService.TYPE_NAME.equals(mappingType)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -356,16 +356,6 @@ protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boo
this.multiFields = multiFields;
this.copyTo = copyTo;
}

/**
 * Default posting format for this field mapper; {@code null} means no
 * field-specific override (subclasses may override to supply one).
 */
@Nullable
protected String defaultPostingFormat() {
return null;
}

/**
 * Default doc-values format for this field mapper; {@code null} means no
 * field-specific override (subclasses may override to supply one).
 */
@Nullable
protected String defaultDocValuesFormat() {
return null;
}

protected boolean defaultDocValues() {
if (indexCreatedBefore2x) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -386,14 +386,6 @@ private void applyMappings(ClusterChangedEvent event) {
typesToRefresh.toArray(new String[typesToRefresh.size()]), event.state().nodes().localNodeId())
);
}
// go over and remove mappings
for (DocumentMapper documentMapper : mapperService.docMappers(true)) {
if (seenMappings.containsKey(new Tuple<>(index, documentMapper.type())) && !indexMetaData.mappings().containsKey(documentMapper.type())) {
// we have it in our mappings, but not in the metadata, and we have seen it in the cluster state, remove it
mapperService.remove(documentMapper.type());
seenMappings.remove(new Tuple<>(index, documentMapper.type()));
}
}
} catch (Throwable t) {
// if we failed the mappings anywhere, we need to fail the shards for this index, note, we safeguard
// by creating the processing the mappings on the master, or on the node the mapping was introduced on,
Expand Down

0 comments on commit 554182e

Please sign in to comment.