Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into add-dataset-size-to-cat
Browse files Browse the repository at this point in the history
  • Loading branch information
dakrone committed Aug 22, 2023
2 parents 2bd1a9d + 366a9a2 commit b673268
Show file tree
Hide file tree
Showing 333 changed files with 2,782 additions and 1,400 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,10 @@
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.compute.operator.Operator;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xpack.esql.evaluator.EvalMapper;
import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs;
import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin;
import org.elasticsearch.xpack.esql.planner.EvalMapper;
import org.elasticsearch.xpack.esql.planner.Layout;
import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
import org.elasticsearch.xpack.ql.expression.FieldAttribute;
Expand Down
10 changes: 9 additions & 1 deletion docs/changelog/98204.yaml
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
pr: 98204
summary: Introduce executor for concurrent search
area: Search
type: enhancement
type: feature
issues: []
highlight:
title: Enable parallel knn search across segments
body: |-
Elasticsearch has until now performed search sequentially across the
segments within each shard. This change makes knn queries faster on shards
that are made of more than one segment, by rewriting and collecting each
segment in parallel.
notable: true
5 changes: 5 additions & 0 deletions docs/changelog/98265.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 98265
summary: Do not assign ignored shards
area: Allocation
type: bug
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/98332.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 98332
summary: Correct behaviour of `ContentPath::remove()`
area: Mapping
type: bug
issues:
- 98327
5 changes: 5 additions & 0 deletions docs/changelog/98628.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 98628
summary: Add ESQL own flavor of arithmetic operators
area: ES|QL
type: bug
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/98653.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 98653
summary: Reset `GatewayService` flags before reroute
area: Cluster Coordination
type: bug
issues:
- 98606
6 changes: 6 additions & 0 deletions docs/changelog/98684.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 98684
summary: Explicit parsing object capabilities of `FieldMappers`
area: Mapping
type: enhancement
issues:
- 98537
5 changes: 5 additions & 0 deletions docs/changelog/98711.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 98711
summary: Support unsigned long in sqrt and log10 for ESQL
area: ES|QL
type: enhancement
issues: []
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ public void testNoData() throws Exception {
indexWriter.addDocument(Collections.singleton(new StringField("another_field", "value", Field.Store.NO)));
}
try (IndexReader reader = indexWriter.getReader()) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);
MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields(
Collections.singletonList("field")
);
Expand All @@ -52,7 +52,7 @@ public void testUnmapped() throws Exception {
indexWriter.addDocument(Collections.singleton(new StringField("another_field", "value", Field.Store.NO)));
}
try (IndexReader reader = indexWriter.getReader()) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);
MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields(
Collections.singletonList("bogus")
);
Expand Down Expand Up @@ -87,7 +87,7 @@ public void testTwoFields() throws Exception {
MultiPassStats multiPassStats = new MultiPassStats(fieldA, fieldB);
multiPassStats.computeStats(Arrays.asList(fieldAValues), Arrays.asList(fieldBValues));
try (IndexReader reader = indexWriter.getReader()) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);
MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields(
Arrays.asList(fieldA, fieldB)
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public void testBasics() throws IOException {

IndexReader r = DirectoryReader.open(w);
w.close();
IndexSearcher searcher = new IndexSearcher(r);
IndexSearcher searcher = newSearcher(r);
searcher.setSimilarity(sim);
Query query = new BoostQuery(
new BooleanQuery.Builder().add(new TermQuery(new Term("f", "foo")), Occur.SHOULD)
Expand Down Expand Up @@ -124,7 +124,7 @@ public void testWeightScript() throws IOException {

IndexReader r = DirectoryReader.open(w);
w.close();
IndexSearcher searcher = new IndexSearcher(r);
IndexSearcher searcher = newSearcher(r);
searcher.setSimilarity(sim);
Query query = new BoostQuery(
new BooleanQuery.Builder().add(new TermQuery(new Term("f", "foo")), Occur.SHOULD)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,12 @@ protected Object getSampleValueForDocument() {
return "POINT (14.0 15.0)";
}

@Override
public void testSupportsParsingObject() throws IOException {
    // Run the shared object-parsing test from the base mapper test class first.
    super.testSupportsParsingObject();
    // The minimal mapping for this test class uses the deprecated [strategy]
    // parameter (see minimalMapping below: geo_shape + strategy=recursive), so
    // the base test run emits a deprecation warning that must be consumed here
    // to keep the test's warning accounting clean.
    assertWarnings("Parameter [strategy] is deprecated and will be removed in a future version");
}

@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "geo_shape").field("strategy", "recursive");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,11 @@ public RankFeaturesFieldType fieldType() {
return (RankFeaturesFieldType) super.fieldType();
}

@Override
protected boolean supportsParsingObject() {
    // This mapper implements its own parse(DocumentParserContext) (below) and
    // accepts an object token as the field value, so advertise object-parsing
    // support to the base FieldMapper machinery.
    // NOTE(review): presumably the object maps feature names to weights for
    // rank_features — confirm against the parse() implementation.
    return true;
}

@Override
public void parse(DocumentParserContext context) throws IOException {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -244,8 +244,13 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo
termStats.add(new TermStatistics(term.bytes(), 1, 1L));
}
}
simScorer = searcher.getSimilarity().scorer(boost, collectionStatistics, termStats.toArray(TermStatistics[]::new));
approximationWeight = searcher.createWeight(approximate(in), ScoreMode.COMPLETE_NO_SCORES, 1f);
if (termStats.size() > 0) {
simScorer = searcher.getSimilarity().scorer(boost, collectionStatistics, termStats.toArray(TermStatistics[]::new));
approximationWeight = searcher.createWeight(approximate(in), ScoreMode.COMPLETE_NO_SCORES, 1f);
} else {
simScorer = null;
approximationWeight = null;
}
}
return new Weight(this) {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,11 @@ protected Object getSampleValueForDocument() {
return Map.of("ten", 10, "twenty", 20);
}

@Override
protected Object getSampleObjectForDocument() {
    // The sample value for this mapper is already object-shaped (a Map — see
    // getSampleValueForDocument above), so reuse it as the sample object.
    return getSampleValueForDocument();
}

@Override
protected void assertExistsQuery(MapperService mapperService) {
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> super.assertExistsQuery(mapperService));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ public void testTerm() throws Exception {
w.addDocument(doc);

try (IndexReader reader = DirectoryReader.open(w)) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);

TermQuery query = new TermQuery(new Term("body", "c"));
Query sourceConfirmedPhraseQuery = new SourceConfirmedTextQuery(query, SOURCE_FETCHER_PROVIDER, Lucene.STANDARD_ANALYZER);
Expand Down Expand Up @@ -99,7 +99,7 @@ public void testPhrase() throws Exception {
w.addDocument(doc);

try (IndexReader reader = DirectoryReader.open(w)) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);

PhraseQuery query = new PhraseQuery("body", "b", "c");
Query sourceConfirmedPhraseQuery = new SourceConfirmedTextQuery(query, SOURCE_FETCHER_PROVIDER, Lucene.STANDARD_ANALYZER);
Expand Down Expand Up @@ -136,6 +136,7 @@ public void testPhrase() throws Exception {
}
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98712")
public void testMultiPhrase() throws Exception {
try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(Lucene.STANDARD_ANALYZER))) {

Expand All @@ -152,7 +153,7 @@ public void testMultiPhrase() throws Exception {
w.addDocument(doc);

try (IndexReader reader = DirectoryReader.open(w)) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);

MultiPhraseQuery query = new MultiPhraseQuery.Builder().add(new Term[] { new Term("body", "a"), new Term("body", "b") }, 0)
.add(new Term[] { new Term("body", "c") }, 1)
Expand Down Expand Up @@ -216,7 +217,7 @@ public void testMultiPhrasePrefix() throws Exception {
w.addDocument(doc);

try (IndexReader reader = DirectoryReader.open(w)) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);

MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("body");
Query sourceConfirmedPhraseQuery = new SourceConfirmedTextQuery(query, SOURCE_FETCHER_PROVIDER, Lucene.STANDARD_ANALYZER);
Expand Down Expand Up @@ -292,7 +293,7 @@ public void testSpanNear() throws Exception {
w.addDocument(doc);

try (IndexReader reader = DirectoryReader.open(w)) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);

SpanNearQuery query = new SpanNearQuery(
new SpanQuery[] { new SpanTermQuery(new Term("body", "b")), new SpanTermQuery(new Term("body", "c")) },
Expand Down Expand Up @@ -420,7 +421,7 @@ public void testApproximation() {
public void testEmptyIndex() throws Exception {
try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(Lucene.STANDARD_ANALYZER))) {
try (IndexReader reader = DirectoryReader.open(w)) {
IndexSearcher searcher = new IndexSearcher(reader);
IndexSearcher searcher = newSearcher(reader);
PhraseQuery query = new PhraseQuery("body", "a", "b");
Query sourceConfirmedPhraseQuery = new SourceConfirmedTextQuery(query, SOURCE_FETCHER_PROVIDER, Lucene.STANDARD_ANALYZER);
assertEquals(0, searcher.count(sourceConfirmedPhraseQuery));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,11 @@ protected void parseCreateField(DocumentParserContext context) {
throw new UnsupportedOperationException("parsing is implemented in parse(), this method should NEVER be called");
}

@Override
protected boolean supportsParsingObject() {
    // Parsing is implemented entirely in parse() below; parseCreateField above
    // deliberately throws UnsupportedOperationException. Declaring object
    // support lets the document parser hand an object token to parse().
    return true;
}

@Override
public void parse(DocumentParserContext context) throws IOException {
context.path().add(simpleName());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -394,6 +394,11 @@ Tuple<List<BytesRef>, Map<String, List<byte[]>>> extractTermsAndRanges(IndexRead
this.clusterTransportVersion = clusterTransportVersion;
}

@Override
protected boolean supportsParsingObject() {
    // This mapper's parse() (below) consumes the field value itself via the
    // SearchExecutionContext, and the value may be an object, so report
    // object-parsing support to the base FieldMapper framework.
    // NOTE(review): assumed from the custom parse() override — confirm the
    // accepted value shape against the mapper's parse() implementation.
    return true;
}

@Override
public void parse(DocumentParserContext context) throws IOException {
SearchExecutionContext executionContext = this.searchExecutionContext.get();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -915,7 +915,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception {
iw.addDocuments(documents); // IW#addDocuments(...) ensures we end up with a single segment
}
try (IndexReader ir = DirectoryReader.open(directory)) {
IndexSearcher percolateSearcher = new IndexSearcher(ir);
IndexSearcher percolateSearcher = newSearcher(ir);
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery(
"_name",
queryStore,
Expand Down Expand Up @@ -954,7 +954,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception {
iw.addDocument(document);
}
try (IndexReader ir = DirectoryReader.open(directory)) {
IndexSearcher percolateSearcher = new IndexSearcher(ir);
IndexSearcher percolateSearcher = newSearcher(ir);
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery(
"_name",
queryStore,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.MockKeywordPlugin;
Expand Down Expand Up @@ -415,19 +416,24 @@ public void testDuelESLucene() throws Exception {
// we generate as many docs as many shards we have
TestDoc[] testDocs = generateTestDocs("test", testFieldSettings);

DirectoryReader directoryReader = indexDocsWithLucene(testDocs);
TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);
DirectoryReader directoryReader = null;
try {
directoryReader = indexDocsWithLucene(testDocs);
TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);

for (TestConfig test : testConfigs) {
TermVectorsRequestBuilder request = getRequestForConfig(test);
if (test.expectedException != null) {
assertRequestBuilderThrows(request, test.expectedException);
continue;
}

for (TestConfig test : testConfigs) {
TermVectorsRequestBuilder request = getRequestForConfig(test);
if (test.expectedException != null) {
assertRequestBuilderThrows(request, test.expectedException);
continue;
TermVectorsResponse response = request.get();
Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
validateResponse(response, luceneTermVectors, test);
}

TermVectorsResponse response = request.get();
Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
validateResponse(response, luceneTermVectors, test);
} finally {
IOUtils.close(directoryReader);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.engine.VersionConflictEngineException;

Expand All @@ -34,27 +35,33 @@ public void testDuelESLucene() throws Exception {
// we generate as many docs as many shards we have
TestDoc[] testDocs = generateTestDocs("test", testFieldSettings);

DirectoryReader directoryReader = indexDocsWithLucene(testDocs);
AbstractTermVectorsTestCase.TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);
DirectoryReader directoryReader = null;
try {
directoryReader = indexDocsWithLucene(testDocs);

MultiTermVectorsRequestBuilder requestBuilder = client().prepareMultiTermVectors();
for (AbstractTermVectorsTestCase.TestConfig test : testConfigs) {
requestBuilder.add(getRequestForConfig(test).request());
}
AbstractTermVectorsTestCase.TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);

MultiTermVectorsItemResponse[] responseItems = requestBuilder.get().getResponses();
MultiTermVectorsRequestBuilder requestBuilder = client().prepareMultiTermVectors();
for (AbstractTermVectorsTestCase.TestConfig test : testConfigs) {
requestBuilder.add(getRequestForConfig(test).request());
}

for (int i = 0; i < testConfigs.length; i++) {
TestConfig test = testConfigs[i];
MultiTermVectorsItemResponse item = responseItems[i];
if (test.expectedException != null) {
assertTrue(item.isFailed());
continue;
} else if (item.isFailed()) {
fail(item.getFailure().getCause().getMessage());
MultiTermVectorsItemResponse[] responseItems = requestBuilder.get().getResponses();

for (int i = 0; i < testConfigs.length; i++) {
TestConfig test = testConfigs[i];
MultiTermVectorsItemResponse item = responseItems[i];
if (test.expectedException != null) {
assertTrue(item.isFailed());
continue;
} else if (item.isFailed()) {
fail(item.getFailure().getCause().getMessage());
}
Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
validateResponse(item.getResponse(), luceneTermVectors, test);
}
Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
validateResponse(item.getResponse(), luceneTermVectors, test);
} finally {
IOUtils.close(directoryReader);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1677,7 +1677,7 @@ private long getLocalCheckpointOfSafeCommit(IndexCommit safeIndexCommit) throws
// be > than the safe commit local checkpoint, since that's checked and updated in
// InternalEngine#restoreVersionMapAndCheckpointTracker
try (DirectoryReader directoryReader = DirectoryReader.open(safeIndexCommit)) {
final IndexSearcher searcher = new IndexSearcher(directoryReader);
final IndexSearcher searcher = newSearcher(directoryReader);
searcher.setQueryCache(null);
final Query query = new BooleanQuery.Builder().add(
LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, commitLocalCheckpoint + 1, Long.MAX_VALUE),
Expand Down
Loading

0 comments on commit b673268

Please sign in to comment.