
Commit

Spotless apply
Signed-off-by: Finn Carroll <carrofin@amazon.com>
finnegancarroll committed Aug 14, 2024
1 parent 93f4a51 · commit 701ed58
Showing 8 changed files with 47 additions and 20 deletions.
[Changed file 1 of 8; filename not shown]
@@ -190,7 +190,9 @@ protected boolean canOptimize() {
}

@Override
- protected void prepare() throws IOException { buildRanges(context); }
+ protected void prepare() throws IOException {
+ buildRanges(context);
+ }

protected Rounding getRounding(final long low, final long high) {
return valuesSource.getRounding();
@@ -216,7 +218,7 @@ protected int rangeMax() {
}

@Override
- protected long getOrd(int rangeIdx){
+ protected long getOrd(int rangeIdx) {
long rangeStart = LongPoint.decodeDimension(filterRewriteOptimizationContext.getRanges().getLower(rangeIdx), 0);
rangeStart = this.getFieldType().convertNanosToMillis(rangeStart);
long ord = bucketOrds.add(0, getRoundingPrepared().round(rangeStart));
@@ -563,7 +565,12 @@ private void processLeafFromQuery(LeafReaderContext ctx, Sort indexSortPrefix) t

@Override
protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
- boolean optimized = filterRewriteOptimizationContext.tryOptimize(ctx, this::incrementBucketDocCount, sub, segmentMatchAll(context, ctx));
+ boolean optimized = filterRewriteOptimizationContext.tryOptimize(
+ ctx,
+ this::incrementBucketDocCount,
+ sub,
+ segmentMatchAll(context, ctx)
+ );
if (optimized) throw new CollectionTerminatedException();

finishLeaf();
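Aside (illustrative, not part of the commit): the hunk above only re-wraps the tryOptimize(...) call across multiple lines; behavior at the call site is unchanged. For readers skimming the diff, the pattern is roughly the sketch below, where incrementBucketDocCount, segmentMatchAll, and the fall-through to normal collection all belong to the enclosing aggregator:

    // Sketch of the call-site pattern, not the literal file contents.
    boolean optimized = filterRewriteOptimizationContext.tryOptimize(
        ctx,                              // the segment currently being collected
        this::incrementBucketDocCount,    // callback that records a doc count for a bucket
        sub,                              // sub-aggregation collector (may be a no-op)
        segmentMatchAll(context, ctx)     // true when the top-level query matches the whole segment
    );
    if (optimized) {
        // CollectionTerminatedException is Lucene's signal that collection for this leaf is
        // finished, so the per-document collector is skipped once the rewrite handled the segment.
        throw new CollectionTerminatedException();
    }
    // ...otherwise fall through to the aggregator's normal per-document LeafBucketCollector.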
[Changed file 2 of 8; filename not shown]
@@ -83,7 +83,7 @@ protected int rangeMax() {
/**
* Translate an index of the packed value range array to an agg bucket ordinal.
*/
- protected long getOrd(int rangeIdx){
+ protected long getOrd(int rangeIdx) {
return rangeIdx;
}

@@ -95,8 +95,12 @@ protected long getOrd(int rangeIdx){
* @param values the point values (index structure for numeric values) for a segment
* @param incrementDocCount a consumer to increment the document count for a range bucket. The First parameter is document count, the second is the key of the bucket
*/
- public final FilterRewriteOptimizationContext.DebugInfo tryOptimize(PointValues values, BiConsumer<Long, Long> incrementDocCount, PackedValueRanges ranges, final LeafBucketCollector sub)
- throws IOException {
+ public final FilterRewriteOptimizationContext.DebugInfo tryOptimize(
+ PointValues values,
+ BiConsumer<Long, Long> incrementDocCount,
+ PackedValueRanges ranges,
+ final LeafBucketCollector sub
+ ) throws IOException {
PointTreeTraversal.RangeAwareIntersectVisitor treeVisitor;

if (sub != null) {
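Aside (illustrative, not from the commit): the Javadoc above describes incrementDocCount as a BiConsumer whose first argument is the document count and whose second is the bucket key. Taking that at face value, a caller of this bridge-level tryOptimize(...) might look roughly like the sketch below; bridge, pointValues, ranges, and collector are placeholder names, not identifiers introduced by this change:

    // Hypothetical caller, following the parameter order stated in the Javadoc above.
    java.util.Map<Long, Long> countsByBucket = new java.util.HashMap<>();
    FilterRewriteOptimizationContext.DebugInfo debug = bridge.tryOptimize(
        pointValues,                                                          // PointValues for the segment
        (docCount, bucketKey) -> countsByBucket.merge(bucketKey, docCount, Long::sum),
        ranges,                                                               // precomputed packed value ranges
        collector                                                             // sub-agg leaf collector; the method above branches on whether it is null
    );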
[Changed file 3 of 8; filename not shown]
@@ -11,7 +11,6 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Weight;

import org.opensearch.common.Rounding;
import org.opensearch.index.mapper.DateFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
[Changed file 4 of 8; filename not shown]
@@ -14,8 +14,8 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PointValues;
- import org.opensearch.search.aggregations.LeafBucketCollector;
import org.opensearch.index.mapper.DocCountFieldMapper;
+ import org.opensearch.search.aggregations.LeafBucketCollector;
import org.opensearch.search.internal.SearchContext;

import java.io.IOException;
@@ -101,8 +101,12 @@ public PackedValueRanges getRanges() {
* @param incrementDocCount consume the doc_count results for certain ordinal
* @param segmentMatchAll if your optimization can prepareFromSegment, you should pass in this flag to decide whether to prepareFromSegment
*/
- public boolean tryOptimize(final LeafReaderContext leafCtx, final BiConsumer<Long, Long> incrementDocCount, LeafBucketCollector sub, boolean segmentMatchAll)
- throws IOException {
+ public boolean tryOptimize(
+ final LeafReaderContext leafCtx,
+ final BiConsumer<Long, Long> incrementDocCount,
+ LeafBucketCollector sub,
+ boolean segmentMatchAll
+ ) throws IOException {
segments.incrementAndGet();
if (!canOptimize) {
return false;
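Aside (a hedged sketch, not code from this repository): the segmentMatchAll flag documented above lets the caller tell the optimization whether the entire segment matches the top-level query. One common way to compute such a flag is to ask the query's Weight for an exact per-segment count, which Lucene returns only when it is cheap to do so; the actual OpenSearch helper may be implemented differently:

    // Assumed helper; Weight.count(ctx) returns -1 when an exact count is not cheaply available.
    static boolean segmentMatchAll(Weight topLevelWeight, LeafReaderContext ctx) throws IOException {
        int count = topLevelWeight.count(ctx);
        return count > 0 && count == ctx.reader().numDocs();
    }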
[Changed file 5 of 8; filename not shown]
@@ -42,11 +42,11 @@ public static boolean withinUpperBound(byte[] value, byte[] upperBound) {
return compareByteValue(value, upperBound) < 0;
}

- public byte[] getLower(int idx){
+ public byte[] getLower(int idx) {
return lowers[idx];
}

- public byte[] getUpper(int idx){
+ public byte[] getUpper(int idx) {
return uppers[idx];
}

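Aside (illustrative only): getLower(...)/getUpper(...) return packed point encodings, which is why the getOrd(...) overrides elsewhere in this commit decode them with LongPoint.decodeDimension before rounding. In isolation that decoding step looks roughly like the following, with ranges and idx assumed to be in scope:

    byte[] lowerPacked = ranges.getLower(idx);                    // packed bytes, as stored in the BKD tree
    byte[] upperPacked = ranges.getUpper(idx);
    long lowerBound = LongPoint.decodeDimension(lowerPacked, 0);  // dimension 0: single-dimension numeric field
    long upperBound = LongPoint.decodeDimension(upperPacked, 0);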
[Changed file 6 of 8; filename not shown]
@@ -167,7 +167,9 @@ protected boolean canOptimize() {
}

@Override
- protected void prepare() throws IOException { buildRanges(context); }
+ protected void prepare() throws IOException {
+ buildRanges(context);
+ }

@Override
protected Rounding getRounding(final long low, final long high) {
@@ -199,7 +201,7 @@ protected Prepared getRoundingPrepared() {
}

@Override
- protected long getOrd(int rangeIdx){
+ protected long getOrd(int rangeIdx) {
long rangeStart = LongPoint.decodeDimension(filterRewriteOptimizationContext.getRanges().getLower(rangeIdx), 0);
rangeStart = this.getFieldType().convertNanosToMillis(rangeStart);
long ord = getBucketOrds().add(0, getRoundingPrepared().round(rangeStart));
@@ -243,7 +245,12 @@ public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBuc
return LeafBucketCollector.NO_OP_COLLECTOR;
}

- boolean optimized = filterRewriteOptimizationContext.tryOptimize(ctx, this::incrementBucketDocCount, sub, segmentMatchAll(context, ctx));
+ boolean optimized = filterRewriteOptimizationContext.tryOptimize(
+ ctx,
+ this::incrementBucketDocCount,
+ sub,
+ segmentMatchAll(context, ctx)
+ );
if (optimized) throw new CollectionTerminatedException();

final SortedNumericDocValues values = valuesSource.longValues(ctx);
[Changed file 7 of 8; filename not shown]
@@ -126,7 +126,9 @@ protected boolean canOptimize() {
}

@Override
- protected void prepare() throws IOException { buildRanges(context); }
+ protected void prepare() throws IOException {
+ buildRanges(context);
+ }

@Override
protected Rounding getRounding(long low, long high) {
@@ -144,7 +146,7 @@ protected long[] processHardBounds(long[] bounds) {
}

@Override
- protected long getOrd(int rangeIdx){
+ protected long getOrd(int rangeIdx) {
long rangeStart = LongPoint.decodeDimension(filterRewriteOptimizationContext.getRanges().getLower(rangeIdx), 0);
rangeStart = this.getFieldType().convertNanosToMillis(rangeStart);
long ord = bucketOrds.add(0, getRoundingPrepared().round(rangeStart));
@@ -173,7 +175,12 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol
return LeafBucketCollector.NO_OP_COLLECTOR;
}

- boolean optimized = filterRewriteOptimizationContext.tryOptimize(ctx, this::incrementBucketDocCount, sub, segmentMatchAll(context, ctx));
+ boolean optimized = filterRewriteOptimizationContext.tryOptimize(
+ ctx,
+ this::incrementBucketDocCount,
+ sub,
+ segmentMatchAll(context, ctx)
+ );
if (optimized) throw new CollectionTerminatedException();

SortedNumericDocValues values = valuesSource.longValues(ctx);
[Changed file 8 of 8; filename not shown]
@@ -67,7 +67,6 @@
import java.util.Map;
import java.util.Objects;
import java.util.function.BiConsumer;
- import java.util.function.Function;

import static org.opensearch.core.xcontent.ConstructingObjectParser.optionalConstructorArg;

@@ -305,7 +304,7 @@ public ScoreMode scoreMode() {

@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
- boolean optimized = filterRewriteOptimizationContext.tryOptimize(ctx, this::incrementBucketDocCount, sub,false);
+ boolean optimized = filterRewriteOptimizationContext.tryOptimize(ctx, this::incrementBucketDocCount, sub, false);
if (optimized) throw new CollectionTerminatedException();

final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
