Commit

[Refactor] Remaining HPPC to java.util collections (opensearch-project#8730)

This commit refactors the remaining usages of hppc collections to java.util
collections and completely removes the obsolete hppc dependency.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
nknize committed Jul 18, 2023
1 parent 83c03dc commit 164fbb1
Showing 104 changed files with 730 additions and 1,576 deletions.
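
Most of the diff below follows one mechanical pattern: HPPC's primitive-valued maps (ObjectIntMap / ObjectIntHashMap) become boxed java.util.Map / HashMap, and cursor-based iteration becomes entrySet() iteration. The following is a minimal sketch of that pattern, not code taken from the commit; the class and method names (DocCountSketch, newCounts, totalForPrecision, increment) are illustrative:

import java.util.HashMap;
import java.util.Map;

class DocCountSketch {
    // Before (HPPC):
    //   ObjectIntHashMap<String> docCounts = new ObjectIntHashMap<>();
    //   docCounts.addTo(geohash, 1);                              // primitive int values, 0 when absent
    //   for (ObjectIntCursor<String> cursor : docCounts) { ... cursor.key ... cursor.value ... }
    //
    // After (java.util), the shape used throughout this commit:
    static Map<String, Integer> newCounts() {
        return new HashMap<>();                                    // replaces new ObjectIntHashMap<>()
    }

    static int totalForPrecision(Map<String, Integer> docCounts, int precision) {
        int total = 0;
        for (Map.Entry<String, Integer> cursor : docCounts.entrySet()) {
            if (cursor.getKey().length() == precision) {           // cursor.key   -> entry.getKey()
                total += cursor.getValue();                        // cursor.value -> entry.getValue()
            }
        }
        return total;
    }

    static void increment(Map<String, Integer> docCounts, String geohash) {
        docCounts.merge(geohash, 1, Integer::sum);                 // roughly replaces hppc's addTo(key, 1)
    }
}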
172 changes: 172 additions & 0 deletions libs/common/src/main/java/org/opensearch/common/util/BitMixer.java
@@ -0,0 +1,172 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

/*
* HPPC
*
* Copyright (C) 2010-2022 Carrot Search s.c.
* All rights reserved.
*
* Refer to the full license file "LICENSE.txt":
* https://github.com/carrotsearch/hppc/blob/master/LICENSE.txt
*/

/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/

package org.opensearch.common.util;

/**
* Bit mixing utilities from carrotsearch.hppc.
*
* Licensed under ALv2. This is pulled in directly to avoid a full hppc dependency.
*
* The purpose of these methods is to evenly distribute the key space over the
* int32 range.
*/
public final class BitMixer {

// Don't bother mixing very small key domains much.
public static int mix(byte key) {
return key * PHI_C32;
}

public static int mix(byte key, int seed) {
return (key ^ seed) * PHI_C32;
}

public static int mix(short key) {
return mixPhi(key);
}

public static int mix(short key, int seed) {
return mixPhi(key ^ seed);
}

public static int mix(char key) {
return mixPhi(key);
}

public static int mix(char key, int seed) {
return mixPhi(key ^ seed);
}

// Better mix for larger key domains.
public static int mix(int key) {
return mix32(key);
}

public static int mix(int key, int seed) {
return mix32(key ^ seed);
}

public static int mix(float key) {
return mix32(Float.floatToIntBits(key));
}

public static int mix(float key, int seed) {
return mix32(Float.floatToIntBits(key) ^ seed);
}

public static int mix(double key) {
return (int) mix64(Double.doubleToLongBits(key));
}

public static int mix(double key, int seed) {
return (int) mix64(Double.doubleToLongBits(key) ^ seed);
}

public static int mix(long key) {
return (int) mix64(key);
}

public static int mix(long key, int seed) {
return (int) mix64(key ^ seed);
}

public static int mix(Object key) {
return key == null ? 0 : mix32(key.hashCode());
}

public static int mix(Object key, int seed) {
return key == null ? 0 : mix32(key.hashCode() ^ seed);
}

/**
* MurmurHash3's plain finalization step.
*/
public static int mix32(int k) {
k = (k ^ (k >>> 16)) * 0x85ebca6b;
k = (k ^ (k >>> 13)) * 0xc2b2ae35;
return k ^ (k >>> 16);
}

/**
* Computes David Stafford's variant 9 of the 64-bit mix function (MurmurHash3 finalization step,
* with different shifts and constants).
*
* Variant 9 is picked because it contains two 32-bit shifts, which can possibly be
* optimized into better machine code.
*
* @see "http://zimbry.blogspot.com/2011/09/better-bit-mixing-improving-on.html"
*/
public static long mix64(long z) {
z = (z ^ (z >>> 32)) * 0x4cd6944c5cc20b6dL;
z = (z ^ (z >>> 29)) * 0xfc12c5b19d3259e9L;
return z ^ (z >>> 32);
}

/*
* Golden ratio bit mixers.
*/

private static final int PHI_C32 = 0x9e3779b9;
private static final long PHI_C64 = 0x9e3779b97f4a7c15L;

public static int mixPhi(byte k) {
final int h = k * PHI_C32;
return h ^ (h >>> 16);
}

public static int mixPhi(char k) {
final int h = k * PHI_C32;
return h ^ (h >>> 16);
}

public static int mixPhi(short k) {
final int h = k * PHI_C32;
return h ^ (h >>> 16);
}

public static int mixPhi(int k) {
final int h = k * PHI_C32;
return h ^ (h >>> 16);
}

public static int mixPhi(float k) {
final int h = Float.floatToIntBits(k) * PHI_C32;
return h ^ (h >>> 16);
}

public static int mixPhi(double k) {
final long h = Double.doubleToLongBits(k) * PHI_C64;
return (int) (h ^ (h >>> 32));
}

public static int mixPhi(long k) {
final long h = k * PHI_C64;
return (int) (h ^ (h >>> 32));
}

public static int mixPhi(Object k) {
final int h = (k == null ? 0 : k.hashCode() * PHI_C32);
return h ^ (h >>> 16);
}
}
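
As a quick usage illustration (not part of this commit): mixing a key before masking it into a power-of-two table keeps the low bits well distributed. A minimal sketch; BitMixerExample and bucketFor are hypothetical names:

import org.opensearch.common.util.BitMixer;

public class BitMixerExample {
    // Hypothetical helper: map a long key into a power-of-two sized table.
    // Mixing first ensures the low bits selected by the mask are well distributed.
    static int bucketFor(long key, int tableSize) {
        assert (tableSize & (tableSize - 1)) == 0 : "tableSize must be a power of two";
        return BitMixer.mix(key) & (tableSize - 1);
    }

    public static void main(String[] args) {
        // Sequential keys land in scattered buckets after mixing.
        for (long key = 1; key <= 4; key++) {
            System.out.println(key + " -> bucket " + bucketFor(key, 16));
        }
    }
}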
6 changes: 0 additions & 6 deletions libs/core/.classpath1
@@ -253,12 +253,6 @@
<attribute name="test" value="true"/>
</attributes>
</classpathentry>
-<classpathentry sourcepath="/home/alpar/.gradle/caches/modules-2/files-2.1/com.carrotsearch/hppc/0.8.1/b338e50c3f98c7ec2bf67a5efb7fa8726a4a9b2d/hppc-0.8.1-sources.jar" kind="lib" path="/home/alpar/.gradle/caches/modules-2/files-2.1/com.carrotsearch/hppc/0.8.1/ffc7ba8f289428b9508ab484b8001dea944ae603/hppc-0.8.1.jar">
-<attributes>
-<attribute name="gradle_used_by_scope" value="test"/>
-<attribute name="test" value="true"/>
-</attributes>
-</classpathentry>
<classpathentry sourcepath="/home/alpar/.gradle/caches/modules-2/files-2.1/joda-time/joda-time/2.10.2/fbf6cbd712c30629c77cefa42fe15ca888e609d5/joda-time-2.10.2-sources.jar" kind="lib" path="/home/alpar/.gradle/caches/modules-2/files-2.1/joda-time/joda-time/2.10.2/a079fc39ccc3de02acdeb7117443e5d9bd431687/joda-time-2.10.2.jar">
<attributes>
<attribute name="gradle_used_by_scope" value="test"/>
AbstractGeoBucketAggregationIntegTest.java
@@ -8,8 +8,6 @@

package org.opensearch.geo.search.aggregations.bucket;

-import com.carrotsearch.hppc.ObjectIntHashMap;
-import com.carrotsearch.hppc.ObjectIntMap;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.opensearch.Version;
import org.opensearch.action.index.IndexRequestBuilder;
@@ -26,8 +24,10 @@
import org.opensearch.test.VersionUtils;

import java.util.ArrayList;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Random;
import java.util.Set;

@@ -51,11 +51,11 @@ public abstract class AbstractGeoBucketAggregationIntegTest extends GeoModulePlu

protected static Rectangle boundingRectangleForGeoShapesAgg;

-protected static ObjectIntMap<String> expectedDocsCountForGeoShapes;
+protected static Map<String, Integer> expectedDocsCountForGeoShapes;

-protected static ObjectIntMap<String> expectedDocCountsForSingleGeoPoint;
+protected static Map<String, Integer> expectedDocCountsForSingleGeoPoint;

-protected static ObjectIntMap<String> multiValuedExpectedDocCountsGeoPoint;
+protected static Map<String, Integer> multiValuedExpectedDocCountsGeoPoint;

protected static final String GEO_SHAPE_FIELD_NAME = "location_geo_shape";

@@ -82,7 +82,7 @@ protected boolean forbidPrivateIndexSettings() {
* @throws Exception thrown during index creation.
*/
protected void prepareGeoShapeIndexForAggregations(final Random random) throws Exception {
-expectedDocsCountForGeoShapes = new ObjectIntHashMap<>();
+expectedDocsCountForGeoShapes = new HashMap<>();
final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
final List<IndexRequestBuilder> geoshapes = new ArrayList<>();
assertAcked(prepareCreate(GEO_SHAPE_INDEX_NAME).setSettings(settings).setMapping(GEO_SHAPE_FIELD_NAME, "type" + "=geo_shape"));
@@ -129,7 +129,7 @@ protected void prepareGeoShapeIndexForAggregations(final Random random) throws E
* @throws Exception thrown during index creation.
*/
protected void prepareSingleValueGeoPointIndex(final Random random) throws Exception {
-expectedDocCountsForSingleGeoPoint = new ObjectIntHashMap<>();
+expectedDocCountsForSingleGeoPoint = new HashMap<>();
createIndex("idx_unmapped");
final Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
@@ -155,7 +155,7 @@ protected void prepareSingleValueGeoPointIndex(final Random random) throws Excep
}

protected void prepareMultiValuedGeoPointIndex(final Random random) throws Exception {
-multiValuedExpectedDocCountsGeoPoint = new ObjectIntHashMap<>();
+multiValuedExpectedDocCountsGeoPoint = new HashMap<>();
final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
final List<IndexRequestBuilder> cities = new ArrayList<>();
assertAcked(
@@ -31,8 +31,6 @@

package org.opensearch.geo.search.aggregations.bucket;

-import com.carrotsearch.hppc.ObjectIntHashMap;
-import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
@@ -49,6 +47,7 @@
import org.opensearch.search.aggregations.bucket.filter.Filter;
import org.opensearch.test.OpenSearchIntegTestCase;

+import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
@@ -70,7 +69,7 @@ public void setupSuiteScopeCluster() throws Exception {
Random random = random();
// Creating a BB for limiting the number buckets generated during aggregation
boundingRectangleForGeoShapesAgg = getGridAggregationBoundingBox(random);
-expectedDocCountsForSingleGeoPoint = new ObjectIntHashMap<>();
+expectedDocCountsForSingleGeoPoint = new HashMap<>();
prepareSingleValueGeoPointIndex(random);
prepareMultiValuedGeoPointIndex(random);
prepareGeoShapeIndexForAggregations(random);
@@ -232,9 +231,9 @@ public void testTopMatch() {
String geohash = cell.getKeyAsString();
long bucketCount = cell.getDocCount();
int expectedBucketCount = 0;
-for (ObjectIntCursor<String> cursor : expectedDocCountsForSingleGeoPoint) {
-if (cursor.key.length() == precision) {
-expectedBucketCount = Math.max(expectedBucketCount, cursor.value);
+for (var cursor : expectedDocCountsForSingleGeoPoint.entrySet()) {
+if (cursor.getKey().length() == precision) {
+expectedBucketCount = Math.max(expectedBucketCount, cursor.getValue());
}
}
assertNotSame(bucketCount, 0);
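
A behavioral note on the replacements above (an observation, not something shown directly in these hunks): HPPC's ObjectIntMap.get returns 0 for an absent key, while java.util.Map.get returns null, so lookups that relied on the implicit zero typically become getOrDefault. A sketch, with countFor as a hypothetical helper:

import java.util.Map;

final class AbsentKeySketch {
    // HPPC:      int c = docCounts.get(geohash);   // 0 when the key is absent
    // java.util: get(...) returns null when absent, so keep the old semantics explicitly.
    static int countFor(Map<String, Integer> docCounts, String geohash) {
        return docCounts.getOrDefault(geohash, 0);
    }
}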
AbstractGeoAggregatorModulePluginTestCase.java
@@ -8,10 +8,6 @@

package org.opensearch.geo.search.aggregations.metrics;

-import com.carrotsearch.hppc.ObjectIntHashMap;
-import com.carrotsearch.hppc.ObjectIntMap;
-import com.carrotsearch.hppc.ObjectObjectHashMap;
-import com.carrotsearch.hppc.ObjectObjectMap;
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.Strings;
@@ -32,7 +28,9 @@
import org.opensearch.search.sort.SortOrder;

import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.stream.IntStream;

import static org.hamcrest.Matchers.equalTo;
@@ -65,8 +63,8 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul
protected static Geometry[] geoShapesValues;
protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid,
unmappedCentroid, geoShapeTopLeft, geoShapeBottomRight;
-protected static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
-protected static ObjectObjectMap<String, GeoPoint> expectedCentroidsForGeoHash = null;
+protected static Map<String, Integer> expectedDocCountsForGeoHash = null;
+protected static Map<String, GeoPoint> expectedCentroidsForGeoHash = null;

@Override
public void setupSuiteScopeCluster() throws Exception {
@@ -98,8 +96,8 @@ public void setupSuiteScopeCluster() throws Exception {

numDocs = randomIntBetween(6, 20);
numUniqueGeoPoints = randomIntBetween(1, numDocs);
-expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
-expectedCentroidsForGeoHash = new ObjectObjectHashMap<>(numDocs * 2);
+expectedDocCountsForGeoHash = new HashMap<>(numDocs * 2);
+expectedCentroidsForGeoHash = new HashMap<>(numDocs * 2);

singleValues = new GeoPoint[numUniqueGeoPoints];
for (int i = 0; i < singleValues.length; i++) {
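
One subtle difference in the hunk above: HPPC constructors take an expected element count, whereas HashMap(int) takes an initial capacity that is rehashed once size exceeds capacity times the load factor (0.75 by default), so new HashMap<>(numDocs * 2) is not an exact like-for-like sizing. If pre-sizing mattered, an equivalent sketch (withExpectedSize is a hypothetical helper) could look like:

import java.util.HashMap;
import java.util.Map;

final class MapSizing {
    // Hypothetical helper: size a HashMap so that `expected` entries fit
    // without triggering a resize, given the default load factor of 0.75.
    static <K, V> Map<K, V> withExpectedSize(int expected) {
        return new HashMap<>((int) Math.ceil(expected / 0.75d));
    }
}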
1 change: 0 additions & 1 deletion server/build.gradle
@@ -128,7 +128,6 @@ dependencies {

// utilities
api project(":libs:opensearch-cli")
-api 'com.carrotsearch:hppc:0.8.1'

// time handling, remove with java 8 time
api "joda-time:joda-time:${versions.joda}"
1 change: 0 additions & 1 deletion server/licenses/hppc-0.8.1.jar.sha1

This file was deleted.
