Fix lossy-conversions lint warnings (#96398)
JDK 20 added a new javac lint warning for possibly lossy conversions in compound assignments caused by implicit type casts, e.g.:

    warning: [lossy-conversions] implicit cast from int to byte in compound assignment is possibly lossy

This change resolves all such warnings by either widening the type of the left-hand operand or explicitly casting the right-hand operand.
ChrisHegarty authored May 31, 2023
1 parent 9a8bb61 commit 1cc1d12
Showing 32 changed files with 53 additions and 53 deletions.
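
Before the per-file diffs, a minimal, hypothetical sketch of the pattern (the class and helper names below are invented for this note, not taken from the changed files): a compound assignment whose left-hand operand is narrower than the promoted result of the operation compiles to an implicit narrowing cast, which javac 20 now flags. The commit uses two fix strategies, both shown here:

    // Hypothetical demo of the two fix strategies used throughout this commit.
    class LossyConversionsDemo {

        static long someLong() {
            return 42L;
        }

        public static void main(String[] args) {
            byte[] bytes = new byte[4];
            int m = 1 << 7;

            // `bytes[0] |= m;` compiles to bytes[0] = (byte) (bytes[0] | m) and now
            // warns: [lossy-conversions] implicit cast from int to byte.
            // Fix 1: cast the right-hand operand explicitly. Only the low 8 bits
            // reach the byte slot either way, so the stored value is unchanged.
            bytes[0] |= (byte) m;

            // `int total = 0; total += someLong();` would warn about long -> int.
            // Fix 2: widen the left-hand operand so no narrowing happens at all.
            long total = 0;
            total += someLong();

            System.out.println(bytes[0] + " " + total);
        }
    }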
@@ -66,8 +66,8 @@ private static Tuple<byte[], byte[]> getLowerUpper(Tuple<InetAddress, Integer> c
         // Borrowed from Lucene
         for (int i = prefixLength; i < 8 * lower.length; i++) {
             int m = 1 << (7 - (i & 7));
-            lower[i >> 3] &= ~m;
-            upper[i >> 3] |= m;
+            lower[i >> 3] &= (byte) ~m;
+            upper[i >> 3] |= (byte) m;
         }
         return new Tuple<>(lower, upper);
     }
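
A note on the IP-mask hunks like the one above (this reasoning is mine, not part of the commit message): the old `lower[i >> 3] &= ~m` already compiled to `lower[i >> 3] = (byte) (lower[i >> 3] & ~m)`, so only the low 8 bits of the int mask ever reached the array slot. Casting the operand to byte first sign-extends back to the same low 8 bits, so the stored value is identical and the cast only silences the warning. A self-contained check of that claim, using nothing beyond the JDK:

    // Sketch: the explicit operand cast stores the same byte as the old implicit cast.
    class MaskCastCheck {
        public static void main(String[] args) {
            for (int m = 0; m < 256; m++) {
                for (int b = -128; b < 128; b++) {
                    byte oldAnd = (byte) (b & ~m);           // old: lower[i >> 3] &= ~m
                    byte newAnd = (byte) (b & (byte) ~m);    // new: lower[i >> 3] &= (byte) ~m
                    byte oldOr = (byte) (b | m);             // old: upper[i >> 3] |= m
                    byte newOr = (byte) (b | (byte) m);      // new: upper[i >> 3] |= (byte) m
                    if (oldAnd != newAnd || oldOr != newOr) {
                        throw new AssertionError("mismatch at m=" + m + ", b=" + b);
                    }
                }
            }
            System.out.println("old and new forms agree for every byte/mask pair");
        }
    }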
@@ -1017,7 +1017,7 @@ public void testArtificialDocWithPreference() throws InterruptedException, IOExc
         List<Integer> shardIds = Arrays.stream(searchShardsResponse.getGroups()).map(s -> s.getShardId().id()).toList();

         // request termvectors of artificial document from each shard
-        int sumTotalTermFreq = 0;
+        long sumTotalTermFreq = 0;
         int sumDocFreq = 0;
         for (Integer shardId : shardIds) {
             TermVectorsResponse tvResponse = client().prepareTermVectors()
@@ -369,7 +369,7 @@ public void testAddBlockWhileIndexingDocuments() throws Exception {
         ensureGreen(indexName);

         final APIBlock block = randomAddableBlock();
-        int nbDocs = 0;
+        long nbDocs = 0;
         try {
             try (BackgroundIndexer indexer = new BackgroundIndexer(indexName, client(), 1000)) {
                 indexer.setFailureAssertion(t -> {
@@ -103,7 +103,7 @@ public void testWriteIndexingPressureMetricsAreIncremented() throws Exception {
         final Releasable replicaRelease = blockReplicas(replicaThreadPool);

         final BulkRequest bulkRequest = new BulkRequest();
-        int totalRequestSize = 0;
+        long totalRequestSize = 0;
         for (int i = 0; i < 80; ++i) {
             IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID())
                 .source(Collections.singletonMap("key", randomAlphaOfLength(50)));
@@ -235,7 +235,7 @@ public void testWriteIndexingPressureMetricsAreIncremented() throws Exception {

     public void testWriteCanBeRejectedAtCoordinatingLevel() throws Exception {
         final BulkRequest bulkRequest = new BulkRequest();
-        int totalRequestSize = 0;
+        long totalRequestSize = 0;
         for (int i = 0; i < 80; ++i) {
             IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID())
                 .source(Collections.singletonMap("key", randomAlphaOfLength(50)));
@@ -300,7 +300,7 @@ public void testWriteCanBeRejectedAtCoordinatingLevel() throws Exception {

     public void testWriteCanBeRejectedAtPrimaryLevel() throws Exception {
         final BulkRequest bulkRequest = new BulkRequest();
-        int totalRequestSize = 0;
+        long totalRequestSize = 0;
         for (int i = 0; i < 80; ++i) {
             IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID())
                 .source(Collections.singletonMap("key", randomAlphaOfLength(50)));
@@ -370,7 +370,7 @@ public void testWritesWillSucceedIfBelowThreshold() throws Exception {
         int thresholdToStopSending = 800 * 1024;

         ArrayList<ActionFuture<IndexResponse>> responses = new ArrayList<>();
-        int totalRequestSize = 0;
+        long totalRequestSize = 0;
         while (totalRequestSize < thresholdToStopSending) {
             IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID())
                 .source(Collections.singletonMap("key", randomAlphaOfLength(500)));
@@ -149,12 +149,12 @@ public void testMemoryBreaker() throws Exception {
         assertFailures(searchRequest, RestStatus.INTERNAL_SERVER_ERROR, containsString(errMsg));

         NodesStatsResponse stats = client.admin().cluster().prepareNodesStats().setBreaker(true).get();
-        int breaks = 0;
+        long breaks = 0;
         for (NodeStats stat : stats.getNodes()) {
             CircuitBreakerStats breakerStats = stat.getBreaker().getStats(CircuitBreaker.FIELDDATA);
             breaks += breakerStats.getTrippedCount();
         }
-        assertThat(breaks, greaterThanOrEqualTo(1));
+        assertThat(breaks, greaterThanOrEqualTo(1L));
     }

     public void testRamAccountingTermsEnum() throws Exception {
@@ -215,12 +215,12 @@ public void testRamAccountingTermsEnum() throws Exception {
         assertFailures(searchRequest, RestStatus.INTERNAL_SERVER_ERROR, containsString(errMsg));

         NodesStatsResponse stats = client.admin().cluster().prepareNodesStats().setBreaker(true).get();
-        int breaks = 0;
+        long breaks = 0;
         for (NodeStats stat : stats.getNodes()) {
             CircuitBreakerStats breakerStats = stat.getBreaker().getStats(CircuitBreaker.FIELDDATA);
             breaks += breakerStats.getTrippedCount();
         }
-        assertThat(breaks, greaterThanOrEqualTo(1));
+        assertThat(breaks, greaterThanOrEqualTo(1L));
     }

     public void testRequestBreaker() throws Exception {
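
For the widened-counter hunks above, the lossy step was accumulating `CircuitBreakerStats.getTrippedCount()`, which returns a long, into an int. Widening `breaks` removes the narrowing entirely, but note the knock-on effect (my observation): the Hamcrest literal has to widen too, because `greaterThanOrEqualTo(1)` yields a `Matcher<Integer>` that no longer type-checks against a long actual value. A minimal sketch:

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.greaterThanOrEqualTo;

    class WidenedCounterSketch {

        // Stand-in for breakerStats.getTrippedCount(), which returns long.
        static long trippedCount() {
            return 2L;
        }

        public static void main(String[] args) {
            long breaks = 0;              // was int; widened, so += long is lossless
            breaks += trippedCount();     // no implicit narrowing, no warning
            // The literal widens with it: a Matcher<Long> needs 1L, not 1.
            assertThat(breaks, greaterThanOrEqualTo(1L));
        }
    }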
@@ -236,7 +236,7 @@ public void testCloseWhileIndexingDocuments() throws Exception {
         final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
         createIndex(indexName);

-        int nbDocs = 0;
+        long nbDocs = 0;
         try (BackgroundIndexer indexer = new BackgroundIndexer(indexName, client(), MAX_DOCS)) {
             indexer.setFailureAssertion(t -> assertException(t, indexName));

@@ -300,7 +300,7 @@ public void testSingleValuedFieldWithRandomOffset() throws Exception {
         assertThat(histo.getName(), equalTo("histo"));
         assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets));

-        int docsCounted = 0;
+        long docsCounted = 0;
         for (int i = 0; i < expectedNumberOfBuckets; ++i) {
             Histogram.Bucket bucket = histo.getBuckets().get(i);
             assertThat(bucket, notNullValue());
@@ -70,7 +70,7 @@ private static byte[] getUUIDBytes(Random random) {
          * The high field of th clock sequence multiplexed with the variant.
          * We set only the MSB of the variant*/
         randomBytes[8] &= 0x3f; /* clear the 2 most significant bits */
-        randomBytes[8] |= 0x80; /* set the variant (MSB is set)*/
+        randomBytes[8] |= (byte) 0x80; /* set the variant (MSB is set)*/
         return randomBytes;
     }
 }
@@ -59,7 +59,7 @@ public void reset(byte[] bytes, int offset, int len) {
     }

     public void skipBytes(long count) {
-        pos += count;
+        pos += (int) count;
     }

     @Override
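
For the `skipBytes(long count)` hunks the commit casts rather than widens, presumably because `pos` is an int position. The explicit `(int) count` reproduces exactly what the compound assignment already did at the bytecode level; a stricter alternative, not what the commit does, would be `Math.toIntExact(count)`, which throws instead of silently truncating. A sketch under those assumptions (the `pos` field here is hypothetical):

    // Sketch: the cast keeps the old truncating behavior; toIntExact would fail fast.
    class SkipBytesSketch {
        private int pos;

        public void skipBytes(long count) {
            pos += (int) count;               // same result as the old `pos += count;`
        }

        public void skipBytesStrict(long count) {
            pos += Math.toIntExact(count);    // alternative: ArithmeticException on overflow
        }

        public static void main(String[] args) {
            SkipBytesSketch s = new SkipBytesSketch();
            s.skipBytes(16L);
            System.out.println(s.pos);        // 16
        }
    }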
@@ -63,8 +63,8 @@ private static Tuple<byte[], byte[]> getLowerUpper(Tuple<InetAddress, Integer> c
         // Borrowed from Lucene
         for (int i = prefixLength; i < 8 * lower.length; i++) {
             int m = 1 << (7 - (i & 7));
-            lower[i >> 3] &= ~m;
-            upper[i >> 3] |= m;
+            lower[i >> 3] &= (byte) ~m;
+            upper[i >> 3] |= (byte) m;
         }
         return new Tuple<>(lower, upper);
     }
@@ -233,11 +233,11 @@ private static byte[] encode(long l, int sign) {
         }

         // write the header
-        encoded[0] |= sign << 7;
+        encoded[0] |= (byte) (sign << 7);
         if (sign > 0) {
-            encoded[0] |= numAdditionalBytes << 3;
+            encoded[0] |= (byte) (numAdditionalBytes << 3);
         } else {
-            encoded[0] |= (15 - numAdditionalBytes) << 3;
+            encoded[0] |= (byte) ((15 - numAdditionalBytes) << 3);
         }
         return encoded;
     }
@@ -198,8 +198,8 @@ private Query cidrQuery(String term, SearchExecutionContext context) {
         byte upper[] = addr.getAddress();
         for (int i = prefixLength; i < 8 * lower.length; i++) {
             int m = 1 << (7 - (i & 7));
-            lower[i >> 3] &= ~m;
-            upper[i >> 3] |= m;
+            lower[i >> 3] &= (byte) ~m;
+            upper[i >> 3] |= (byte) m;
         }
         // Force the terms into IPv6
         BytesRef lowerBytes = new BytesRef(InetAddressPoint.encode(InetAddressPoint.decode(lower)));
@@ -444,8 +444,8 @@ private static Range parseIpRangeFromCidr(final XContentParser parser) throws IO
         byte[] upper = lower.clone();
         for (int i = cidr.v2(); i < 8 * lower.length; i++) {
             int m = 1 << 7 - (i & 7);
-            lower[i >> 3] &= ~m;
-            upper[i >> 3] |= m;
+            lower[i >> 3] &= (byte) ~m;
+            upper[i >> 3] |= (byte) m;
         }
         try {
             return new Range(RangeType.IP, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper), true, true);
@@ -99,7 +99,7 @@ public FloatDenseVectorFunction(

         if (normalizeQuery) {
             for (int dim = 0; dim < this.queryVector.length; dim++) {
-                this.queryVector[dim] /= queryMagnitude;
+                this.queryVector[dim] /= (float) queryMagnitude;
             }
         }
     }
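
One subtlety in the normalization hunk above (my note, assuming `queryMagnitude` is a double, which the double-to-float warning implies): unlike the bitwise byte cases, this operand cast is not guaranteed bit-for-bit identical to the old code. The old form divided in double precision and rounded the quotient to float; the new form rounds the divisor to float first and divides in float precision, which can differ in the last ulp. For normalizing a query vector the difference is negligible, but it is the one pattern here where the cast can change a result:

    // Sketch: operand cast vs. result cast for float /= double; can differ by 1 ulp.
    class FloatDivideSketch {
        public static void main(String[] args) {
            float[] v = { 3f, 4f };
            double magnitude = Math.sqrt(v[0] * (double) v[0] + v[1] * (double) v[1]);

            float oldForm = (float) (v[0] / magnitude);   // old: divide in double, then narrow
            float newForm = v[0] / (float) magnitude;     // new: narrow divisor, divide in float
            System.out.println(oldForm + " vs " + newForm);  // equal here; not guaranteed in general
        }
    }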
@@ -184,7 +184,7 @@ public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
             final PackedLongValues.Iterator buckets = entry.buckets.iterator();
             int doc = 0;
             for (long i = 0, end = entry.docDeltas.size(); i < end; ++i) {
-                doc += docDeltaIterator.next();
+                doc += (int) docDeltaIterator.next();
                 final long bucket = buckets.next();
                 final long rebasedBucket = this.selectedBuckets.find(bucket);
                 if (rebasedBucket != -1) {
@@ -128,8 +128,8 @@ public static class Range implements ToXContentObject, Writeable {
             byte upper[] = address.getAddress();
             for (int i = prefixLength; i < 8 * lower.length; i++) {
                 int m = 1 << (7 - (i & 7));
-                lower[i >> 3] &= ~m;
-                upper[i >> 3] |= m;
+                lower[i >> 3] &= (byte) ~m;
+                upper[i >> 3] |= (byte) m;
             }
             this.key = key;
             try {
@@ -489,7 +489,7 @@ private static int getTotalQueryHits(AtomicArray<SearchPhaseResult> results) {
         for (SearchPhaseResult shardResult : results.asList()) {
             TopDocs topDocs = shardResult.queryResult().topDocs().topDocs;
             assert topDocs.totalHits.relation == Relation.EQUAL_TO;
-            resultCount += topDocs.totalHits.value;
+            resultCount += (int) topDocs.totalHits.value;
         }
         return resultCount;
     }
@@ -89,7 +89,7 @@ public void testAddIndices() throws IOException {
         final long maxSeqNo = randomNonNegativeLong();
         final long maxUnsafeAutoIdTimestamp = randomNonNegativeLong();
         StoreRecovery.addIndices(indexStats, target, indexSort, dirs, maxSeqNo, maxUnsafeAutoIdTimestamp, null, 0, false, false);
-        int numFiles = 0;
+        long numFiles = 0;
         Predicate<String> filesFilter = (f) -> f.startsWith("segments") == false
             && f.equals("write.lock") == false
             && f.startsWith("extra") == false;
@@ -168,7 +168,7 @@ public void testGetPartSize() {
         );
         int numBytes = 0;
         for (int i = 0; i < info.numberOfParts(); i++) {
-            numBytes += info.partBytes(i);
+            numBytes += (int) info.partBytes(i);
         }
         assertEquals(numBytes, 36);

@@ -179,7 +179,7 @@ public void testGetPartSize() {
         );
         numBytes = 0;
         for (int i = 0; i < info.numberOfParts(); i++) {
-            numBytes += info.partBytes(i);
+            numBytes += (int) info.partBytes(i);
         }
         assertEquals(numBytes, 35);
         final int numIters = randomIntBetween(10, 100);
@@ -193,7 +193,7 @@ public void testGetPartSize() {
             info = new BlobStoreIndexShardSnapshot.FileInfo("foo", metadata, ByteSizeValue.ofBytes(randomIntBetween(1, 1000)));
             numBytes = 0;
             for (int i = 0; i < info.numberOfParts(); i++) {
-                numBytes += info.partBytes(i);
+                numBytes += (int) info.partBytes(i);
             }
             assertEquals(numBytes, metadata.length());
         }
@@ -835,7 +835,7 @@ private InputStream getFaultyInputStream(DownloadFileErrorType downloadFileError
             System.arraycopy(fileData, 0, fileDataCopy, 0, fileData.length);
             // Corrupt the file
             for (int i = 0; i < randomIntBetween(1, fileDataCopy.length); i++) {
-                fileDataCopy[i] ^= 0xFF;
+                fileDataCopy[i] ^= (byte) 0xFF;
            }
            return new ByteArrayInputStream(fileDataCopy);
        }
@@ -101,7 +101,7 @@ private byte[] mask(int prefixLength) {
         int m = 0;
         int b = 0x80;
         for (int i = 0; i < prefixLength; i++) {
-            mask[m] |= b;
+            mask[m] |= (byte) b;
             b = b >> 1;
             if (b == 0) {
                 m++;
@@ -91,7 +91,7 @@ protected void assertReduced(InternalBinaryRange reduced, List<InternalBinaryRan
         for (Range.Bucket bucket : reduced.getBuckets()) {
             int expectedCount = 0;
             for (InternalBinaryRange input : inputs) {
-                expectedCount += input.getBuckets().get(pos).getDocCount();
+                expectedCount += (int) input.getBuckets().get(pos).getDocCount();
             }
             assertEquals(expectedCount, bucket.getDocCount());
             pos++;
@@ -557,7 +557,7 @@ static Translog.Operation[] getOperations(
                 "not exposing operations from [" + fromSeqNo + "] greater than the global checkpoint [" + globalCheckpoint + "]"
             );
         }
-        int seenBytes = 0;
+        long seenBytes = 0;
         // - 1 is needed, because toSeqNo is inclusive
         long toSeqNo = Math.min(globalCheckpoint, (fromSeqNo + maxOperationCount) - 1);
         assert fromSeqNo <= toSeqNo : "invalid range from_seqno[" + fromSeqNo + "] > to_seqno[" + toSeqNo + "]";
@@ -1399,13 +1399,13 @@ private static byte[] decode_base64(String s, int maxolen) throws IllegalArgumen
             c2 = char64(s.charAt(off++));
             if (c1 == -1 || c2 == -1) break;
             o = (byte) (c1 << 2);
-            o |= (c2 & 0x30) >> 4;
+            o |= (byte) ((c2 & 0x30) >> 4);
             rs.append((char) o);
             if (++olen >= maxolen || off >= slen) break;
             c3 = char64(s.charAt(off++));
             if (c3 == -1) break;
             o = (byte) ((c2 & 0x0f) << 4);
-            o |= (c3 & 0x3c) >> 2;
+            o |= (byte) ((c3 & 0x3c) >> 2);
             rs.append((char) o);
             if (++olen >= maxolen || off >= slen) break;
             c4 = char64(s.charAt(off++));
@@ -169,7 +169,7 @@ public void testIsTraining_GivenRowsWithDependentVariableValue_AndTrainingPercen

         long expectedTotalTrainingCount = 0;
         for (long classCount : classCounts.values()) {
-            expectedTotalTrainingCount += trainingFraction * classCount;
+            expectedTotalTrainingCount += (long) (trainingFraction * classCount);
         }
         assertThat(actualTotalTrainingCount, greaterThanOrEqualTo(expectedTotalTrainingCount - 2));
         assertThat(actualTotalTrainingCount, lessThanOrEqualTo(expectedTotalTrainingCount));
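
Likewise for the hunk above (my reading): the old compound assignment added the double product to the running total and truncated the sum, while the new code truncates each product before adding in long arithmetic. Because the running total stays integral, the two styles agree except at floating-point rounding edges, and the surrounding assertions already allow a slack of 2. A sketch with invented sample data:

    // Sketch: per-term truncation (new) vs. truncating the running sum (old).
    class TruncationSketch {
        public static void main(String[] args) {
            double trainingFraction = 0.6;
            long[] classCounts = { 10, 7, 3 };

            long oldStyle = 0;
            long newStyle = 0;
            for (long classCount : classCounts) {
                oldStyle = (long) (oldStyle + trainingFraction * classCount);   // old `+=` semantics
                newStyle += (long) (trainingFraction * classCount);             // new explicit cast
            }
            System.out.println(oldStyle + " vs " + newStyle);   // 11 vs 11 for this data
        }
    }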
@@ -343,7 +343,7 @@ void writeMissingBitset(Iterable<?> values) throws IOException {
                 bits = 0;
             }
             if (v != null) {
-                bits |= 1 << (count & 7);
+                bits |= (byte) (1 << (count & 7));
             }
             count++;
         }
@@ -996,7 +996,7 @@ private void writePresenceBits(FSTCompiler<T> fstCompiler, FSTCompiler.UnCompile
                 presenceIndex -= Byte.SIZE;
             }
             // Set the bit at presenceIndex to flag that the corresponding arc is present.
-            presenceBits |= 1 << presenceIndex;
+            presenceBits |= (byte) (1 << presenceIndex);
             previousLabel = label;
         }
         assert presenceIndex == (nodeIn.arcs[nodeIn.numArcs - 1].label - nodeIn.arcs[0].label) % 8;
@@ -44,7 +44,7 @@ public void readBytes(byte[] b, int offset, int len) {

     @Override
     public void skipBytes(long count) {
-        pos += count;
+        pos += (int) count;
     }

     @Override
@@ -42,7 +42,7 @@ public void readBytes(byte[] b, int offset, int len) {

     @Override
     public void skipBytes(long count) {
-        pos -= count;
+        pos -= (int) count;
     }

     @Override
@@ -157,7 +157,7 @@ public byte[] onRead(byte[] actualContents, long position, long length) {
         final byte[] disruptedContents = actualContents == null ? null : Arrays.copyOf(actualContents, actualContents.length);
         if (actualContents != null && countDown.countDown()) {
             // CRC32 should always detect a single bit flip
-            disruptedContents[Math.toIntExact(position + randomLongBetween(0, length - 1))] ^= 1 << between(0, 7);
+            disruptedContents[Math.toIntExact(position + randomLongBetween(0, length - 1))] ^= (byte) (1 << between(0, 7));
         }
         return disruptedContents;
     }
@@ -146,17 +146,17 @@ private void writeNode(StreamOutput out, int parentMaxX, int parentMaxY, BytesSt

     private void writeMetadata(StreamOutput out) throws IOException {
         byte metadata = 0;
-        metadata |= (left != null) ? (1 << 0) : 0;
-        metadata |= (right != null) ? (1 << 1) : 0;
+        metadata |= (byte) ((left != null) ? (1 << 0) : 0);
+        metadata |= (byte) ((right != null) ? (1 << 1) : 0);
         if (component.type == ShapeField.DecodedTriangle.TYPE.POINT) {
             metadata |= (1 << 2);
         } else if (component.type == ShapeField.DecodedTriangle.TYPE.LINE) {
             metadata |= (1 << 3);
-            metadata |= (component.ab) ? (1 << 4) : 0;
+            metadata |= (byte) ((component.ab) ? (1 << 4) : 0);
         } else {
-            metadata |= (component.ab) ? (1 << 4) : 0;
-            metadata |= (component.bc) ? (1 << 5) : 0;
-            metadata |= (component.ca) ? (1 << 6) : 0;
+            metadata |= (byte) ((component.ab) ? (1 << 4) : 0);
+            metadata |= (byte) ((component.bc) ? (1 << 5) : 0);
+            metadata |= (byte) ((component.ca) ? (1 << 6) : 0);
         }
         out.writeByte(metadata);
     }
@@ -163,7 +163,7 @@ private static ZonedDateTime dateTimeWithFractions(String secondAndFractionsSinc
         int fractions = b.remainder(BigDecimal.ONE).movePointRight(9).intValueExact();
         int adjustment = 0;
         if (fractions < 0) {
-            fractions += 1e9;
+            fractions += (int) 1e9;
             adjustment = -1;
         }
         return dateTime((seconds + adjustment) * 1000).withNano(fractions);
