Commit
[Remote Store] Support emitting multiple streams for a file's content, each responsible for processing a specific part of the file (opensearch-project#7000) (opensearch-project#7983)

* Support emitting multiple streams for a file's content, each responsible for processing a specific part of the file


(cherry picked from commit 0c1a29a)

Signed-off-by: Raghuvansh Raj <raghraaj@amazon.com>
Signed-off-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2 people authored and gaiksaya committed Jun 26, 2023
1 parent 5012f73 commit 6c78332
Showing 33 changed files with 1,580 additions and 8 deletions.
2 changes: 2 additions & 0 deletions buildSrc/version.properties
@@ -58,3 +58,5 @@ jmh = 1.35

# compression
zstd = 1.5.5-3

jzlib = 1.1.3
4 changes: 0 additions & 4 deletions modules/transport-netty4/build.gradle
@@ -186,10 +186,6 @@ thirdPartyAudit {

'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder',
4 changes: 0 additions & 4 deletions plugins/transport-nio/build.gradle
@@ -113,10 +113,6 @@ thirdPartyAudit {

'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder',
3 changes: 3 additions & 0 deletions server/build.gradle
@@ -146,6 +146,9 @@ dependencies {
// jna
api "net.java.dev.jna:jna:${versions.jna}"

// jcraft
api "com.jcraft:jzlib:${versions.jzlib}"

// protobuf
api "com.google.protobuf:protobuf-java:${versions.protobuf}"
api "jakarta.annotation:jakarta.annotation-api:${versions.jakarta_annotation}"
1 change: 1 addition & 0 deletions server/licenses/jzlib-1.1.3.jar.sha1
@@ -0,0 +1 @@
c01428efa717624f7aabf4df319939dda9646b2d
29 changes: 29 additions & 0 deletions server/licenses/jzlib-LICENSE.txt
@@ -0,0 +1,29 @@
JZlib 0.0.* were released under the GNU LGPL license. Later, we have switched
over to a BSD-style license.

------------------------------------------------------------------------------
Copyright (c) 2000-2011 ymnk, JCraft,Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

The name of the authors may not be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL JCRAFT, INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Empty file.
19 changes: 19 additions & 0 deletions server/src/main/java/org/opensearch/common/CheckedTriFunction.java
@@ -0,0 +1,19 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common;

/**
* A {@link TriFunction}-like interface which allows throwing checked exceptions.
*
* @opensearch.internal
*/
@FunctionalInterface
public interface CheckedTriFunction<S, T, U, R, E extends Exception> {
R apply(S s, T t, U u) throws E;
}
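
Usage sketch (illustrative only, not part of this change): a lambda that throws a checked IOException can be assigned directly to this interface; the file name and result type below are arbitrary examples.

import java.io.IOException;
import java.io.RandomAccessFile;

// Reads `size` bytes starting at `position` from a local file; the lambda may
// throw IOException without any wrapping because the interface declares it.
static final CheckedTriFunction<Integer, Long, Long, byte[], IOException> PART_READER =
    (partNumber, size, position) -> {
        try (RandomAccessFile file = new RandomAccessFile("example.dat", "r")) {
            byte[] buffer = new byte[Math.toIntExact(size)];
            file.seek(position);
            file.readFully(buffer);
            return buffer;
        }
    };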
66 changes: 66 additions & 0 deletions server/src/main/java/org/opensearch/common/StreamContext.java
@@ -0,0 +1,66 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common;

import org.opensearch.common.io.InputStreamContainer;

import java.io.IOException;

/**
* StreamContext is used to supply streams to vendor plugins using {@link StreamContext#provideStream}
*
* @opensearch.internal
*/
public class StreamContext {

private final CheckedTriFunction<Integer, Long, Long, InputStreamContainer, IOException> streamSupplier;
private final long partSize;
private final long lastPartSize;
private final int numberOfParts;

/**
* Construct a new StreamContext object
*
* @param streamSupplier A {@link CheckedTriFunction} that will be called with the <code>partNumber</code>, <code>partSize</code> and <code>position</code> in the stream
* @param partSize Size of all parts apart from the last one
* @param lastPartSize Size of the last part
* @param numberOfParts Total number of parts
*/
public StreamContext(
CheckedTriFunction<Integer, Long, Long, InputStreamContainer, IOException> streamSupplier,
long partSize,
long lastPartSize,
int numberOfParts
) {
this.streamSupplier = streamSupplier;
this.partSize = partSize;
this.lastPartSize = lastPartSize;
this.numberOfParts = numberOfParts;
}

/**
* Vendor plugins can use this method to create new streams only when they are required for processing.
* New streams won't be created until this method is called with the specific <code>partNumber</code>.
*
* @param partNumber The index of the part
* @return A stream reference to the part requested
*/
public InputStreamContainer provideStream(int partNumber) throws IOException {
long position = partSize * partNumber;
long size = (partNumber == numberOfParts - 1) ? lastPartSize : partSize;
return streamSupplier.apply(partNumber, size, position);
}

/**
* @return The number of parts in which this file is supposed to be uploaded
*/
public int getNumberOfParts() {
return numberOfParts;
}
}
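
Consumer-side sketch (illustrative, not part of this change): a vendor plugin can ask the StreamContext for one stream per part, so each stream is created lazily only when that part is processed; uploadPart below is a hypothetical plugin-specific hand-off.

import java.io.IOException;
import org.opensearch.common.StreamContext;
import org.opensearch.common.io.InputStreamContainer;

static void uploadAllParts(StreamContext streamContext) throws IOException {
    for (int partNumber = 0; partNumber < streamContext.getNumberOfParts(); partNumber++) {
        // the stream backing this part is only opened on this call
        InputStreamContainer part = streamContext.provideStream(partNumber);
        uploadPart(partNumber, part); // hypothetical vendor SDK call
    }
}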
@@ -0,0 +1,40 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common.blobstore.exception;

import java.io.IOException;

/**
* Exception thrown when a remote data integrity check fails
*
* @opensearch.internal
*/
public class CorruptFileException extends IOException {

private final String fileName;

public CorruptFileException(String message, String fileName) {
super(message);
this.fileName = fileName;
}

public CorruptFileException(String message, Throwable cause, String fileName) {
super(message, cause);
this.fileName = fileName;
}

public CorruptFileException(Throwable cause, String fileName) {
super(cause);
this.fileName = fileName;
}

public String getFileName() {
return fileName;
}
}
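
Illustrative sketch (not part of this change) of how a vendor plugin might surface a failed remote data integrity check; actualChecksum, expectedChecksum, and fileName are assumed to come from the surrounding upload code as primitive longs and a String.

if (actualChecksum != expectedChecksum) {
    // propagate the affected file name so the caller can retry or clean up
    throw new CorruptFileException("Data integrity check failed for " + fileName, fileName);
}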
@@ -0,0 +1,10 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

/** Exceptions for blobstore abstractions */
package org.opensearch.common.blobstore.exception;
@@ -0,0 +1,10 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

/** Abstractions for stream based file transfers */
package org.opensearch.common.blobstore.stream;
@@ -0,0 +1,26 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common.blobstore.stream.write;

import org.opensearch.common.StreamContext;

/**
* Returns the <code>StreamContext</code> to the caller for the given part size
*
* @opensearch.internal
*/
@FunctionalInterface
public interface StreamContextSupplier {

/**
* @param partSize The size of a single part to be uploaded
* @return The <code>StreamContext</code> based on the part size provided
*/
StreamContext supplyStreamContext(long partSize);
}
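
A possible supplier implementation is sketched below (illustrative, not part of this change): given the part size chosen by the plugin, it derives the number of parts and the size of the last part for a file of known length; openPart is a hypothetical helper returning an InputStreamContainer for the requested range.

final long fileSize = 25L * 1024 * 1024; // assumed known up front
StreamContextSupplier supplier = partSize -> {
    int numberOfParts = (int) ((fileSize + partSize - 1) / partSize); // ceil(fileSize / partSize)
    long lastPartSize = fileSize - partSize * (numberOfParts - 1);    // remainder carried by the last part
    return new StreamContext(
        (partNumber, size, position) -> openPart(partNumber, size, position), // hypothetical helper
        partSize,
        lastPartSize,
        numberOfParts
    );
};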
@@ -0,0 +1,120 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common.blobstore.stream.write;

import org.opensearch.common.CheckedConsumer;
import org.opensearch.common.Nullable;
import org.opensearch.common.StreamContext;

import java.io.IOException;

/**
* WriteContext is used to encapsulate all data needed by <code>BlobContainer#writeStreams</code>
*
* @opensearch.internal
*/
public class WriteContext {

private final String fileName;
private final StreamContextSupplier streamContextSupplier;
private final long fileSize;
private final boolean failIfAlreadyExists;
private final WritePriority writePriority;
private final CheckedConsumer<Boolean, IOException> uploadFinalizer;
private final boolean doRemoteDataIntegrityCheck;
private final Long expectedChecksum;

/**
* Construct a new WriteContext object
*
* @param fileName The name of the file being uploaded
* @param streamContextSupplier A supplier that will provide StreamContext to the plugin
* @param fileSize The total size of the file being uploaded
* @param failIfAlreadyExists A boolean to fail the upload if the file already exists
* @param writePriority The <code>WritePriority</code> of this upload
* @param uploadFinalizer A checked consumer used to finalize the upload
* @param doRemoteDataIntegrityCheck A boolean to inform vendor plugins whether remote data integrity checks need to be done
* @param expectedChecksum This parameter is expected only when the vendor plugin is expected to perform server-side data integrity verification
*/
public WriteContext(
String fileName,
StreamContextSupplier streamContextSupplier,
long fileSize,
boolean failIfAlreadyExists,
WritePriority writePriority,
CheckedConsumer<Boolean, IOException> uploadFinalizer,
boolean doRemoteDataIntegrityCheck,
@Nullable Long expectedChecksum
) {
this.fileName = fileName;
this.streamContextSupplier = streamContextSupplier;
this.fileSize = fileSize;
this.failIfAlreadyExists = failIfAlreadyExists;
this.writePriority = writePriority;
this.uploadFinalizer = uploadFinalizer;
this.doRemoteDataIntegrityCheck = doRemoteDataIntegrityCheck;
this.expectedChecksum = expectedChecksum;
}

/**
* @return The file name
*/
public String getFileName() {
return fileName;
}

/**
* @return The boolean representing whether to fail the upload if the file already exists
*/
public boolean isFailIfAlreadyExists() {
return failIfAlreadyExists;
}

/**
* @param partSize The size of a single part to be uploaded
* @return The stream context which will be used by the plugin to initialize streams from the file
*/
public StreamContext getStreamProvider(long partSize) {
return streamContextSupplier.supplyStreamContext(partSize);
}

/**
* @return The total size of the file
*/
public long getFileSize() {
return fileSize;
}

/**
* @return The <code>WritePriority</code> of the upload
*/
public WritePriority getWritePriority() {
return writePriority;
}

/**
* @return The upload finalizer for this upload
*/
public CheckedConsumer<Boolean, IOException> getUploadFinalizer() {
return uploadFinalizer;
}

/**
* @return A boolean indicating whether a remote data integrity check has to be done for this upload
*/
public boolean doRemoteDataIntegrityCheck() {
return doRemoteDataIntegrityCheck;
}

/**
* @return The CRC32 checksum associated with this file
*/
public Long getExpectedChecksum() {
return expectedChecksum;
}
}
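
For orientation, a sketch of wiring these pieces together (illustrative, not part of this change): the file name, supplier, fileSize, finalizer body, and expectedCrc32 value are all assumptions standing in for whatever the remote store upload path provides.

WriteContext writeContext = new WriteContext(
    "_0.cfs",                          // fileName (example value)
    supplier,                          // StreamContextSupplier, e.g. from the sketch above
    fileSize,                          // total bytes to upload
    true,                              // failIfAlreadyExists
    WritePriority.NORMAL,              // writePriority
    uploadSucceeded -> { /* hypothetical finalizer, e.g. release resources */ },
    true,                              // doRemoteDataIntegrityCheck
    expectedCrc32                      // expectedChecksum, may be null when unused
);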
@@ -0,0 +1,19 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common.blobstore.stream.write;

/**
* WritePriority for upload
*
* @opensearch.internal
*/
public enum WritePriority {
NORMAL,
HIGH
}
@@ -0,0 +1,10 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

/** Abstractions for stream based file writes */
package org.opensearch.common.blobstore.stream.write;