
Commit

add callZooFunc and change all callBigDlFunc to callZooFunc (intel-an…
qiuxin2012 committed Nov 26, 2019
1 parent 16d32b9 commit d143bce
Showing 1 changed file with 55 additions and 53 deletions.
108 changes: 55 additions & 53 deletions python/orca/src/bigdl/orca/inference/inference_model.py
@@ -14,7 +14,8 @@
# limitations under the License.
#

-from bigdl.util.common import JavaValue, callBigDlFunc
+from bigdl.util.common import JavaValue
+from zoo.common.utils import callZooFunc
from bigdl.nn.layer import Layer
from zoo.pipeline.api.keras.engine import KerasNet
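
The swapped-in helper keeps the calling convention of callBigDlFunc: (bigdl_type, JVM method name, *args). A minimal sketch of that pattern, equivalent to what model.load below does; the InferenceModel import path and the model path are assumptions/placeholders, not part of the diff:

from zoo.common.utils import callZooFunc
from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
# Forwards "inferenceModelLoad" plus its arguments to the JVM side, exactly as the
# rewritten call sites below do.
callZooFunc("float", "inferenceModelLoad", model.value, "/path/to/model.bigdl", None)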

@@ -28,6 +29,7 @@ class InferenceModel(JavaValue):
# Arguments
supported_concurrent_num: Int. How many concurrent threads to invoke. Default is 1.
"""
+
def __init__(self, supported_concurrent_num=1, bigdl_type="float"):
super(InferenceModel, self).__init__(None, bigdl_type, supported_concurrent_num)
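
A hedged construction sketch; init_nncontext is the usual Analytics Zoo entry point for starting the JVM/Spark side and, like the import path, is an assumption here rather than something shown in this file:

from zoo.common.nncontext import init_nncontext
from zoo.pipeline.inference import InferenceModel

sc = init_nncontext()  # the JVM gateway must exist before JavaValue.__init__ runs
model = InferenceModel(supported_concurrent_num=4)  # allow up to 4 concurrent predict calls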

@@ -38,8 +40,8 @@ def load(self, model_path, weight_path=None):
:param model_path: String. The file path to the model.
:param weight_path: String. The file path to the weights if any. Default is None.
"""
-callBigDlFunc(self.bigdl_type, "inferenceModelLoad",
-    self.value, model_path, weight_path)
+callZooFunc(self.bigdl_type, "inferenceModelLoad",
+    self.value, model_path, weight_path)
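
A minimal usage sketch for load; the path is a placeholder and the import path is assumed:

from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
# weight_path stays None when the weights are stored with the model definition.
model.load("/path/to/model.bigdl", weight_path=None)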

def load_caffe(self, model_path, weight_path):
"""
@@ -48,8 +50,8 @@ def load_caffe(self, model_path, weight_path):
:param model_path: String. The file path to the prototxt file.
:param weight_path: String. The file path to the Caffe model.
"""
-callBigDlFunc(self.bigdl_type, "inferenceModelLoadCaffe",
-    self.value, model_path, weight_path)
+callZooFunc(self.bigdl_type, "inferenceModelLoadCaffe",
+    self.value, model_path, weight_path)
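
A usage sketch for the Caffe loader; both file paths are placeholders:

from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
# The prototxt describes the network, the caffemodel file holds the trained weights.
model.load_caffe("/path/to/deploy.prototxt", "/path/to/weights.caffemodel")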

def load_openvino(self, model_path, weight_path, batch_size=0):
"""
@@ -59,8 +61,8 @@ def load_openvino(self, model_path, weight_path, batch_size=0):
:param weight_path: String. The file path to the OpenVINO IR bin file.
:param batch_size: Int. Set batch Size, default is 0 (use default batch size).
"""
-callBigDlFunc(self.bigdl_type, "inferenceModelLoadOpenVINO",
-    self.value, model_path, weight_path, batch_size)
+callZooFunc(self.bigdl_type, "inferenceModelLoadOpenVINO",
+    self.value, model_path, weight_path, batch_size)
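
A usage sketch for the OpenVINO loader; paths are placeholders, import path assumed as above:

from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
# OpenVINO IR comes as an .xml graph plus a .bin weight file; batch_size=0 keeps
# whatever batch size the IR was exported with.
model.load_openvino("/path/to/model.xml", "/path/to/model.bin", batch_size=0)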

def load_tf(self, model_path, backend="tensorflow",
intra_op_parallelism_threads=1, inter_op_parallelism_threads=1,
@@ -94,24 +96,24 @@ def load_tf(self, model_path, backend="tensorflow",
"""
backend = backend.lower()
if backend == "tensorflow" or backend == "tf":
-callBigDlFunc(self.bigdl_type, "inferenceModelTensorFlowLoadTF",
-    self.value, model_path, intra_op_parallelism_threads,
-    inter_op_parallelism_threads, use_per_session_threads)
+callZooFunc(self.bigdl_type, "inferenceModelTensorFlowLoadTF",
+    self.value, model_path, intra_op_parallelism_threads,
+    inter_op_parallelism_threads, use_per_session_threads)
elif backend == "openvino" or backend == "ov":
if model_type:
if ov_pipeline_config_path:
-callBigDlFunc(self.bigdl_type, "inferenceModelOpenVINOLoadTF",
-    self.value, model_path, model_type, ov_pipeline_config_path, None)
+callZooFunc(self.bigdl_type, "inferenceModelOpenVINOLoadTF",
+    self.value, model_path, model_type, ov_pipeline_config_path, None)
else:
-callBigDlFunc(self.bigdl_type, "inferenceModelOpenVINOLoadTF",
-    self.value, model_path, model_type)
+callZooFunc(self.bigdl_type, "inferenceModelOpenVINOLoadTF",
+    self.value, model_path, model_type)
else:
if ov_pipeline_config_path is None and ov_extensions_config_path is None:
raise Exception("For openvino backend, you must provide either model_type or "
"both pipeline_config_path and extensions_config_path")
-callBigDlFunc(self.bigdl_type, "inferenceModelOpenVINOLoadTF",
-    self.value, model_path, ov_pipeline_config_path,
-    ov_extensions_config_path)
+callZooFunc(self.bigdl_type, "inferenceModelOpenVINOLoadTF",
+    self.value, model_path, ov_pipeline_config_path,
+    ov_extensions_config_path)
else:
raise ValueError("Currently only tensorflow and openvino are supported as backend")
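
A sketch of the two dispatch branches above; the frozen-graph path and the model_type string are placeholders, and the import path is assumed as in the earlier sketches:

from zoo.pipeline.inference import InferenceModel

# TensorFlow backend: load the frozen graph directly, with threading options.
tf_model = InferenceModel()
tf_model.load_tf("/path/to/frozen_inference_graph.pb", backend="tf",
                 intra_op_parallelism_threads=4, inter_op_parallelism_threads=1)

# OpenVINO backend: give a known model_type, or pass both
# ov_pipeline_config_path and ov_extensions_config_path instead.
ov_model = InferenceModel()
ov_model.load_tf("/path/to/frozen_inference_graph.pb", backend="openvino",
                 model_type="faster_rcnn_resnet101_coco")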

@@ -129,13 +131,13 @@ def load_tf_object_detection_as_openvino(self,
:param extensions_config_path: String, the path of the extensions configure file
:return:
"""
-callBigDlFunc(self.bigdl_type,
-    "inferenceModelOpenVINOLoadTF",
-    self.value,
-    model_path,
-    object_detection_model_type,
-    pipeline_config_path,
-    extensions_config_path)
+callZooFunc(self.bigdl_type,
+    "inferenceModelOpenVINOLoadTF",
+    self.value,
+    model_path,
+    object_detection_model_type,
+    pipeline_config_path,
+    extensions_config_path)
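
A usage sketch for the object-detection conversion path; every path and the model type are placeholders:

from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
# Converts a frozen TF object-detection graph to OpenVINO IR at load time; the
# extensions config is the model-optimizer transformation file for the chosen detector.
model.load_tf_object_detection_as_openvino(
    model_path="/path/to/frozen_inference_graph.pb",
    object_detection_model_type="faster_rcnn_resnet101_coco",
    pipeline_config_path="/path/to/pipeline.config",
    extensions_config_path="/path/to/extensions_config.json")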

def load_tf_image_classification_as_openvino(self,
model_path,
@@ -159,16 +161,16 @@ def load_tf_image_classification_as_openvino(self,
:param scale: Float, the scale value, to be used for the input image per channel.
:return:
"""
-callBigDlFunc(self.bigdl_type,
-    "inferenceModelOpenVINOLoadTF",
-    self.value,
-    model_path,
-    image_classification_model_type,
-    checkpoint_path,
-    input_shape,
-    if_reverse_input_channels,
-    [float(value) for value in mean_values],
-    float(scale))
+callZooFunc(self.bigdl_type,
-    "inferenceModelOpenVINOLoadTF",
+    self.value,
+    model_path,
+    image_classification_model_type,
+    checkpoint_path,
+    input_shape,
+    if_reverse_input_channels,
+    [float(value) for value in mean_values],
+    float(scale))
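
A usage sketch for the image-classification conversion path; all values below are placeholders chosen to illustrate typical preprocessing:

from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
# mean_values and scale describe the per-channel preprocessing baked into the generated IR.
model.load_tf_image_classification_as_openvino(
    model_path="/path/to/frozen_graph.pb",
    image_classification_model_type="resnet_v1_50",
    checkpoint_path="/path/to/model.ckpt",
    input_shape=[1, 224, 224, 3],
    if_reverse_input_channels=True,
    mean_values=[123.68, 116.78, 103.94],
    scale=1.0)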

def load_tf_as_calibrated_openvino(self,
model_path,
@@ -207,20 +209,20 @@ def load_tf_as_calibrated_openvino(self,
please also refer to https://github.com/opencv/opencv.
:return:
"""
-callBigDlFunc(self.bigdl_type,
-    "inferenceModelOpenVINOLoadTFAsCalibratedOpenVINO",
-    self.value,
-    model_path,
-    model_type,
-    checkpoint_path,
-    input_shape,
-    if_reverse_input_channels,
-    [float(value) for value in mean_values],
-    float(scale),
-    network_type,
-    validation_file_path,
-    subset,
-    opencv_lib_path)
+callZooFunc(self.bigdl_type,
+    "inferenceModelOpenVINOLoadTFAsCalibratedOpenVINO",
+    self.value,
+    model_path,
+    model_type,
+    checkpoint_path,
+    input_shape,
+    if_reverse_input_channels,
+    [float(value) for value in mean_values],
+    float(scale),
+    network_type,
+    validation_file_path,
+    subset,
+    opencv_lib_path)
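
A hedged sketch of the calibrated (INT8) variant; every value is a placeholder, and the network_type and subset conventions are assumptions based on the OpenVINO calibration tool, not spelled out in this file:

from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
model.load_tf_as_calibrated_openvino(
    model_path="/path/to/frozen_graph.pb",
    model_type="resnet_v1_50",
    checkpoint_path="/path/to/model.ckpt",
    input_shape=[1, 224, 224, 3],
    if_reverse_input_channels=True,
    mean_values=[123.68, 116.78, 103.94],
    scale=1.0,
    network_type="C",                      # classification network (assumed convention)
    validation_file_path="/path/to/validation.txt",
    subset=32,                             # number of validation samples used for calibration (assumed)
    opencv_lib_path="/path/to/opencv/lib")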

def predict(self, inputs):
"""
@@ -229,9 +231,9 @@ def predict(self, inputs):
:param inputs: A numpy array or a list of numpy arrays or JTensor or a list of JTensors.
"""
jinputs, input_is_table = Layer.check_input(inputs)
-output = callBigDlFunc(self.bigdl_type,
-    "inferenceModelPredict",
-    self.value,
-    jinputs,
-    input_is_table)
+output = callZooFunc(self.bigdl_type,
+    "inferenceModelPredict",
+    self.value,
+    jinputs,
+    input_is_table)
return KerasNet.convert_output(output)
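
Finally, a minimal end-to-end inference sketch; numpy, the input shape, and the file paths are placeholders/assumptions, not part of the diff:

import numpy as np
from zoo.pipeline.inference import InferenceModel

model = InferenceModel()
model.load_openvino("/path/to/model.xml", "/path/to/model.bin")
# A numpy array, a list of arrays, a JTensor, or a list of JTensors are accepted;
# the JVM result comes back through KerasNet.convert_output (typically numpy arrays).
batch = np.random.rand(1, 3, 224, 224).astype("float32")
predictions = model.predict(batch)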
