diff --git a/apps/model-inference-examples/recommendation-inference/pom.xml b/apps/model-inference-examples/recommendation-inference/pom.xml
index feb16bde7b1..8362c26ba3f 100644
--- a/apps/model-inference-examples/recommendation-inference/pom.xml
+++ b/apps/model-inference-examples/recommendation-inference/pom.xml
@@ -11,7 +11,7 @@
simple example of ncf recommender inference
2.4.3
- 0.10.0
+ 0.11.1
0.9.0-SNAPSHOT
diff --git a/docker/cluster-serving/ClusterServingSparkVersionDockerfile b/docker/cluster-serving/ClusterServingSparkVersionDockerfile
index 7fa21608e84..693f236b03e 100644
--- a/docker/cluster-serving/ClusterServingSparkVersionDockerfile
+++ b/docker/cluster-serving/ClusterServingSparkVersionDockerfile
@@ -5,7 +5,7 @@ MAINTAINER The Analytics Zoo Authors https://github.com/intel-analytics/analytic
WORKDIR /opt/work
ARG SPARK_VERSION=2.4.3
-ARG BIGDL_VERSION=0.10.0
+ARG BIGDL_VERSION=0.11.1
ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
ENV SPARK_HOME /opt/work/spark-${SPARK_VERSION}
diff --git a/docker/cluster-serving/Dockerfile b/docker/cluster-serving/Dockerfile
index 56e28860b13..388b150cacf 100644
--- a/docker/cluster-serving/Dockerfile
+++ b/docker/cluster-serving/Dockerfile
@@ -5,7 +5,7 @@ MAINTAINER The Analytics Zoo Authors https://github.com/intel-analytics/analytic
WORKDIR /opt/work
ARG SPARK_VERSION=2.4.3
-ARG BIGDL_VERSION=0.10.0
+ARG BIGDL_VERSION=0.11.1
ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
ARG FLINK_VERSION=1.10.0
diff --git a/docker/cluster-serving/download-cluster-serving-all-zip.sh b/docker/cluster-serving/download-cluster-serving-all-zip.sh
index 447576de8f6..9e3417b9444 100644
--- a/docker/cluster-serving/download-cluster-serving-all-zip.sh
+++ b/docker/cluster-serving/download-cluster-serving-all-zip.sh
@@ -17,7 +17,7 @@
#
if [ -z "${ANALYTICS_ZOO_VERSION}" ]; then
export ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
- export BIGDL_VERSION=0.10.0
+ export BIGDL_VERSION=0.11.1
export SPARK_VERSION=2.4.3
echo "You did not specify ANALYTICS_ZOO_VERSION, will download "$ANALYTICS_ZOO_VERSION
fi
diff --git a/docker/hyperzoo/Dockerfile b/docker/hyperzoo/Dockerfile
index 05587977800..9381dfa12f2 100644
--- a/docker/hyperzoo/Dockerfile
+++ b/docker/hyperzoo/Dockerfile
@@ -3,7 +3,7 @@ FROM ubuntu:18.04
MAINTAINER The Analytics-Zoo Authors https://github.com/intel-analytics/analytics-zoo
ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
-ARG BIGDL_VERSION=0.10.0
+ARG BIGDL_VERSION=0.11.1
ARG SPARK_VERSION=2.4.3
ENV ANALYTICS_ZOO_VERSION ${ANALYTICS_ZOO_VERSION}
diff --git a/docker/zoo/Dockerfile b/docker/zoo/Dockerfile
index 061a7c56540..883ce336cfc 100644
--- a/docker/zoo/Dockerfile
+++ b/docker/zoo/Dockerfile
@@ -21,7 +21,7 @@ MAINTAINER The Analytics-Zoo Authors https://github.com/intel-analytics/analytic
WORKDIR /opt/work
ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
-ARG BIGDL_VERSION=0.10.0
+ARG BIGDL_VERSION=0.11.1
ARG SPARK_VERSION=2.4.3
ARG RUNTIME_SPARK_MASTER=local[4]
ARG RUNTIME_DRIVER_CORES=4
diff --git a/docs/docs/DockerUserGuide/index.md b/docs/docs/DockerUserGuide/index.md
index ba3a632c5d4..f0975a5be04 100644
--- a/docs/docs/DockerUserGuide/index.md
+++ b/docs/docs/DockerUserGuide/index.md
@@ -249,7 +249,7 @@ sudo docker build \
```
sudo docker build \
--build-arg ANALYTICS_ZOO_VERSION=0.7.0 \
- --build-arg BIGDL_VERSION=0.10.0 \
+ --build-arg BIGDL_VERSION=0.11.1 \
--build-arg SPARK_VERSION=2.4.3 \
- --rm -t intelanalytics/analytics-zoo:0.7.0-bigdl_0.10.0-spark_2.4.3 .
+ --rm -t intelanalytics/analytics-zoo:0.7.0-bigdl_0.11.1-spark_2.4.3 .
```
diff --git a/docs/docs/PythonUserGuide/install.md b/docs/docs/PythonUserGuide/install.md
index 840ac8b0f03..81b5ce3bd72 100644
--- a/docs/docs/PythonUserGuide/install.md
+++ b/docs/docs/PythonUserGuide/install.md
@@ -43,7 +43,7 @@ sc = init_nncontext()
1. We've tested this package with pip 9.0.1. `pip install --upgrade pip` if necessary.
2. Pip install supports __Mac__ and __Linux__ platforms.
3. You need to install Java __>= JDK8__ before running Analytics Zoo, which is required by `pyspark`.
-4. `bigdl==0.10.0`, `pyspark==2.4.3`, `conda-pack==0.3.1` and their dependencies will be automatically installed if they haven't been detected in the current Python environment.
+4. `bigdl==0.11.1`, `pyspark==2.4.3`, `conda-pack==0.3.1` and their dependencies will be automatically installed if they haven't been detected in the current Python environment.
---
## **Install from pip for Yarn cluster**
diff --git a/pyzoo/setup.py b/pyzoo/setup.py
index 56d4733b072..556031b3e80 100755
--- a/pyzoo/setup.py
+++ b/pyzoo/setup.py
@@ -136,7 +136,7 @@ def setup_package():
license='Apache License, Version 2.0',
url='https://github.com/intel-analytics/analytics-zoo',
packages=packages,
- install_requires=['pyspark==2.4.3', 'bigdl==0.10.0', 'conda-pack==0.3.1'],
+ install_requires=['pyspark==2.4.3', 'bigdl==0.11.1', 'conda-pack==0.3.1'],
extras_require={'ray': ['ray==0.8.4', 'psutil', 'aiohttp',
'setproctitle', 'pyarrow==0.17.0'],
'automl': ['tensorflow>=1.15.0,<2.0.0', 'ray[tune]==0.8.4', 'psutil',
diff --git a/scripts/cluster-serving/download-serving-jar.sh b/scripts/cluster-serving/download-serving-jar.sh
index a3a82e0e0dc..b5e3a589adf 100644
--- a/scripts/cluster-serving/download-serving-jar.sh
+++ b/scripts/cluster-serving/download-serving-jar.sh
@@ -17,7 +17,7 @@
#
if [ -z "${ANALYTICS_ZOO_VERSION}" ]; then
export ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
- export BIGDL_VERSION=0.10.0
+ export BIGDL_VERSION=0.11.1
export SPARK_VERSION=2.4.3
echo "You did not specify ANALYTICS_ZOO_VERSION, will download "$ANALYTICS_ZOO_VERSION
fi
diff --git a/zoo/pom.xml b/zoo/pom.xml
index 64672d61505..909d659bd6a 100644
--- a/zoo/pom.xml
+++ b/zoo/pom.xml
@@ -27,7 +27,7 @@
2.1.0
2.2.4
2.4.3
- 0.10.0
+ 0.11.1
zoo-core-dist-all
pom
http://download.tensorflow.org
@@ -623,7 +623,7 @@
com.google.protobuf
- com.intel.analytics.zoo.shaded.protobuf
+ com.intel.analytics.shaded.protobuf_v_3_5_1
io.netty
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/CaffeLoader.scala b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/CaffeLoader.scala
index dcd29713869..9760d98f86c 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/CaffeLoader.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/CaffeLoader.scala
@@ -22,8 +22,8 @@ import caffe.Caffe._
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn._
-import com.intel.analytics.bigdl.shaded.protobuf.TextFormat.ParseException
-import com.intel.analytics.bigdl.shaded.protobuf.{CodedInputStream, GeneratedMessage, TextFormat}
+import com.intel.analytics.shaded.protobuf_v_3_5_1.TextFormat.ParseException
+import com.intel.analytics.shaded.protobuf_v_3_5_1.{CodedInputStream, GeneratedMessage, TextFormat}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{Node, Table}
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/Converter.scala b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/Converter.scala
index c67ca5a52a0..6bc250ef24a 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/Converter.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/Converter.scala
@@ -24,7 +24,7 @@ import caffe.Caffe._
import com.intel.analytics.bigdl.nn.Graph._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
-import com.intel.analytics.bigdl.shaded.protobuf.GeneratedMessage
+import com.intel.analytics.shaded.protobuf_v_3_5_1.GeneratedMessage
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.caffe.CaffeConversionException
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/LayerConverter.scala b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/LayerConverter.scala
index 559446ba7bb..303c4ec395a 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/LayerConverter.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/LayerConverter.scala
@@ -20,7 +20,7 @@ import caffe.Caffe
import caffe.Caffe.EltwiseParameter.EltwiseOp
import caffe.Caffe.LRNParameter.NormRegion
import caffe.Caffe.{BlobProto, PoolingParameter, _}
-import com.intel.analytics.bigdl.shaded.protobuf.GeneratedMessage
+import com.intel.analytics.shaded.protobuf_v_3_5_1.GeneratedMessage
import com.intel.analytics.bigdl.nn.Graph._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/V1LayerConverter.scala b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/V1LayerConverter.scala
index 9c3176447be..669abbfba0e 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/V1LayerConverter.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/models/caffe/V1LayerConverter.scala
@@ -21,7 +21,7 @@ import caffe.Caffe.EltwiseParameter.EltwiseOp
import caffe.Caffe.LRNParameter.NormRegion
import caffe.Caffe.V1LayerParameter.LayerType
import caffe.Caffe._
-import com.intel.analytics.bigdl.shaded.protobuf.GeneratedMessage
+import com.intel.analytics.shaded.protobuf_v_3_5_1.GeneratedMessage
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/models/Topology.scala b/zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/models/Topology.scala
index afd5c99dd2a..c93c40a11ba 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/models/Topology.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/models/Topology.scala
@@ -29,7 +29,7 @@ import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.keras.{KerasLayer, KerasLayerSerializable}
import com.intel.analytics.bigdl.nn.mkldnn.MklDnnModule
import com.intel.analytics.bigdl.nn.{Container, Graph, Module, StaticGraph, Sequential => TSequential}
-import com.intel.analytics.bigdl.optim.DistriOptimizer.Cache
+import com.intel.analytics.bigdl.optim.DistriOptimizer.{Cache, CacheV1}
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.parameters.AllReduceParameter
import com.intel.analytics.bigdl.serialization.Bigdl.BigDLModule
@@ -1007,14 +1007,14 @@ private[zoo] object InternalOptimizerUtil {
def initThreadModels[T: ClassTag](
args: Object*)(
- implicit ev: TensorNumeric[T]): (RDD[DistriOptimizer.Cache[T]], ModelBroadcast[T]) = {
+ implicit ev: TensorNumeric[T]): (RDD[DistriOptimizer.CacheV1[T]], ModelBroadcast[T]) = {
KerasUtils.invokeMethodWithEv(DistriOptimizer,
"com$intel$analytics$bigdl$optim$DistriOptimizer$$initThreadModels",
- args: _*).asInstanceOf[(RDD[DistriOptimizer.Cache[T]], ModelBroadcast[T])]
+ args: _*).asInstanceOf[(RDD[DistriOptimizer.CacheV1[T]], ModelBroadcast[T])]
}
def clearState[T: ClassTag](
- models: RDD[DistriOptimizer.Cache[T]]): Unit = {
+ models: RDD[DistriOptimizer.CacheV1[T]]): Unit = {
KerasUtils.invokeMethod(DistriOptimizer,
"clearState", models, implicitly[reflect.ClassTag[T]])
}
@@ -1034,7 +1034,7 @@ private[zoo] object InternalOptimizerUtil {
- // TODO: Delete this when switch to Bigdl 0.11.0.
+ // TODO(review): still needed after the BigDL 0.11.1 upgrade (uses CacheV1); remove once no longer required.
def getTorchModel[T: ClassTag](
- models: RDD[Cache[T]],
+ models: RDD[CacheV1[T]],
parameters: AllReduceParameter[T],
trainingModel: TorchModel)(implicit ev: TensorNumeric[T]): TorchModel = {
val partitionNum = models.partitions.length
@@ -1113,7 +1113,7 @@ private[zoo] class InternalDistriOptimizer[T: ClassTag] (
import InternalDistriOptimizer._
protected var checkpointDir: Option[String] = None
protected var numSlice: Int = 1
- protected var cachedModels: RDD[DistriOptimizer.Cache[T]] = null
+ protected var cachedModels: RDD[DistriOptimizer.CacheV1[T]] = null
protected var modelBroadcast: ModelBroadcast[T] = null
protected var parameterSplits: Map[String, (Int, Int)] = null
protected var allReduceParameter: AllReduceParameter[T] = null
@@ -1457,7 +1457,7 @@ private[zoo] class InternalDistriOptimizer[T: ClassTag] (
val models = if (null != cachedModels) {
val bcVMethods = cachedModels.sparkContext.broadcast(validationMethod)
cachedModels.map{cache =>
- Cache[T](
+ CacheV1[T](
cache.localModels,
cache.modelWeights,
cache.modelGradients,
@@ -1474,7 +1474,7 @@ private[zoo] class InternalDistriOptimizer[T: ClassTag] (
val bcVMethods = validateRDD.sparkContext.broadcast(validationMethod)
val bcModel = ModelBroadcast[T]().broadcast(sc, _model)
validateRDD.mapPartitions{_ =>
- Iterator.single(Cache[T](
+ Iterator.single(CacheV1[T](
Array.tabulate(_subModelNumber)(_ => bcModel.value()),
null,
null,
@@ -1513,7 +1513,7 @@ object InternalDistriOptimizer {
protected def validate[T](validationFeatureSet: FeatureSet[MiniBatch[T]],
validationMethods: Array[ValidationMethod[T]],
- models: RDD[Cache[T]],
+ models: RDD[CacheV1[T]],
step: Int,
validationSummary: Option[ValidationSummary]
): Map[ValidationMethod[T], ValidationResult] = {
@@ -1590,7 +1590,7 @@ object InternalDistriOptimizer {
}
def unpersistCachedModel[T: ClassTag](
- models: RDD[DistriOptimizer.Cache[T]] ): Unit = {
+ models: RDD[DistriOptimizer.CacheV1[T]] ): Unit = {
models.mapPartitions { iter =>
iter.foreach { arrayModels =>
arrayModels.localModels.foreach(_.release())
@@ -1600,7 +1600,7 @@ object InternalDistriOptimizer {
models.unpersist()
}
- def getModel[T: ClassTag](models: RDD[Cache[T]],
+ def getModel[T: ClassTag](models: RDD[CacheV1[T]],
parameters: AllReduceParameter[T],
trainingModel: Module[T])(implicit ev: TensorNumeric[T])
: Module[T] = {
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/FileWriter.scala b/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/FileWriter.scala
index 12926d70240..eebaf7b5739 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/FileWriter.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/FileWriter.scala
@@ -20,8 +20,8 @@ import com.intel.analytics.bigdl.utils.{Engine, ThreadPool}
import com.intel.analytics.zoo.pipeline.api.keras.layers.utils.EngineRef
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
-import com.intel.analytics.bigdl.shaded.tensorflow.framework.GraphDef
-import com.intel.analytics.bigdl.shaded.tensorflow
+import org.tensorflow.framework
+import org.tensorflow.framework.{GraphDef, GradientDef}
import org.tensorflow.util.Event
/**
@@ -52,7 +52,7 @@ private[zoo] class FileWriter(val logDirectory : String, flushMillis: Int = 1000
* @param globalStep a consistent global count of the event.
* @return
*/
- def addSummary(summary: tensorflow.framework.Summary, globalStep: Long): this.type = {
+ def addSummary(summary: framework.Summary, globalStep: Long): this.type = {
val event = Event.newBuilder().setSummary(summary).build()
// val event = Builder()
addEvent(event, globalStep)
diff --git a/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/Summary.scala b/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/Summary.scala
index c9e058ad9f9..b1c6fe2a1f6 100644
--- a/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/Summary.scala
+++ b/zoo/src/main/scala/com/intel/analytics/zoo/tensorboard/Summary.scala
@@ -18,7 +18,7 @@ package com.intel.analytics.zoo.tensorboard
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
-import com.intel.analytics.bigdl.shaded.tensorflow
+import org.tensorflow
import scala.reflect.ClassTag