Skip to content

Commit

Permalink
update bigdl version (#2743)
Browse files Browse the repository at this point in the history
* update bigdl version
  • Loading branch information
Le-Zheng committed Aug 26, 2020
1 parent 1fccfee commit 65c92a3
Show file tree
Hide file tree
Showing 18 changed files with 32 additions and 32 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
<description>simple example of ncf recommender inference</description>
<properties>
<spark.version>2.4.3</spark.version>
<bigdl.version>0.10.0</bigdl.version>
<bigdl.version>0.11.1</bigdl.version>
<zoo.version>0.9.0-SNAPSHOT</zoo.version>
</properties>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ MAINTAINER The Analytics Zoo Authors https://github.com/intel-analytics/analytic
WORKDIR /opt/work

ARG SPARK_VERSION=2.4.3
ARG BIGDL_VERSION=0.10.0
ARG BIGDL_VERSION=0.11.1
ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT

ENV SPARK_HOME /opt/work/spark-${SPARK_VERSION}
Expand Down
2 changes: 1 addition & 1 deletion docker/cluster-serving/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ MAINTAINER The Analytics Zoo Authors https://github.com/intel-analytics/analytic
WORKDIR /opt/work

ARG SPARK_VERSION=2.4.3
ARG BIGDL_VERSION=0.10.0
ARG BIGDL_VERSION=0.11.1
ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
ARG FLINK_VERSION=1.10.0

Expand Down
2 changes: 1 addition & 1 deletion docker/cluster-serving/download-cluster-serving-all-zip.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
#
if [ -z "${ANALYTICS_ZOO_VERSION}" ]; then
export ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
export BIGDL_VERSION=0.10.0
export BIGDL_VERSION=0.11.1
export SPARK_VERSION=2.4.3
echo "You did not specify ANALYTICS_ZOO_VERSION, will download "$ANALYTICS_ZOO_VERSION
fi
Expand Down
2 changes: 1 addition & 1 deletion docker/hyperzoo/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ FROM ubuntu:18.04
MAINTAINER The Analytics-Zoo Authors https://github.com/intel-analytics/analytics-zoo

ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
ARG BIGDL_VERSION=0.10.0
ARG BIGDL_VERSION=0.11.1
ARG SPARK_VERSION=2.4.3

ENV ANALYTICS_ZOO_VERSION ${ANALYTICS_ZOO_VERSION}
Expand Down
2 changes: 1 addition & 1 deletion docker/zoo/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ MAINTAINER The Analytics-Zoo Authors https://github.com/intel-analytics/analytic
WORKDIR /opt/work

ARG ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
ARG BIGDL_VERSION=0.10.0
ARG BIGDL_VERSION=0.11.1
ARG SPARK_VERSION=2.4.3
ARG RUNTIME_SPARK_MASTER=local[4]
ARG RUNTIME_DRIVER_CORES=4
Expand Down
2 changes: 1 addition & 1 deletion docs/docs/DockerUserGuide/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ sudo docker build \
```
sudo docker build \
--build-arg ANALYTICS_ZOO_VERSION=0.7.0 \
--build-arg BIGDL_VERSION=0.10.0 \
--build-arg BIGDL_VERSION=0.11.1 \
--build-arg SPARK_VERSION=2.4.3 \
--rm -t intelanalytics/analytics-zoo:0.7.0-bigdl_0.11.1-spark_2.4.3 .
```
Expand Down
2 changes: 1 addition & 1 deletion docs/docs/PythonUserGuide/install.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ sc = init_nncontext()
1. We've tested this package with pip 9.0.1. `pip install --upgrade pip` if necessary.
2. Pip install supports __Mac__ and __Linux__ platforms.
3. You need to install Java __>= JDK8__ before running Analytics Zoo, which is required by `pyspark`.
4. `bigdl==0.10.0`, `pyspark==2.4.3`, `conda-pack==0.3.1` and their dependencies will be automatically installed if they haven't been detected in the current Python environment.
4. `bigdl==0.11.1`, `pyspark==2.4.3`, `conda-pack==0.3.1` and their dependencies will be automatically installed if they haven't been detected in the current Python environment.

---
## **Install from pip for Yarn cluster**
Expand Down
2 changes: 1 addition & 1 deletion pyzoo/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def setup_package():
license='Apache License, Version 2.0',
url='https://github.com/intel-analytics/analytics-zoo',
packages=packages,
install_requires=['pyspark==2.4.3', 'bigdl==0.10.0', 'conda-pack==0.3.1'],
install_requires=['pyspark==2.4.3', 'bigdl==0.11.1', 'conda-pack==0.3.1'],
extras_require={'ray': ['ray==0.8.4', 'psutil', 'aiohttp',
'setproctitle', 'pyarrow==0.17.0'],
'automl': ['tensorflow>=1.15.0,<2.0.0', 'ray[tune]==0.8.4', 'psutil',
Expand Down
2 changes: 1 addition & 1 deletion scripts/cluster-serving/download-serving-jar.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
#
if [ -z "${ANALYTICS_ZOO_VERSION}" ]; then
export ANALYTICS_ZOO_VERSION=0.9.0-SNAPSHOT
export BIGDL_VERSION=0.10.0
export BIGDL_VERSION=0.11.1
export SPARK_VERSION=2.4.3
echo "You did not specify ANALYTICS_ZOO_VERSION, will download "$ANALYTICS_ZOO_VERSION
fi
Expand Down
4 changes: 2 additions & 2 deletions zoo/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<scala.macros.version>2.1.0</scala.macros.version>
<scalatest.version>2.2.4</scalatest.version>
<spark.version>2.4.3</spark.version>
<bigdl.version>0.10.0</bigdl.version>
<bigdl.version>0.11.1</bigdl.version>
<core.artifactId>zoo-core-dist-all</core.artifactId>
<core.dependencyType>pom</core.dependencyType>
<data-store-url>http://download.tensorflow.org</data-store-url>
Expand Down Expand Up @@ -623,7 +623,7 @@
<relocations>
<relocation>
<pattern>com.google.protobuf</pattern>
<shadedPattern>com.intel.analytics.zoo.shaded.protobuf</shadedPattern>
<shadedPattern>com.intel.analytics.shaded.protobuf_v_3_5_1</shadedPattern>
</relocation>
<relocation>
<pattern>io.netty</pattern>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ import caffe.Caffe._
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.shaded.protobuf.TextFormat.ParseException
import com.intel.analytics.bigdl.shaded.protobuf.{CodedInputStream, GeneratedMessage, TextFormat}
import com.intel.analytics.shaded.protobuf_v_3_5_1.TextFormat.ParseException
import com.intel.analytics.shaded.protobuf_v_3_5_1.{CodedInputStream, GeneratedMessage, TextFormat}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{Node, Table}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import caffe.Caffe._
import com.intel.analytics.bigdl.nn.Graph._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.shaded.protobuf.GeneratedMessage
import com.intel.analytics.shaded.protobuf_v_3_5_1.GeneratedMessage
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.caffe.CaffeConversionException

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import caffe.Caffe
import caffe.Caffe.EltwiseParameter.EltwiseOp
import caffe.Caffe.LRNParameter.NormRegion
import caffe.Caffe.{BlobProto, PoolingParameter, _}
import com.intel.analytics.bigdl.shaded.protobuf.GeneratedMessage
import com.intel.analytics.shaded.protobuf_v_3_5_1.GeneratedMessage
import com.intel.analytics.bigdl.nn.Graph._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import caffe.Caffe.EltwiseParameter.EltwiseOp
import caffe.Caffe.LRNParameter.NormRegion
import caffe.Caffe.V1LayerParameter.LayerType
import caffe.Caffe._
import com.intel.analytics.bigdl.shaded.protobuf.GeneratedMessage
import com.intel.analytics.shaded.protobuf_v_3_5_1.GeneratedMessage
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.keras.{KerasLayer, KerasLayerSerializable}
import com.intel.analytics.bigdl.nn.mkldnn.MklDnnModule
import com.intel.analytics.bigdl.nn.{Container, Graph, Module, StaticGraph, Sequential => TSequential}
import com.intel.analytics.bigdl.optim.DistriOptimizer.Cache
import com.intel.analytics.bigdl.optim.DistriOptimizer.{Cache, CacheV1}
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.parameters.AllReduceParameter
import com.intel.analytics.bigdl.serialization.Bigdl.BigDLModule
Expand Down Expand Up @@ -1007,14 +1007,14 @@ private[zoo] object InternalOptimizerUtil {

def initThreadModels[T: ClassTag](
args: Object*)(
implicit ev: TensorNumeric[T]): (RDD[DistriOptimizer.Cache[T]], ModelBroadcast[T]) = {
implicit ev: TensorNumeric[T]): (RDD[DistriOptimizer.CacheV1[T]], ModelBroadcast[T]) = {
KerasUtils.invokeMethodWithEv(DistriOptimizer,
"com$intel$analytics$bigdl$optim$DistriOptimizer$$initThreadModels",
args: _*).asInstanceOf[(RDD[DistriOptimizer.Cache[T]], ModelBroadcast[T])]
args: _*).asInstanceOf[(RDD[DistriOptimizer.CacheV1[T]], ModelBroadcast[T])]
}

def clearState[T: ClassTag](
models: RDD[DistriOptimizer.Cache[T]]): Unit = {
models: RDD[DistriOptimizer.CacheV1[T]]): Unit = {
KerasUtils.invokeMethod(DistriOptimizer,
"clearState", models, implicitly[reflect.ClassTag[T]])
}
Expand All @@ -1034,7 +1034,7 @@ private[zoo] object InternalOptimizerUtil {

// TODO: Delete this when switch to Bigdl 0.11.0.
def getTorchModel[T: ClassTag](
models: RDD[Cache[T]],
models: RDD[CacheV1[T]],
parameters: AllReduceParameter[T],
trainingModel: TorchModel)(implicit ev: TensorNumeric[T]): TorchModel = {
val partitionNum = models.partitions.length
Expand Down Expand Up @@ -1113,7 +1113,7 @@ private[zoo] class InternalDistriOptimizer[T: ClassTag] (
import InternalDistriOptimizer._
protected var checkpointDir: Option[String] = None
protected var numSlice: Int = 1
protected var cachedModels: RDD[DistriOptimizer.Cache[T]] = null
protected var cachedModels: RDD[DistriOptimizer.CacheV1[T]] = null
protected var modelBroadcast: ModelBroadcast[T] = null
protected var parameterSplits: Map[String, (Int, Int)] = null
protected var allReduceParameter: AllReduceParameter[T] = null
Expand Down Expand Up @@ -1457,7 +1457,7 @@ private[zoo] class InternalDistriOptimizer[T: ClassTag] (
val models = if (null != cachedModels) {
val bcVMethods = cachedModels.sparkContext.broadcast(validationMethod)
cachedModels.map{cache =>
Cache[T](
CacheV1[T](
cache.localModels,
cache.modelWeights,
cache.modelGradients,
Expand All @@ -1474,7 +1474,7 @@ private[zoo] class InternalDistriOptimizer[T: ClassTag] (
val bcVMethods = validateRDD.sparkContext.broadcast(validationMethod)
val bcModel = ModelBroadcast[T]().broadcast(sc, _model)
validateRDD.mapPartitions{_ =>
Iterator.single(Cache[T](
Iterator.single(CacheV1[T](
Array.tabulate(_subModelNumber)(_ => bcModel.value()),
null,
null,
Expand Down Expand Up @@ -1513,7 +1513,7 @@ object InternalDistriOptimizer {

protected def validate[T](validationFeatureSet: FeatureSet[MiniBatch[T]],
validationMethods: Array[ValidationMethod[T]],
models: RDD[Cache[T]],
models: RDD[CacheV1[T]],
step: Int,
validationSummary: Option[ValidationSummary]
): Map[ValidationMethod[T], ValidationResult] = {
Expand Down Expand Up @@ -1590,7 +1590,7 @@ object InternalDistriOptimizer {
}

def unpersistCachedModel[T: ClassTag](
models: RDD[DistriOptimizer.Cache[T]] ): Unit = {
models: RDD[DistriOptimizer.CacheV1[T]] ): Unit = {
models.mapPartitions { iter =>
iter.foreach { arrayModels =>
arrayModels.localModels.foreach(_.release())
Expand All @@ -1600,7 +1600,7 @@ object InternalDistriOptimizer {
models.unpersist()
}

def getModel[T: ClassTag](models: RDD[Cache[T]],
def getModel[T: ClassTag](models: RDD[CacheV1[T]],
parameters: AllReduceParameter[T],
trainingModel: Module[T])(implicit ev: TensorNumeric[T])
: Module[T] = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ import com.intel.analytics.bigdl.utils.{Engine, ThreadPool}
import com.intel.analytics.zoo.pipeline.api.keras.layers.utils.EngineRef
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import com.intel.analytics.bigdl.shaded.tensorflow.framework.GraphDef
import com.intel.analytics.bigdl.shaded.tensorflow
import org.tensorflow.framework
import org.tensorflow.framework.{GraphDef, GradientDef}
import org.tensorflow.util.Event

/**
Expand Down Expand Up @@ -52,7 +52,7 @@ private[zoo] class FileWriter(val logDirectory : String, flushMillis: Int = 1000
* @param globalStep a consistent global count of the event.
* @return
*/
def addSummary(summary: tensorflow.framework.Summary, globalStep: Long): this.type = {
def addSummary(summary: framework.Summary, globalStep: Long): this.type = {
val event = Event.newBuilder().setSummary(summary).build()
// val event = Builder()
addEvent(event, globalStep)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ package com.intel.analytics.zoo.tensorboard

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.shaded.tensorflow
import org.tensorflow

import scala.reflect.ClassTag

Expand Down

0 comments on commit 65c92a3

Please sign in to comment.