update zoo related doc to bigdl (#4058)

Authored by dding3 on 2022-02-18 11:07:36 -08:00; committed by GitHub
parent 193a063504
commit 63c96a9acb
2 changed files with 76 additions and 75 deletions


@@ -288,9 +288,9 @@ output:
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import SparseDense
-from zoo.pipeline.api.keras.models import Sequential
-from bigdl.util.common import JTensor
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
+from bigdl.dllib.utils.common import JTensor
 model = Sequential()
 model.add(SparseDense(output_dim=2, input_shape=(3, 4)))
@@ -404,8 +404,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import SoftShrink
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(SoftShrink(0.6, input_shape=(2, 3, 4)))
@@ -528,8 +528,9 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Reshape
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Reshape(target_shape=(3, 8), input_shape=(2, 3, 4)))
@@ -644,8 +645,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Merge, InputLayer
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 l1 = InputLayer(input_shape=(3, 4))
@@ -739,8 +740,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import MaxoutDense
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(MaxoutDense(2, input_shape=(3, )))
@@ -814,8 +815,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Squeeze
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Squeeze(1, input_shape=(1, 1, 32)))
@@ -929,8 +930,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import BinaryThreshold
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(BinaryThreshold(input_shape=(2, 3, 4)))
@@ -1023,8 +1024,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Sqrt
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Sqrt(input_shape=(3, )))
@@ -1107,8 +1108,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Mul
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Mul(input_shape=(3, 4)))
@@ -1200,8 +1201,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import MulConstant
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import *
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(MulConstant(2.2, input_shape=(3, 4)))
@@ -1282,8 +1283,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Scale
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Scale
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Scale((2, 1), input_shape=(3, )))
@@ -1368,8 +1369,8 @@ NaN -1.1666392 -0.36804697 -0.72597617
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Log
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Log
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Log(input_shape=(2, 4, 4)))
@@ -1482,8 +1483,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Identity
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Identity
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Identity(input_shape=(4, 4)))
@@ -1587,8 +1588,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import Select
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Select
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -1662,8 +1663,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import Dense
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Dense
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(Dense(5, activation="relu", input_shape=(4, )))
@@ -1736,8 +1737,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import Negative
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Negative
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -1824,8 +1825,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import CAdd
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import CAdd
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -1910,8 +1911,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.layers import RepeatVector
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import RepeatVector
+from bigdl.dllib.keras.models import Sequential
 model = Sequential()
 model.add(RepeatVector(4, input_shape=(3, )))
@@ -2009,8 +2010,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.models import Sequential
-from zoo.pipeline.api.keras.layers import GaussianSampler
+from bigdl.dllib.keras.models import Sequential
+from bigdl.dllib.keras.layers import GaussianSampler
 model = Sequential()
 model.add(GaussianSampler(input_shape=[(3,),(3,)]))
@@ -2118,8 +2119,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.models import Sequential
-from zoo.pipeline.api.keras.layers import Exp
+from bigdl.dllib.keras.models import Sequential
+from bigdl.dllib.keras.layers import Exp
 model = Sequential()
 model.add(Exp(input_shape=(2, 3, 4)))
@@ -2250,8 +2251,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
 import numpy as np
-from zoo.pipeline.api.keras.models import Sequential
-from zoo.pipeline.api.keras.layers import Square
+from bigdl.dllib.keras.models import Sequential
+from bigdl.dllib.keras.layers import Square
 model = Sequential()
 model.add(Square(input_shape=(2, 3, 4)))
@@ -2359,8 +2360,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import Power
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Power
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -2443,8 +2444,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import AddConstant
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import AddConstant
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -2562,8 +2563,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import Narrow
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Narrow
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -2673,8 +2674,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import Permute
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import Permute
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
@@ -2788,8 +2789,8 @@ output: com.intel.analytics.bigdl.nn.abstractnn.Activity =
 **Python example:**
 ```python
-from zoo.pipeline.api.keras.layers import ResizeBilinear
-from zoo.pipeline.api.keras.models import Sequential
+from bigdl.dllib.keras.layers import ResizeBilinear
+from bigdl.dllib.keras.models import Sequential
 import numpy as np
 model = Sequential()
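
All of the hunks above make the same substitution, so one migrated snippet is enough to show the end result. The following is a minimal sketch only: it mirrors the `Dense` example from the diff, assumes a SparkContext with the BigDL engine has already been initialized, and uses `forward` purely as a local shape check.

```python
import numpy as np
from bigdl.dllib.keras.layers import Dense      # new import path introduced by this commit
from bigdl.dllib.keras.models import Sequential

# Build the same small model the layer docs use.
model = Sequential()
model.add(Dense(5, activation="relu", input_shape=(4, )))

# Local forward pass on random data to confirm the output shape.
input = np.random.random([2, 4])
output = model.forward(input)
print(output.shape)  # expected: (2, 5)
```

The second changed file, the NNFrames documentation, applies the same renaming scheme to the `nnframes` and `feature` packages.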


@@ -8,14 +8,14 @@
 The examples are included in the DLlib source code.
-- image classification: model inference using pre-trained Inception v1 model. (See [Scala version](https://github.com/intel-analytics/analytics-zoo/tree/master/zoo/src/main/scala/com/intel/analytics/zoo/examples/nnframes/imageInference) and [Python version](https://github.com/intel-analytics/analytics-zoo/tree/master/pyzoo/zoo/examples/nnframes/imageInference))
-- image classification: transfer learning from pre-trained Inception v1 model. (See [Scala version](https://github.com/intel-analytics/analytics-zoo/tree/master/zoo/src/main/scala/com/intel/analytics/zoo/examples/nnframes/imageTransferLearning) and [Python version](https://github.com/intel-analytics/analytics-zoo/tree/master/pyzoo/zoo/examples/nnframes/imageTransferLearning))
+- image classification: model inference using pre-trained Inception v1 model. (See [Python version](https://github.com/intel-analytics/BigDL/tree/branch-2.0/python/dllib/examples/nnframes/imageInference))
+- image classification: transfer learning from pre-trained Inception v1 model. (See [Python version](https://github.com/intel-analytics/BigDL/tree/branch-2.0/python/dllib/examples/nnframes/imageTransferLearning))
 ## 2. Primary APIs
 - **NNEstimator and NNModel**
-Analytics Zoo provides `NNEstimator` for model training with Spark DataFrame, which provides a high-level API for training a BigDL Model with the Apache Spark [Estimator](https://spark.apache.org/docs/2.1.1/ml-pipeline.html#estimators) and [Transformer](https://spark.apache.org/docs/2.1.1/ml-pipeline.html#transformers) pattern, thus users can conveniently fit Analytics Zoo into an ML pipeline. The fit result of `NNEstimator` is an NNModel, which is a Spark ML Transformer.
+BigDL DLLib provides `NNEstimator` for model training with Spark DataFrame, which provides a high-level API for training a BigDL Model with the Apache Spark [Estimator](https://spark.apache.org/docs/2.1.1/ml-pipeline.html#estimators) and [Transformer](https://spark.apache.org/docs/2.1.1/ml-pipeline.html#transformers) pattern, thus users can conveniently fit BigDL DLLib into an ML pipeline. The fit result of `NNEstimator` is an NNModel, which is a Spark ML Transformer.
 - **NNClassifier and NNClassifierModel**
@@ -46,13 +46,13 @@ to allow users to combine the components of BigDL and Spark MLlib.
 `NNEstimator` supports different feature and label data types through `Preprocessing`. During fit (training), NNEstimator will extract feature and label data from the input DataFrame and use the `Preprocessing` to convert the data for the model, typically converting the feature and label to Tensors or converting the (feature, Option[Label]) tuple to a BigDL `Sample`.
 Each `Preprocessing` conducts a data conversion step in the preprocessing phase; multiple `Preprocessing` can be combined into a `ChainedPreprocessing`. Some pre-defined
-`Preprocessing` for popular data types like Image, Array or Vector are provided in package `com.intel.analytics.zoo.feature`, while users can also develop customized `Preprocessing`.
+`Preprocessing` for popular data types like Image, Array or Vector are provided in package `com.intel.analytics.bigdl.dllib.feature`, while users can also develop customized `Preprocessing`.
 NNEstimator and NNClassifier also support setting the caching level for the training data. Options are "DRAM", "PMEM" or "DISK_AND_DRAM". If DISK_AND_DRAM(numSlice) is used, only 1/numSlice of the data will be loaded into memory during training. By default, DRAM mode is used and all data are cached in memory.
 By default, `SeqToTensor` is used to convert an array or Vector to a 1-dimension Tensor. Using the `Preprocessing` allows `NNEstimator` to cache only the raw data and decrease the memory consumption during feature conversion and training; it also enables the model to digest extra data types that DataFrame does not currently support.
-More concrete examples are available in package `com.intel.analytics.zoo.examples.nnframes`
+More concrete examples are available in package `com.intel.analytics.bigdl.dllib.examples.nnframes`
 `NNEstimator` can be created with various parameters for different scenarios.
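
To make the `Preprocessing` description above concrete, the sketch below wires an explicit feature and label `Preprocessing` into `NNEstimator`. It is an assumption-laden sketch: `SeqToTensor` and `NNEstimator` are taken to be exposed from `bigdl.dllib.feature.common` and `bigdl.dllib.nnframes.nn_classifier` exactly as they were from the corresponding `zoo` packages.

```python
from bigdl.dllib.nn.layer import Sequential, Linear
from bigdl.dllib.nn.criterion import MSECriterion
from bigdl.dllib.nnframes.nn_classifier import NNEstimator
from bigdl.dllib.feature.common import SeqToTensor  # assumed to match zoo.feature.common

# Explicit Preprocessing: convert the "features" array (2 numbers) and the
# "label" array (2 numbers) into Tensors of the given shapes before training.
model = Sequential().add(Linear(2, 2))
criterion = MSECriterion()
estimator = NNEstimator(model, criterion, SeqToTensor([2]), SeqToTensor([2])) \
    .setBatchSize(4) \
    .setMaxEpoch(10)
```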
@@ -78,9 +78,9 @@ Meanwhile, for advanced use cases (e.g. model with multiple input tensor), `NNEs
 **Scala Example:**
 ```scala
-import com.intel.analytics.bigdl.nn._
-import com.intel.analytics.zoo.pipeline.nnframes.NNEstimator
-import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric.NumericFloat
+import com.intel.analytics.bigdl.dllib.nn._
+import com.intel.analytics.bigdl.dllib.nnframes.NNEstimator
+import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric.NumericFloat
 val model = Sequential().add(Linear(2, 2))
 val criterion = MSECriterion()
@@ -99,11 +99,11 @@ nnModel.transform(df).show(false)
 **Python Example:**
 ```python
-from bigdl.nn.layer import *
-from bigdl.nn.criterion import *
-from bigdl.util.common import *
-from zoo.pipeline.nnframes.nn_classifier import *
-from zoo.feature.common import *
+from bigdl.dllib.nn.layer import *
+from bigdl.dllib.nn.criterion import *
+from bigdl.dllib.utils.common import *
+from bigdl.dllib.nnframes.nn_classifier import *
+from bigdl.dllib.feature.common import *
 data = self.sc.parallelize([
 ((2.0, 1.0), (1.0, 2.0)),
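
The Python example above is cut off by the diff context. A possible end-to-end completion is sketched below; the shape-list constructor `NNEstimator(model, criterion, [2], [2])` and the setter names follow the original Analytics Zoo Python API and are assumed to carry over to `bigdl.dllib` unchanged.

```python
from bigdl.dllib.nn.layer import Sequential, Linear
from bigdl.dllib.nn.criterion import MSECriterion
from bigdl.dllib.nnframes.nn_classifier import NNEstimator
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, ArrayType, DoubleType

spark = SparkSession.builder.getOrCreate()
data = spark.sparkContext.parallelize([
    ((2.0, 1.0), (1.0, 2.0)),
    ((1.0, 2.0), (2.0, 1.0)),
    ((2.0, 1.0), (1.0, 2.0)),
    ((1.0, 2.0), (2.0, 1.0))])
schema = StructType([
    StructField("features", ArrayType(DoubleType(), False), False),
    StructField("label", ArrayType(DoubleType(), False), False)])
df = spark.createDataFrame(data, schema)

model = Sequential().add(Linear(2, 2))
criterion = MSECriterion()
# Feature and label sizes given as shape lists; NNEstimator converts both columns to Tensors.
estimator = NNEstimator(model, criterion, [2], [2]) \
    .setBatchSize(4).setLearningRate(0.2).setMaxEpoch(10)

nnModel = estimator.fit(df)               # NNModel, a Spark ML Transformer
nnModel.transform(df).show(truncate=False)
```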
@@ -252,9 +252,9 @@ Meanwhile, for advanced use cases (e.g. model with multiple input tensor), `NNCl
 **Scala example:**
 ```scala
-import com.intel.analytics.bigdl.nn._
-import com.intel.analytics.zoo.pipeline.nnframes.NNClassifier
-import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric.NumericFloat
+import com.intel.analytics.bigdl.dllib.nn._
+import com.intel.analytics.bigdl.dllib.nnframes.NNClassifier
+import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric.NumericFloat
 val model = Sequential().add(Linear(2, 2))
 val criterion = MSECriterion()
@@ -280,7 +280,7 @@ from bigdl.util.common import *
 from bigdl.dlframes.dl_classifier import *
 from pyspark.sql.types import *
-#Logistic Regression with BigDL layers and Analytics zoo NNClassifier
+#Logistic Regression with BigDL layers and NNClassifier
 model = Sequential().add(Linear(2, 2)).add(LogSoftMax())
 criterion = ZooClassNLLCriterion()
 estimator = NNClassifier(model, criterion, [2]).setBatchSize(4).setMaxEpoch(10)
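
For reference, here is a self-contained sketch of this logistic-regression flow under the new `bigdl.dllib` import paths. Plain `ClassNLLCriterion` from `bigdl.dllib.nn.criterion` is substituted for `ZooClassNLLCriterion`, and the module paths are inferred from the renames in this commit rather than taken verbatim from the diff.

```python
from bigdl.dllib.nn.layer import Sequential, Linear, LogSoftMax
from bigdl.dllib.nn.criterion import ClassNLLCriterion  # substituted for ZooClassNLLCriterion
from bigdl.dllib.nnframes.nn_classifier import NNClassifier
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
# Lists (not tuples) so the "features" column is inferred as an array type.
df = spark.createDataFrame(
    [([0.0, 1.0], 1.0), ([1.0, 0.0], 2.0),
     ([0.0, 1.0], 1.0), ([1.0, 0.0], 2.0)],
    ["features", "label"])

model = Sequential().add(Linear(2, 2)).add(LogSoftMax())
criterion = ClassNLLCriterion()  # expects log-probabilities, matching LogSoftMax
classifier = NNClassifier(model, criterion, [2]) \
    .setBatchSize(4).setMaxEpoch(10).setLearningRate(0.01)

nnClassifierModel = classifier.fit(df)    # NNClassifierModel, a Spark ML Transformer
nnClassifierModel.transform(df).show()
```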
@@ -358,7 +358,7 @@ classifier.setBatchSize(4).setMaxEpoch(10).setLearningRate(0.01).setOptimMethod(
 NNEstimator/NNClassifier supports training with Spark's [DataFrame/DataSet](https://spark.apache.org/docs/latest/sql-programming-guide.html#datasets-and-dataframes)
-Suppose `df` is the training data, simply call the `fit` method and let Analytics Zoo train the model for you.
+Suppose `df` is the training data, simply call the `fit` method and let BigDL DLLib train the model for you.
 **Scala:**
@@ -393,8 +393,8 @@ nnModel.transform(df).show(false)
 ```
 For the complete examples of NNFrames, please refer to:
-[Scala examples](https://github.com/intel-analytics/analytics-zoo/tree/master/zoo/src/main/scala/com/intel/analytics/zoo/examples/nnframes)
-[Python examples](https://github.com/intel-analytics/analytics-zoo/tree/master/pyzoo/zoo/examples/nnframes)
+[Scala examples](https://github.com/intel-analytics/BigDL/tree/branch-2.0/scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/example/nnframes)
+[Python examples](https://github.com/intel-analytics/BigDL/tree/branch-2.0/python/dllib/examples/nnframes)
 ### 2.8 NNImageReader
@@ -411,7 +411,7 @@ Python:
 image_frame = NNImageReader.readImages(image_path, self.sc)
 ```
-The output DataFrame contains a single column named "image". The schema of the "image" column can be accessed from `com.intel.analytics.zoo.pipeline.nnframes.DLImageSchema.byteSchema`. Each record in the "image" column represents one image record, in the format of Row(origin, height, width, num of channels, mode, data), where origin contains the URI for the image file, and `data` holds the original file bytes for the image file. `mode` represents the OpenCV-compatible type: CV_8UC3, CV_8UC1 in most cases.
+The output DataFrame contains a single column named "image". The schema of the "image" column can be accessed from `com.intel.analytics.bigdl.dllib.nnframes.DLImageSchema.byteSchema`. Each record in the "image" column represents one image record, in the format of Row(origin, height, width, num of channels, mode, data), where origin contains the URI for the image file, and `data` holds the original file bytes for the image file. `mode` represents the OpenCV-compatible type: CV_8UC3, CV_8UC1 in most cases.
 ```scala
 val byteSchema = StructType(
@@ -425,4 +425,4 @@ val byteSchema = StructType(
 StructField("data", BinaryType, false) :: Nil)
 ```
-After loading the image, users can compose the preprocessing steps with the `Preprocessing` defined in `com.intel.analytics.zoo.feature.image`.
+After loading the image, users can compose the preprocessing steps with the `Preprocessing` defined in `com.intel.analytics.bigdl.dllib.feature.image`.
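
To make the `NNImageReader` description concrete, here is a small hedged sketch. It assumes `NNImageReader` is importable from `bigdl.dllib.nnframes` (as it was from `zoo.pipeline.nnframes`) and uses a placeholder image path.

```python
from pyspark.sql import SparkSession
from bigdl.dllib.nnframes import NNImageReader  # assumed export location

spark = SparkSession.builder.getOrCreate()
sc = spark.sparkContext

# Read a folder of images into a DataFrame with a single "image" struct column.
image_df = NNImageReader.readImages("hdfs://path/to/images", sc)

# The struct follows byteSchema above: origin, height, width, nChannels, mode, data.
image_df.select("image.origin", "image.height", "image.width", "image.mode") \
    .show(5, truncate=False)
```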