class DLModel[T] extends com.intel.analytics.bigdl.dlframes.DLModel[T]
Deprecated. Please refer to package com.intel.analytics.bigdl.dlframes.
DLModel helps embed a BigDL model into a Spark Transformer, thus Spark users can conveniently merge BigDL into Spark ML pipeline. DLModel supports feature data in the format of Array[Double], Array[Float], org.apache.spark.mllib.linalg.{Vector, VectorUDT}, org.apache.spark.ml.linalg.{Vector, VectorUDT}, Double and Float. Internally DLModel uses the features column as storage of the feature data, and creates Tensors according to the constructor parameter featureSize.
DLModel is compatible with both spark 1.5-plus and 2.0 by extending ML Transformer.
- Annotations
- @deprecated
- Deprecated
(Since version 0.5.0)
- Alphabetic
- By Inheritance
- DLModel
- DLModel
- DLParams
- HasBatchSize
- VectorCompatibility
- HasPredictionCol
- HasPredictionCol
- HasFeaturesCol
- HasFeaturesCol
- DLTransformerBase
- Model
- Transformer
- PipelineStage
- Logging
- Params
- Serializable
- Serializable
- Identifiable
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
DLModel(model: Module[T], featureSize: Array[Int], uid: String = "DLModel")(implicit arg0: ClassTag[T], ev: TensorNumeric[T])
- model
trained BigDL model to use in prediction.
- featureSize
The size (Tensor dimensions) of the feature data (e.g. an image may have featureSize = 28 * 28).
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
$[T](param: Param[T]): T
- Attributes
- protected
- Definition Classes
- Params
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
final
val
batchSize: Param[Int]
- Definition Classes
- HasBatchSize
-
final
def
clear(param: Param[_]): DLModel.this.type
- Definition Classes
- Params
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
def
copy(extra: ParamMap): com.intel.analytics.bigdl.dlframes.DLModel[T]
- Definition Classes
- DLModel → DLTransformerBase → Model → Transformer → PipelineStage → Params
-
def
copyValues[T <: Params](to: T, extra: ParamMap): T
- Attributes
- protected
- Definition Classes
- Params
-
final
def
defaultCopy[T <: Params](extra: ParamMap): T
- Attributes
- protected
- Definition Classes
- Params
-
final
val
endWhen: Param[Trigger]
When to stop the training, passed in a Trigger.
When to stop the training, passed in a Trigger. E.g. Trigger.maxIterations
- Definition Classes
- DLParams
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
explainParam(param: Param[_]): String
- Definition Classes
- Params
-
def
explainParams(): String
- Definition Classes
- Params
-
final
def
extractParamMap(): ParamMap
- Definition Classes
- Params
-
final
def
extractParamMap(extra: ParamMap): ParamMap
- Definition Classes
- Params
-
var
featureSize: Array[Int]
- Definition Classes
- DLModel
-
final
val
featuresCol: Param[String]
- Definition Classes
- HasFeaturesCol
-
def
finalize(): Unit
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
get[T](param: Param[T]): Option[T]
- Definition Classes
- Params
-
def
getBatchSize: Int
- Definition Classes
- HasBatchSize
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getConvertFunc(colType: DataType): (Row, Int) ⇒ Seq[AnyVal]
Get conversion function to extract data from original DataFrame.
Get conversion function to extract data from original DataFrame.
- Attributes
- protected
- Definition Classes
- DLParams
-
final
def
getDefault[T](param: Param[T]): Option[T]
- Definition Classes
- Params
-
def
getEndWhen: Trigger
- Definition Classes
- DLParams
-
def
getFeatureSize: Array[Int]
- Definition Classes
- DLModel
-
final
def
getFeaturesCol: String
- Definition Classes
- HasFeaturesCol
-
def
getLearningRate: Double
- Definition Classes
- DLParams
-
def
getLearningRateDecay: Double
- Definition Classes
- DLParams
-
def
getMaxEpoch: Int
- Definition Classes
- DLParams
-
def
getOptimMethod: OptimMethod[T]
- Definition Classes
- DLParams
-
final
def
getOrDefault[T](param: Param[T]): T
- Definition Classes
- Params
-
def
getParam(paramName: String): Param[Any]
- Definition Classes
- Params
-
final
def
getPredictionCol: String
- Definition Classes
- HasPredictionCol
-
def
getVectorSeq(row: Row, colType: DataType, index: Int): Seq[AnyVal]
- Definition Classes
- VectorCompatibility
-
final
def
hasDefault[T](param: Param[T]): Boolean
- Definition Classes
- Params
-
def
hasParam(paramName: String): Boolean
- Definition Classes
- Params
-
def
hasParent: Boolean
- Definition Classes
- Model
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
def
initializeLogIfNecessary(isInterpreter: Boolean): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
internalTransform(dataFrame: DataFrame): DataFrame
Perform a prediction on featureCol, and write result to the predictionCol.
Perform a prediction on featureCol, and write result to the predictionCol.
- Attributes
- protected
- Definition Classes
- DLModel → DLTransformerBase
-
final
def
isDefined(param: Param[_]): Boolean
- Definition Classes
- Params
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
isSet(param: Param[_]): Boolean
- Definition Classes
- Params
-
def
isTraceEnabled(): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
final
val
learningRate: DoubleParam
learning rate for the optimizer in the DLEstimator.
learning rate for the optimizer in the DLEstimator. Default: 0.001
- Definition Classes
- DLParams
-
final
val
learningRateDecay: DoubleParam
learning rate decay for each iteration.
learning rate decay for each iteration. Default: 0
- Definition Classes
- DLParams
-
def
log: Logger
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logName: String
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
final
val
maxEpoch: IntParam
Number of max Epoch for the training, an epoch refers to a traverse over the training data. Default: 50
Number of max Epoch for the training, an epoch refers to a traverse over the training data. Default: 50
- Definition Classes
- DLParams
- val model: Module[T]
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
val
optimMethod: Param[OptimMethod[T]]
optimization method to be used.
optimization method to be used. BigDL supports many optimization methods like Adam, SGD and LBFGS. Refer to package com.intel.analytics.bigdl.optim for all the options. Default: SGD
- Definition Classes
- DLParams
-
def
outputToPrediction(output: Tensor[T]): Any
- Attributes
- protected
- Definition Classes
- DLModel
-
lazy val
params: Array[Param[_]]
- Definition Classes
- Params
-
var
parent: Estimator[com.intel.analytics.bigdl.dlframes.DLModel[T]]
- Definition Classes
- Model
-
final
val
predictionCol: Param[String]
- Definition Classes
- HasPredictionCol
-
final
def
set(paramPair: ParamPair[_]): DLModel.this.type
- Attributes
- protected
- Definition Classes
- Params
-
final
def
set(param: String, value: Any): DLModel.this.type
- Attributes
- protected
- Definition Classes
- Params
-
final
def
set[T](param: Param[T], value: T): DLModel.this.type
- Definition Classes
- Params
-
def
setBatchSize(value: Int): DLModel.this.type
- Definition Classes
- DLModel
-
final
def
setDefault(paramPairs: ParamPair[_]*): DLModel.this.type
- Attributes
- protected
- Definition Classes
- Params
-
final
def
setDefault[T](param: Param[T], value: T): DLModel.this.type
- Attributes
- protected
- Definition Classes
- Params
-
def
setFeatureSize(value: Array[Int]): DLModel.this.type
- Definition Classes
- DLModel
-
def
setFeaturesCol(featuresColName: String): DLModel.this.type
- Definition Classes
- DLModel
-
def
setParent(parent: Estimator[com.intel.analytics.bigdl.dlframes.DLModel[T]]): com.intel.analytics.bigdl.dlframes.DLModel[T]
- Definition Classes
- Model
-
def
setPredictionCol(value: String): DLModel.this.type
- Definition Classes
- DLModel
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- Identifiable → AnyRef → Any
-
def
transform(dataset: Dataset[_]): DataFrame
- Definition Classes
- DLTransformerBase → Transformer
-
def
transform(dataset: Dataset[_], paramMap: ParamMap): DataFrame
- Definition Classes
- Transformer
- Annotations
- @Since( "2.0.0" )
-
def
transform(dataset: Dataset[_], firstParamPair: ParamPair[_], otherParamPairs: ParamPair[_]*): DataFrame
- Definition Classes
- Transformer
- Annotations
- @Since( "2.0.0" ) @varargs()
-
def
transformSchema(schema: StructType): StructType
- Definition Classes
- DLModel → PipelineStage
-
def
transformSchema(schema: StructType, logging: Boolean): StructType
- Attributes
- protected
- Definition Classes
- PipelineStage
- Annotations
- @DeveloperApi()
- val uid: String
-
val
validVectorTypes: Seq[UserDefinedType[_ >: Vector with Vector <: Serializable] { def sqlType: org.apache.spark.sql.types.StructType }]
- Definition Classes
- VectorCompatibility
-
def
validateDataType(schema: StructType, colName: String): Unit
Validate if feature and label columns are of supported data types.
Validate if feature and label columns are of supported data types.
- Attributes
- protected
- Definition Classes
- DLParams
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )