class ClusterServingInference extends AnyRef
- Alphabetic
- By Inheritance
- ClusterServingInference
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
ClusterServingInference(modelKey: String = null)
- modelKey
Whether multiple Cluster Serving jobs share the same process (JVM). If they do not share a process, modelKey should be null.
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- def batchInput(seq: Seq[(String, Activity)], batchSize: Int, useMultiThreading: Boolean, resizeFlag: Boolean = true): Activity
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- val helper: ClusterServingHelper
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val logger: Logger
- def multiThreadInference(in: List[(String, Activity)]): List[(String, String)]
- def multiThreadPipeline(in: List[(String, String, String)]): List[(String, String)]
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def preProcess(in: List[(String, String, String)], multiThread: Boolean = false): List[(String, Activity)]
- val preProcessing: PreProcessing
-
def
singleThreadBatchInference(in: List[(String, Activity)]): List[(String, String)]
Deprecated Currently used for the OpenVINO model: multiple threads are used for inference, and a single thread for other operations. Normally only one model is used, and every thread in the pipeline tries to acquire this model when it reaches the inference stage. The resize flag does not need to be set, because only OpenVINO uses it, and OpenVINO supports only a fixed input size, so a variable batch size is not supported.
- def singleThreadInference(in: List[(String, Activity)]): List[(String, String)]
- def singleThreadPipeline(in: List[(String, String, String)]): List[(String, String)]
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
def
typeCheck(input: Activity): Activity
Used for single-thread inference, to construct a batchSize = 1 input. Also returns a Tensor if the input Table has only one element.
Used for single-thread inference, to construct a batchSize = 1 input. Also returns a Tensor if the input Table has only one element.
- input
Input table or tensor
- returns
input with a single-element batch constructed
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
Deprecated Value Members
-
def
dimCheck(input: Activity, op: String, modelType: String): Activity
Adds or removes the singleton dimension for some specific model types.
Adds or removes the singleton dimension for some specific model types.
- input
the input to change dimension
- op
String, "add" or "remove"
- modelType
model type
- returns
input with dimension changed
- Annotations
- @deprecated
- Deprecated