Package: org.apache.spark.sql.execution.streaming.sources

Class: RatePerMicroBatchStream

class RatePerMicroBatchStream extends SupportsTriggerAvailableNow with MicroBatchStream with Logging

Linear Supertypes
Logging, MicroBatchStream, SupportsTriggerAvailableNow, SupportsAdmissionControl, SparkDataStream, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. RatePerMicroBatchStream
  2. Logging
  3. MicroBatchStream
  4. SupportsTriggerAvailableNow
  5. SupportsAdmissionControl
  6. SparkDataStream
  7. AnyRef
  8. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new RatePerMicroBatchStream(rowsPerBatch: Long, numPartitions: Int, startTimestamp: Long, advanceMsPerBatch: Int, options: CaseInsensitiveStringMap)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  6. def commit(end: connector.read.streaming.Offset): Unit
    Definition Classes
    RatePerMicroBatchStream → SparkDataStream
  7. def createReaderFactory(): PartitionReaderFactory
    Definition Classes
    RatePerMicroBatchStream → MicroBatchStream
  8. def deserializeOffset(json: String): connector.read.streaming.Offset
    Definition Classes
    RatePerMicroBatchStream → SparkDataStream
  9. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  10. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  11. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  12. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  13. def getDefaultReadLimit(): ReadLimit
    Definition Classes
    RatePerMicroBatchStream → SupportsAdmissionControl
  14. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  15. def initialOffset(): connector.read.streaming.Offset
    Definition Classes
    RatePerMicroBatchStream → SparkDataStream
  16. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  17. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  18. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  19. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  20. def latestOffset(startOffset: connector.read.streaming.Offset, limit: ReadLimit): connector.read.streaming.Offset
    Definition Classes
    RatePerMicroBatchStream → SupportsAdmissionControl
  21. def latestOffset(): connector.read.streaming.Offset
    Definition Classes
    RatePerMicroBatchStream → MicroBatchStream
  22. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  23. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  24. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  25. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  26. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  27. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  28. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  29. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  30. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  31. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  32. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  33. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  34. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  35. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  36. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  37. def planInputPartitions(start: connector.read.streaming.Offset, end: connector.read.streaming.Offset): Array[InputPartition]
    Definition Classes
    RatePerMicroBatchStream → MicroBatchStream
  38. def prepareForTriggerAvailableNow(): Unit
    Definition Classes
    RatePerMicroBatchStream → SupportsTriggerAvailableNow
  39. def reportLatestOffset(): connector.read.streaming.Offset
    Definition Classes
    SupportsAdmissionControl
  40. def stop(): Unit
    Definition Classes
    RatePerMicroBatchStream → SparkDataStream
  41. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  42. def toString(): String
    Definition Classes
    RatePerMicroBatchStream → AnyRef → Any
  43. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  44. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  45. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()

Inherited from Logging

Inherited from MicroBatchStream

Inherited from SupportsTriggerAvailableNow

Inherited from SupportsAdmissionControl

Inherited from SparkDataStream

Inherited from AnyRef

Inherited from Any

Ungrouped