case class ExecuteEventsManager(executeHolder: ExecuteHolder, clock: Clock) extends Product with Serializable
Posts Spark Connect request events to [[org.apache.spark.scheduler.LiveListenerBus]].
- Alphabetic
- By Inheritance
- ExecuteEventsManager
- Serializable
- Product
- Equals
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Instance Constructors
- new ExecuteEventsManager(executeHolder: ExecuteHolder, clock: Clock)
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- val clock: Clock
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- def createQueryPlanningTracker(): QueryPlanningTracker
- returns
A [[org.apache.spark.sql.catalyst.QueryPlanningTracker]] that calls `postAnalyzed` and `postReadyForExecution` after analysis and prior to execution.
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- val executeHolder: ExecuteHolder
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def postAnalyzed(analyzedPlan: Option[LogicalPlan] = None): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationAnalyzed]].
- analyzedPlan
The analyzed plan generated by the Connect request plan. None when the request does not generate a plan.
- def postCanceled(): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationCanceled]].
- def postClosed(): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationClosed]].
- def postFailed(errorMessage: String): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationFailed]].
- errorMessage
The message of the error thrown during the request.
- def postFinished(producedRowsCountOpt: Option[Long] = None): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationFinished]].
- producedRowsCountOpt
Number of rows that are returned to the user. None is expected when the operation does not return any rows.
- def postReadyForExecution(): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationReadyForExecution]].
- def postStarted(): Unit
Posts [[org.apache.spark.sql.connect.service.SparkListenerConnectOperationStarted]].
- def productElementNames: Iterator[String]
- Definition Classes
- Product
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()