object TrampolineUtil

Linear Supertypes
  AnyRef, Any

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def addShutdownHook(priority: Int, runnable: Runnable): AnyRef

    Add a shutdown hook with the given priority. A usage sketch follows the member list.

  5. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  6. def asNullable(dt: DataType): DataType
  7. def bytesToString(size: Long): String

    Get a human-readable string, e.g. "4.0 MiB", for a size in bytes. A usage sketch follows the member list.

  8. def classForName[C](className: String, initialize: Boolean = true, noSparkClassLoader: Boolean = false): Class[C]
  9. def cleanupAnyExistingSession(): Unit

    Shuts down and cleans up any existing Spark session

  10. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  11. def createCodec(conf: SparkConf): CompressionCodec
  12. def createCodec(conf: SparkConf, codecName: String): CompressionCodec
  13. def createSerializerManager(conf: SparkConf): SerializerManager
  14. def dataTypeExistsRecursively(dt: DataType, f: (DataType) ⇒ Boolean): Boolean

    Return true if the provided predicate function returns true for any type node within the datatype tree. A usage sketch follows the member list.

  15. def doExecuteBroadcast[T](child: SparkPlan): Broadcast[T]
  16. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  17. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  18. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  19. def fromAttributes(attrs: Seq[Attribute]): StructType
  20. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  21. def getCodecShortName(codecName: String): String
  22. def getCoresInLocalMode(master: String, conf: SparkConf): Int
  23. def getFSBytesReadOnThreadCallback(): () ⇒ Long

    Returns a function that can be called to find Hadoop FileSystem bytes read. If getFSBytesReadOnThreadCallback is called from thread r at time t, the returned callback will return the bytes read on r since t. A usage sketch follows the member list.

  24. def getSerializerManager(): SerializerManager
  25. def getSimpleName(cls: Class[_]): String

    Get the simple name of a class with fixup for any Scala internal errors

  26. def getSparkConf(spark: SparkSession): SQLConf
  27. def getTaskMemoryManager(): TaskMemoryManager
  28. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  29. def incBytesRead(inputMetrics: InputMetrics, bytesRead: Long): Unit

    Set the bytes-read value on the given task input metrics

  30. def incInputRecordsRows(inputMetrics: InputMetrics, rows: Long): Unit
  31. def incTaskMetricsDiskBytesSpilled(amountSpilled: Long): Unit

    Increment the task's disk bytes spilled metric. If the current thread does not correspond to a Spark task then this call does nothing. A usage sketch follows the member list.

    amountSpilled: number of bytes spilled to disk

  32. def incTaskMetricsMemoryBytesSpilled(amountSpilled: Long): Unit

    Increment the task's memory bytes spilled metric. If the current thread does not correspond to a Spark task then this call does nothing. The sketch after the member list covers this and the disk variant.

    amountSpilled: amount of memory spilled, in bytes

  33. def isCompressSpill(conf: SparkConf): Boolean
  34. def isDriver(sparkConf: SparkConf): Boolean
  35. def isDriver(env: SparkEnv): Boolean

    Returns true if called from code running on the Spark driver.

  36. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  37. def isSupportedRelation(mode: BroadcastMode): Boolean
  38. def jsonValue(dataType: DataType): JValue
  39. def makeSparkUpgradeException(version: String, message: String, cause: Throwable): SparkUpgradeException
  40. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  41. def newBlockManagerId(execId: String, host: String, port: Int, topologyInfo: Option[String] = None): BlockManagerId

    Create a BlockManagerId instance

  42. def newInputMetrics(): InputMetrics

    Return a new InputMetrics instance

  43. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  44. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  45. def sameType(a: DataType, b: DataType): Boolean

    Check if a and b are the same data type when ignoring nullability (StructField.nullable, ArrayType.containsNull, and MapType.valueContainsNull). A usage sketch follows the member list.

  46. def setExecutorEnv(sc: SparkContext, key: String, value: String): Unit
  47. def setTaskContext(tc: TaskContext): Unit

    Set the task context for the current thread. A usage sketch follows the member list.

  48. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  49. def throwAnalysisException(msg: String): Nothing

    Throw a Spark analysis exception

  50. def toAttributes(structType: StructType): Seq[Attribute]
  51. def toString(): String
    Definition Classes
    AnyRef → Any
  52. def unionLikeMerge(left: DataType, right: DataType): DataType
  53. def unsetTaskContext(): Unit

    Remove the task context for the current thread

  54. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  55. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  56. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
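
Usage Sketches

The sketches below illustrate how a few of the members above might be called. They are illustrative only: the import path org.apache.spark.sql.rapids.execution.TrampolineUtil is assumed, and any helper names are hypothetical.

Item 4, addShutdownHook: a minimal sketch that registers a cleanup action to run at JVM shutdown. The priority value 50 is arbitrary; in Spark's shutdown-hook handling, higher-priority hooks generally run earlier.

  import org.apache.spark.sql.rapids.execution.TrampolineUtil

  object ShutdownHookExample {
    def main(args: Array[String]): Unit = {
      // The returned AnyRef is a handle identifying the registered hook.
      val hook = TrampolineUtil.addShutdownHook(50, new Runnable {
        override def run(): Unit = println("cleaning up temporary state")
      })
      println(s"registered shutdown hook: $hook")
    }
  }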
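
Item 7, bytesToString: format a byte count as a human-readable string, e.g. for log messages.

  import org.apache.spark.sql.rapids.execution.TrampolineUtil

  object BytesToStringExample {
    def main(args: Array[String]): Unit = {
      val size = 4L * 1024 * 1024                  // 4 MiB expressed in bytes
      println(TrampolineUtil.bytesToString(size))  // expected output: "4.0 MiB"
    }
  }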
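
Item 14, dataTypeExistsRecursively: walk a nested schema and ask whether any type node satisfies a predicate, here whether a decimal column appears anywhere in the tree.

  import org.apache.spark.sql.rapids.execution.TrampolineUtil
  import org.apache.spark.sql.types._

  object DataTypeWalkExample {
    def main(args: Array[String]): Unit = {
      // A nested schema: an array of structs that contain a decimal field.
      val itemType = StructType(Seq(StructField("price", DecimalType(10, 2))))
      val schema   = StructType(Seq(StructField("items", ArrayType(itemType))))

      // True, because a DecimalType node exists somewhere in the type tree.
      val hasDecimal = TrampolineUtil.dataTypeExistsRecursively(
        schema, _.isInstanceOf[DecimalType])
      println(hasDecimal)
    }
  }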
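
Item 23, getFSBytesReadOnThreadCallback: capture a per-thread counter before doing Hadoop FileSystem I/O, then read how many bytes this thread has read since the callback was created. readSplit() is a hypothetical placeholder for real file access.

  import org.apache.spark.sql.rapids.execution.TrampolineUtil

  object BytesReadExample {
    // Hypothetical stand-in for code that reads from a Hadoop FileSystem.
    def readSplit(): Unit = ()

    def main(args: Array[String]): Unit = {
      val bytesReadSince = TrampolineUtil.getFSBytesReadOnThreadCallback()
      readSplit()
      // Bytes read on this thread since the callback was obtained.
      println(s"bytes read: ${bytesReadSince()}")
    }
  }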
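
Items 31 and 32, incTaskMetricsDiskBytesSpilled and incTaskMetricsMemoryBytesSpilled: record a spill against the current task's metrics after writing an in-memory buffer to disk. Both calls are documented as no-ops when the current thread is not running a Spark task, so a helper like this is safe to call from any thread.

  import org.apache.spark.sql.rapids.execution.TrampolineUtil

  object SpillMetrics {
    // Record that `spilledBytes` of in-memory data were written out to disk.
    def recordSpill(spilledBytes: Long): Unit = {
      TrampolineUtil.incTaskMetricsMemoryBytesSpilled(spilledBytes)
      TrampolineUtil.incTaskMetricsDiskBytesSpilled(spilledBytes)
    }
  }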
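
Item 45, sameType: compare two data types while ignoring nullability flags; the two array types below differ only in containsNull, which sameType ignores.

  import org.apache.spark.sql.rapids.execution.TrampolineUtil
  import org.apache.spark.sql.types.{ArrayType, IntegerType}

  object SameTypeExample {
    def main(args: Array[String]): Unit = {
      val a = ArrayType(IntegerType, containsNull = true)
      val b = ArrayType(IntegerType, containsNull = false)
      println(TrampolineUtil.sameType(a, b))   // expected output: true
    }
  }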
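
Items 47 and 53, setTaskContext and unsetTaskContext: a sketch of propagating a task's TaskContext to a helper thread so that code calling TaskContext.get() there behaves as if it ran on the task thread. The try/finally guarantees the thread-local is cleared even if the body throws.

  import org.apache.spark.TaskContext
  import org.apache.spark.sql.rapids.execution.TrampolineUtil

  object TaskContextPropagation {
    // Run `body` with `tc` installed as this thread's task context.
    def runWithTaskContext[T](tc: TaskContext)(body: => T): T = {
      TrampolineUtil.setTaskContext(tc)
      try {
        body
      } finally {
        TrampolineUtil.unsetTaskContext()
      }
    }
  }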
