object DeltaFileProviderUtils
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- DeltaFileProviderUtils
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- Protected
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getDeltaFilesInVersionRange(spark: SparkSession, deltaLog: DeltaLog, startVersion: Long, endVersion: Long): Seq[FileStatus]
Get the Delta json files present in the delta log in the range [startVersion, endVersion]. Returns the files in sorted order, and throws if any in the range are missing.
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val jsonStatsParseOption: Map[String, String]
Put any future parsing options here.
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def parallelReadAndParseDeltaFilesAsIterator(deltaLog: DeltaLog, spark: SparkSession, files: Seq[FileStatus]): Seq[ClosableIterator[String]]
Helper method to read and parse the delta files in parallel into Actions.
- def parallelReadDeltaFilesBase[A](spark: SparkSession, files: Seq[FileStatus], hadoopConf: Configuration, f: (FileStatus) => A): Seq[A]
- Attributes
- protected
- def readThreadPool: DeltaThreadPool
- Attributes
- protected
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()