Packages

org

org.apache.spark.sql.delta

DeltaTableUtils

object DeltaTableUtils extends PredicateHelper with DeltaLogging

Linear Supertypes
DeltaLogging, DatabricksLogging, DeltaProgressReporter, Logging, PredicateHelper, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. DeltaTableUtils
  2. DeltaLogging
  3. DatabricksLogging
  4. DeltaProgressReporter
  5. Logging
  6. PredicateHelper
  7. AnyRef
  8. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def canEvaluate(expr: Expression, plan: LogicalPlan): Boolean
    Attributes
    protected
    Definition Classes
    PredicateHelper
  6. def canEvaluateWithinJoin(expr: Expression): Boolean
    Attributes
    protected
    Definition Classes
    PredicateHelper
  7. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  8. def combineWithCatalogMetadata(sparkSession: SparkSession, table: CatalogTable): CatalogTable

    Enrich the metadata received from the catalog on Delta tables with the Delta table metadata.

  9. def containsSubquery(condition: Expression): Boolean

    Check if condition involves a subquery expression.

  10. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  11. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  12. def extractIfPathContainsTimeTravel(session: SparkSession, path: String): (String, Option[DeltaTimeTravelSpec])

    Check if the given path contains time travel syntax with the @.

    Check if the given path contains time travel syntax with the @. If the path genuinely exists, return None. If the path doesn't exist, but is specifying time travel, return the DeltaTimeTravelSpec as well as the real path.

  13. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  14. def findDeltaTableRoot(spark: SparkSession, path: Path, options: Map[String, String] = Map.empty): Option[Path]

    Find the root of a Delta table from the provided path.

  15. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  16. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  17. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  18. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  19. def isCatalogTable(catalog: SessionCatalog, tableIdent: TableIdentifier): Boolean

    Checks whether TableIdentifier is a path or a table name. We assume it is a path unless the table and database both exist in the catalog.

    Checks whether TableIdentifier is a path or a table name. We assume it is a path unless the table and database both exist in the catalog.

    catalog

    session catalog used to check whether db/table exist

    tableIdent

    the provided table or path

    returns

    true if using table name, false if using path, error otherwise

  20. def isDeltaTable(spark: SparkSession, path: Path): Boolean

    Check if the provided path is the root or the children of a Delta table.

  21. def isDeltaTable(spark: SparkSession, tableName: TableIdentifier): Boolean

    Check whether the provided table name is a Delta table based on information from the Catalog.

  22. def isDeltaTable(table: CatalogTable): Boolean

    Check whether this table is a Delta table based on information from the Catalog.

  23. def isHiddenDirectory(partitionColumnNames: Seq[String], pathName: String): Boolean

    Whether a path should be hidden for delta-related file operations, such as Vacuum and Fsck.

  24. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  25. def isPredicateMetadataOnly(condition: Expression, partitionColumns: Seq[String], spark: SparkSession): Boolean

    Check if condition can be evaluated using only metadata.

    Check if condition can be evaluated using only metadata. In Delta, this means the condition only references partition columns and involves no subquery.

  26. def isPredicatePartitionColumnsOnly(condition: Expression, partitionColumns: Seq[String], spark: SparkSession): Boolean

    Does the predicate only contain partition columns?

  27. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  28. def isValidPath(tableIdent: TableIdentifier): Boolean

    tableIdent

    the provided table or path

    returns

    whether or not the provided TableIdentifier can specify a path for parquet or delta

  29. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  30. def logConsole(line: String): Unit
    Definition Classes
    DatabricksLogging
  31. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  32. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  33. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  34. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  35. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  36. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  37. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  38. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  39. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  40. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  41. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  42. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  43. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  44. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  45. def recordDeltaEvent(deltaLog: DeltaLog, opType: String, tags: Map[TagDefinition, String] = Map.empty, data: AnyRef = null): Unit

    Used to record the occurrence of a single event or report detailed, operation specific statistics.

    Used to record the occurrence of a single event or report detailed, operation specific statistics.

    Attributes
    protected
    Definition Classes
    DeltaLogging
  46. def recordDeltaOperation[A](deltaLog: DeltaLog, opType: String, tags: Map[TagDefinition, String] = Map.empty)(thunk: ⇒ A): A

    Used to report the duration as well as the success or failure of an operation.

    Used to report the duration as well as the success or failure of an operation.

    Attributes
    protected
    Definition Classes
    DeltaLogging
  47. def recordEvent(metric: MetricDefinition, additionalTags: Map[TagDefinition, String] = Map.empty, blob: String = null, trimBlob: Boolean = true): Unit
    Definition Classes
    DatabricksLogging
  48. def recordOperation[S](opType: OpType, opTarget: String = null, extraTags: Map[TagDefinition, String], isSynchronous: Boolean = true, alwaysRecordStats: Boolean = false, allowAuthTags: Boolean = false, killJvmIfStuck: Boolean = false, outputMetric: MetricDefinition = null, silent: Boolean = true)(thunk: ⇒ S): S
    Definition Classes
    DatabricksLogging
  49. def recordUsage(metric: MetricDefinition, quantity: Double, additionalTags: Map[TagDefinition, String] = Map.empty, blob: String = null, forceSample: Boolean = false, trimBlob: Boolean = true, silent: Boolean = false): Unit
    Definition Classes
    DatabricksLogging
  50. def replaceAlias(condition: Expression, aliases: AttributeMap[Expression]): Expression
    Attributes
    protected
    Definition Classes
    PredicateHelper
  51. def replaceFileIndex(target: LogicalPlan, fileIndex: FileIndex): LogicalPlan

    Replace the file index in a logical plan and return the updated plan.

    Replace the file index in a logical plan and return the updated plan. It's a common pattern that, in Delta commands, we use data skipping to determine a subset of files that can be affected by the command, so we replace the whole-table file index in the original logical plan with a new index of potentially affected files, while everything else in the original plan, e.g., resolved references, remains unchanged.

    target

    the logical plan in which we replace the file index

    fileIndex

    the new file index

  52. def resolveTimeTravelVersion(conf: SQLConf, deltaLog: DeltaLog, tt: DeltaTimeTravelSpec): (Long, String)

    Given a time travel node, resolve which version it is corresponding to for the given table and return the resolved version as well as the access type, i.e.

    Given a time travel node, resolve which version it is corresponding to for the given table and return the resolved version as well as the access type, i.e. by version or timestamp.

  53. def splitConjunctivePredicates(condition: Expression): Seq[Expression]
    Attributes
    protected
    Definition Classes
    PredicateHelper
  54. def splitDisjunctivePredicates(condition: Expression): Seq[Expression]
    Attributes
    protected
    Definition Classes
    PredicateHelper
  55. def splitMetadataAndDataPredicates(condition: Expression, partitionColumns: Seq[String], spark: SparkSession): (Seq[Expression], Seq[Expression])

    Partition the given condition into two sequences of conjunctive predicates: - predicates that can be evaluated using metadata only.

    Partition the given condition into two sequences of conjunctive predicates: - predicates that can be evaluated using metadata only. - other predicates.

  56. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  57. def toString(): String
    Definition Classes
    AnyRef → Any
  58. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  59. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  60. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  61. def withStatusCode[T](statusCode: String, defaultMessage: String, data: Map[String, Any] = Map.empty)(body: ⇒ T): T

    Report a log to indicate some command is running.

    Report a log to indicate some command is running.

    Definition Classes
    DeltaProgressReporter

Inherited from DeltaLogging

Inherited from DatabricksLogging

Inherited from DeltaProgressReporter

Inherited from Logging

Inherited from PredicateHelper

Inherited from AnyRef

Inherited from Any

Ungrouped