object DeltaTableUtils extends PredicateHelper with DeltaLogging
- Alphabetic
- By Inheritance
- DeltaTableUtils
- DeltaLogging
- DatabricksLogging
- DeltaProgressReporter
- Logging
- PredicateHelper
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
canEvaluate(expr: Expression, plan: LogicalPlan): Boolean
- Attributes
- protected
- Definition Classes
- PredicateHelper
-
def
canEvaluateWithinJoin(expr: Expression): Boolean
- Attributes
- protected
- Definition Classes
- PredicateHelper
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
combineWithCatalogMetadata(sparkSession: SparkSession, table: CatalogTable): CatalogTable
Enrich the metadata received from the catalog on Delta tables with the Delta table metadata.
-
def
containsSubquery(condition: Expression): Boolean
Check if condition involves a subquery expression.
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
extractIfPathContainsTimeTravel(session: SparkSession, path: String): (String, Option[DeltaTimeTravelSpec])
Check if the given path contains time travel syntax with the `@` symbol.
Check if the given path contains time travel syntax with the `@` symbol. If the path genuinely exists, return `None`. If the path doesn't exist, but is specifying time travel, return the `DeltaTimeTravelSpec` as well as the real path.
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
findDeltaTableRoot(spark: SparkSession, path: Path, options: Map[String, String] = Map.empty): Option[Path]
Find the root of a Delta table from the provided path.
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
def
initializeLogIfNecessary(isInterpreter: Boolean): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
isCatalogTable(catalog: SessionCatalog, tableIdent: TableIdentifier): Boolean
Checks whether TableIdentifier is a path or a table name. We assume it is a path unless the table and database both exist in the catalog.
Checks whether TableIdentifier is a path or a table name. We assume it is a path unless the table and database both exist in the catalog.
- catalog
session catalog used to check whether db/table exist
- tableIdent
the provided table or path
- returns
true if using table name, false if using path, error otherwise
-
def
isDeltaTable(spark: SparkSession, path: Path): Boolean
Check if the provided path is the root or the children of a Delta table.
-
def
isDeltaTable(spark: SparkSession, tableName: TableIdentifier): Boolean
Check whether the provided table name is a Delta table based on information from the Catalog.
-
def
isDeltaTable(table: CatalogTable): Boolean
Check whether this table is a Delta table based on information from the Catalog.
-
def
isHiddenDirectory(partitionColumnNames: Seq[String], pathName: String): Boolean
Whether a path should be hidden for delta-related file operations, such as Vacuum and Fsck.
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
isPredicateMetadataOnly(condition: Expression, partitionColumns: Seq[String], spark: SparkSession): Boolean
Check if condition can be evaluated using only metadata.
Check if condition can be evaluated using only metadata. In Delta, this means the condition only references partition columns and involves no subquery.
-
def
isPredicatePartitionColumnsOnly(condition: Expression, partitionColumns: Seq[String], spark: SparkSession): Boolean
Does the predicate only contain partition columns?
-
def
isTraceEnabled(): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
def
isValidPath(tableIdent: TableIdentifier): Boolean
- tableIdent
the provided table or path
- returns
whether or not the provided TableIdentifier can specify a path for parquet or delta
-
def
log: Logger
- Attributes
- protected
- Definition Classes
- Logging
-
def
logConsole(line: String): Unit
- Definition Classes
- DatabricksLogging
-
def
logDebug(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logName: String
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
recordDeltaEvent(deltaLog: DeltaLog, opType: String, tags: Map[TagDefinition, String] = Map.empty, data: AnyRef = null): Unit
Used to record the occurrence of a single event or report detailed, operation specific statistics.
Used to record the occurrence of a single event or report detailed, operation specific statistics.
- Attributes
- protected
- Definition Classes
- DeltaLogging
-
def
recordDeltaOperation[A](deltaLog: DeltaLog, opType: String, tags: Map[TagDefinition, String] = Map.empty)(thunk: ⇒ A): A
Used to report the duration as well as the success or failure of an operation.
Used to report the duration as well as the success or failure of an operation.
- Attributes
- protected
- Definition Classes
- DeltaLogging
-
def
recordEvent(metric: MetricDefinition, additionalTags: Map[TagDefinition, String] = Map.empty, blob: String = null, trimBlob: Boolean = true): Unit
- Definition Classes
- DatabricksLogging
-
def
recordOperation[S](opType: OpType, opTarget: String = null, extraTags: Map[TagDefinition, String], isSynchronous: Boolean = true, alwaysRecordStats: Boolean = false, allowAuthTags: Boolean = false, killJvmIfStuck: Boolean = false, outputMetric: MetricDefinition = null, silent: Boolean = true)(thunk: ⇒ S): S
- Definition Classes
- DatabricksLogging
-
def
recordUsage(metric: MetricDefinition, quantity: Double, additionalTags: Map[TagDefinition, String] = Map.empty, blob: String = null, forceSample: Boolean = false, trimBlob: Boolean = true, silent: Boolean = false): Unit
- Definition Classes
- DatabricksLogging
-
def
replaceAlias(condition: Expression, aliases: AttributeMap[Expression]): Expression
- Attributes
- protected
- Definition Classes
- PredicateHelper
-
def
replaceFileIndex(target: LogicalPlan, fileIndex: FileIndex): LogicalPlan
Replace the file index in a logical plan and return the updated plan.
Replace the file index in a logical plan and return the updated plan. It's a common pattern that, in Delta commands, we use data skipping to determine a subset of files that can be affected by the command, so we replace the whole-table file index in the original logical plan with a new index of potentially affected files, while everything else in the original plan, e.g., resolved references, remain unchanged.
- target
the logical plan in which we replace the file index
- fileIndex
the new file index
-
def
resolveTimeTravelVersion(conf: SQLConf, deltaLog: DeltaLog, tt: DeltaTimeTravelSpec): (Long, String)
Given a time travel node, resolve which version it is corresponding to for the given table and return the resolved version as well as the access type, i.e.
Given a time travel node, resolve which version it is corresponding to for the given table and return the resolved version as well as the access type, i.e. by version or timestamp.
-
def
splitConjunctivePredicates(condition: Expression): Seq[Expression]
- Attributes
- protected
- Definition Classes
- PredicateHelper
-
def
splitDisjunctivePredicates(condition: Expression): Seq[Expression]
- Attributes
- protected
- Definition Classes
- PredicateHelper
-
def
splitMetadataAndDataPredicates(condition: Expression, partitionColumns: Seq[String], spark: SparkSession): (Seq[Expression], Seq[Expression])
Partition the given condition into two sequences of conjunctive predicates: - predicates that can be evaluated using metadata only.
Partition the given condition into two sequences of conjunctive predicates: - predicates that can be evaluated using metadata only. - other predicates.
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
withStatusCode[T](statusCode: String, defaultMessage: String, data: Map[String, Any] = Map.empty)(body: ⇒ T): T
Report a log to indicate some command is running.
Report a log to indicate some command is running.
- Definition Classes
- DeltaProgressReporter