object Constraints
Utilities for handling constraints. Right now this includes:
- Column-level invariants delegated to Invariants, including both NOT NULL constraints and an old style of CHECK constraint specified in the column metadata
- Table-level CHECK constraints
- Alphabetic
- By Inheritance
- Constraints
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Type Members
- case class Check(name: String, expression: Expression) extends Constraint with Product with Serializable
A SQL expression to check for when writing out data.
- case class NotNull(column: Seq[String]) extends Constraint with Product with Serializable
A constraint that the specified column must not be NULL. Note that when the column is nested, this implies its parents must also not be NULL.
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def checkConstraintPropertyName(constraintName: String): String
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- def findDependentConstraints(sparkSession: SparkSession, columnName: Seq[String], metadata: Metadata): Map[String, String]
Find all the check constraints that reference the given column name. Returns a map of constraint names to their corresponding expression.
- def getAll(metadata: Metadata, spark: SparkSession): Seq[Constraint]
Extract all constraints from the given Delta table metadata.
- def getCheckConstraintNames(metadata: Metadata): Seq[String]
- def getCheckConstraints(metadata: Metadata, spark: SparkSession): Seq[Constraint]
Extract CHECK constraints from the table properties. Note that some CHECK constraints may also come from schema metadata; these constraints were never released in a public API but are maintained for protocol compatibility.
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getExprTextByName(name: String, metadata: Metadata, spark: SparkSession): Option[String]
Get the expression text for a constraint with the given name, if present.
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()