trait Spark31Xuntil33XShims extends SparkShims
- Alphabetic
- By Inheritance
- Spark31Xuntil33XShims
- SparkShims
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Abstract Value Members
-
abstract
def
ansiCastRule: ExprRule[_ <: Expression]
Return the replacement rule for AnsiCast.
Return the replacement rule for AnsiCast. 'AnsiCast' is removed from Spark 3.4.0, so it needs to be handled separately.
- Definition Classes
- SparkShims
-
abstract
def
aqeShuffleReaderExec: ExecRule[_ <: SparkPlan]
- Definition Classes
- SparkShims
-
abstract
def
attachTreeIfSupported[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: ⇒ A): A
dropped by SPARK-34234
dropped by SPARK-34234
- Definition Classes
- SparkShims
-
abstract
def
avroRebaseReadKey: String
- Definition Classes
- SparkShims
-
abstract
def
avroRebaseWriteKey: String
- Definition Classes
- SparkShims
-
abstract
def
broadcastModeTransform(mode: BroadcastMode, toArray: Array[InternalRow]): Any
- Definition Classes
- SparkShims
-
abstract
def
columnarAdaptivePlan(a: AdaptiveSparkPlanExec, goal: CoalesceSizeGoal): SparkPlan
- Definition Classes
- SparkShims
-
abstract
def
filesFromFileIndex(fileCatalog: PartitioningAwareFileIndex): Seq[FileStatus]
- Definition Classes
- SparkShims
-
abstract
def
findOperators(plan: SparkPlan, predicate: (SparkPlan) ⇒ Boolean): Seq[SparkPlan]
Walk the plan recursively and return a list of operators that match the predicate
Walk the plan recursively and return a list of operators that match the predicate
- Definition Classes
- SparkShims
-
abstract
def
getAdaptiveInputPlan(adaptivePlan: AdaptiveSparkPlanExec): SparkPlan
- Definition Classes
- SparkShims
-
abstract
def
getDateFormatter(): DateFormatter
- Definition Classes
- SparkShims
-
abstract
def
getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]]
- Definition Classes
- SparkShims
-
abstract
def
getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]]
- Definition Classes
- SparkShims
-
abstract
def
getParquetFilters(schema: MessageType, pushDownDate: Boolean, pushDownTimestamp: Boolean, pushDownDecimal: Boolean, pushDownStartWith: Boolean, pushDownInFilterThreshold: Int, caseSensitive: Boolean, lookupFileMeta: (String) ⇒ String, dateTimeRebaseModeFromConf: String): ParquetFilters
- Definition Classes
- SparkShims
-
abstract
def
getScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]]
- Definition Classes
- SparkShims
-
abstract
def
hasAliasQuoteFix: Boolean
- Definition Classes
- SparkShims
-
abstract
def
hasCastFloatTimestampUpcast: Boolean
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseRead(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseReadKey: String
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseWrite(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseWriteKey: String
- Definition Classes
- SparkShims
-
abstract
def
isAqePlan(p: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isCustomReaderExec(x: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isEmptyRelation(relation: Any): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isExchangeOp(plan: SparkPlanMeta[_]): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isWindowFunctionExec(plan: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
abstract
def
leafNodeDefaultParallelism(ss: SparkSession): Int
- Definition Classes
- SparkShims
-
abstract
def
newBroadcastQueryStageExec(old: BroadcastQueryStageExec, newPlan: SparkPlan): BroadcastQueryStageExec
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseRead(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseReadKey: String
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseWrite(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseWriteKey: String
- Definition Classes
- SparkShims
-
abstract
def
reproduceEmptyStringBug: Boolean
Handle regexp_replace inconsistency from https://issues.apache.org/jira/browse/SPARK-39107
Handle regexp_replace inconsistency from https://issues.apache.org/jira/browse/SPARK-39107
- Definition Classes
- SparkShims
-
abstract
def
reusedExchangeExecPfn: PartialFunction[SparkPlan, ReusedExchangeExec]
- Definition Classes
- SparkShims
-
abstract
def
sessionFromPlan(plan: SparkPlan): SparkSession
- Definition Classes
- SparkShims
-
abstract
def
shouldFailDivOverflow: Boolean
- Definition Classes
- SparkShims
-
abstract
def
skipAssertIsOnTheGpu(plan: SparkPlan): Boolean
Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.
Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.
- Definition Classes
- SparkShims
-
abstract
def
supportsColumnarAdaptivePlans: Boolean
Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec.
Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec. This feature was introduced in Spark 3.2 as part of SPARK-35881.
- Definition Classes
- SparkShims
-
abstract
def
tryTransformIfEmptyRelation(mode: BroadcastMode): Option[Any]
This call can produce an EmptyHashedRelation or an empty array, allowing the AQE rule EliminateJoinToEmptyRelation in Spark 3.1.x to optimize certain joins.
This call can produce an EmptyHashedRelation or an empty array, allowing the AQE rule EliminateJoinToEmptyRelation in Spark 3.1.x to optimize certain joins. In Spark 3.2.0, the optimization is still performed (under AQEPropagateEmptyRelation), but the AQE optimizer is looking at the metrics for the query stage to determine if numRows == 0, and if so it can eliminate certain joins.
The call is implemented only for Spark 3.1.x+. It is disabled in Databricks because it requires a task context to perform the BroadcastMode.transform call, but we'd like to call this from the driver.
- Definition Classes
- SparkShims
-
abstract
def
v1RepairTableCommand(tableName: TableIdentifier): RunnableCommand
- Definition Classes
- SparkShims
Concrete Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
addExecBroadcastShuffle(p: SparkPlan): SparkPlan
If the shim doesn't support executor broadcast, just return the plan passed in
If the shim doesn't support executor broadcast, just return the plan passed in
- Definition Classes
- SparkShims
-
def
addRowShuffleToQueryStageTransitionIfNeeded(c2r: ColumnarToRowTransition, sqse: ShuffleQueryStageExec): SparkPlan
Adds a row-based shuffle to the transitional shuffle query stage if needed.
Adds a row-based shuffle to the transitional shuffle query stage if needed. This is needed when AQE plans a GPU shuffle exchange to be reused by a parent plan exec that consumes rows.
- Definition Classes
- SparkShims
-
def
applyPostShimPlanRules(plan: SparkPlan): SparkPlan
- Definition Classes
- SparkShims
-
def
applyShimPlanRules(plan: SparkPlan, conf: RapidsConf): SparkPlan
- Definition Classes
- SparkShims
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
checkCToRWithExecBroadcastAQECoalPart(p: SparkPlan, parent: Option[SparkPlan]): Boolean
- Definition Classes
- SparkShims
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getDataWriteCmds: Map[Class[_ <: DataWritingCommand], DataWritingCommandRule[_ <: DataWritingCommand]]
- Definition Classes
- Spark31Xuntil33XShims → SparkShims
-
def
getFileScanRDD(sparkSession: SparkSession, readFunction: (PartitionedFile) ⇒ Iterator[InternalRow], filePartitions: Seq[FilePartition], readDataSchema: StructType, metadataColumns: Seq[AttributeReference], fileFormat: Option[FileFormat]): RDD[InternalRow]
- Definition Classes
- Spark31Xuntil33XShims → SparkShims
-
def
getRunnableCmds: Map[Class[_ <: RunnableCommand], RunnableCommandRule[_ <: RunnableCommand]]
- Definition Classes
- Spark31Xuntil33XShims → SparkShims
-
def
getShuffleFromCToRWithExecBroadcastAQECoalPart(p: SparkPlan): Option[SparkPlan]
- Definition Classes
- SparkShims
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
isCastingStringToNegDecimalScaleSupported: Boolean
- Definition Classes
- SparkShims
-
def
isExecutorBroadcastShuffle(shuffle: ShuffleExchangeLike): Boolean
- Definition Classes
- SparkShims
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
neverReplaceShowCurrentNamespaceCommand: ExecRule[_ <: SparkPlan]
- Definition Classes
- Spark31Xuntil33XShims → SparkShims
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
shuffleParentReadsShuffleData(shuffle: ShuffleExchangeLike, parent: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()