case class HashAggregateExecTransformer(requiredChildDistributionExpressions: Option[Seq[Expression]], groupingExpressions: Seq[NamedExpression], aggregateExpressions: Seq[AggregateExpression], aggregateAttributes: Seq[Attribute], initialInputBufferOffset: Int, resultExpressions: Seq[NamedExpression], child: SparkPlan) extends HashAggregateExecBaseTransformer with Product with Serializable
- Alphabetic
- By Inheritance
- HashAggregateExecTransformer
- HashAggregateExecBaseTransformer
- UnaryTransformSupport
- TransformSupport
- GlutenPlan
- LogLevelUtil
- BaseAggregateExec
- AliasAwareOutputPartitioning
- AliasAwareOutputExpression
- UnaryExecNode
- UnaryLike
- SparkPlan
- Serializable
- Serializable
- Logging
- QueryPlan
- SQLConfHelper
- TreeNode
- TreePatternBits
- Product
- Equals
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
- new HashAggregateExecTransformer(requiredChildDistributionExpressions: Option[Seq[Expression]], groupingExpressions: Seq[NamedExpression], aggregateExpressions: Seq[AggregateExpression], aggregateAttributes: Seq[Attribute], initialInputBufferOffset: Int, resultExpressions: Seq[NamedExpression], child: SparkPlan)
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
addFunctionNode(args: AnyRef, aggregateFunction: AggregateFunction, childrenNodeList: List[ExpressionNode], aggregateMode: AggregateMode, aggregateNodeList: List[AggregateFunctionNode]): Unit
- Attributes
- protected
- Definition Classes
- HashAggregateExecTransformer → HashAggregateExecBaseTransformer
-
val
aggregateAttributes: Seq[Attribute]
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
val
aggregateBufferAttributes: Seq[AttributeReference]
- Attributes
- protected
- Definition Classes
- BaseAggregateExec
-
val
aggregateExpressions: Seq[AggregateExpression]
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
lazy val
allAggregateResultAttributes: List[Attribute]
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
lazy val
allAttributes: AttributeSeq
- Definition Classes
- HashAggregateExecBaseTransformer → QueryPlan
-
def
apply(number: Int): TreeNode[_]
- Definition Classes
- TreeNode
-
def
applyExtractStruct(context: SubstraitContext, aggRel: RelNode, operatorId: Long, validation: Boolean): RelNode
Add a projection after aggregation to extract subfields from Struct.
- context
the Substrait context
- aggRel
the aggregation rel
- operatorId
the operator id
- returns
a project rel
-
def
applyPostProjection(context: SubstraitContext, aggRel: RelNode, operatorId: Long, validation: Boolean): RelNode
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
argString(maxFields: Int): String
- Definition Classes
- TreeNode
-
def
asCode: String
- Definition Classes
- TreeNode
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
final
lazy val
canonicalized: SparkPlan
- Definition Classes
- QueryPlan
- Annotations
- @transient()
-
def
checkAggFuncModeSupport(aggFunc: AggregateFunction, mode: AggregateMode): Boolean
- Attributes
- protected
- Definition Classes
- HashAggregateExecTransformer → HashAggregateExecBaseTransformer
-
def
checkType(dataType: DataType): Boolean
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
val
child: SparkPlan
- Definition Classes
- HashAggregateExecTransformer → UnaryLike
-
final
lazy val
children: Seq[SparkPlan]
- Definition Classes
- UnaryLike
- Annotations
- @transient()
-
def
cleanupResources(): Unit
- Attributes
- protected[org.apache.spark.sql]
- Definition Classes
- SparkPlan
-
def
clone(): SparkPlan
- Definition Classes
- TreeNode → AnyRef
-
def
collect[B](pf: PartialFunction[SparkPlan, B]): Seq[B]
- Definition Classes
- TreeNode
-
def
collectFirst[B](pf: PartialFunction[SparkPlan, B]): Option[B]
- Definition Classes
- TreeNode
-
def
collectLeaves(): Seq[SparkPlan]
- Definition Classes
- TreeNode
-
def
collectWithSubqueries[B](f: PartialFunction[SparkPlan, B]): Seq[B]
- Definition Classes
- QueryPlan
-
final
def
columnarInputRDDs: Seq[RDD[ColumnarBatch]]
- Definition Classes
- UnaryTransformSupport → TransformSupport
-
def
conf: SQLConf
- Definition Classes
- SparkPlan → SQLConfHelper
-
final
def
containsAllPatterns(patterns: TreePattern*): Boolean
- Definition Classes
- TreePatternBits
-
final
def
containsAnyPattern(patterns: TreePattern*): Boolean
- Definition Classes
- TreePatternBits
-
lazy val
containsChild: Set[TreeNode[_]]
- Definition Classes
- TreeNode
-
final
def
containsPattern(t: TreePattern): Boolean
- Definition Classes
- TreePatternBits
- Annotations
- @inline()
-
def
copyTagsFrom(other: SparkPlan): Unit
- Definition Classes
- TreeNode
-
def
doCanonicalize(): SparkPlan
- Attributes
- protected
- Definition Classes
- QueryPlan
-
final
def
doExecute(): RDD[InternalRow]
- Definition Classes
- TransformSupport → SparkPlan
-
def
doExecuteBroadcast[T](): Broadcast[T]
- Attributes
- protected[org.apache.spark.sql]
- Definition Classes
- SparkPlan
-
def
doExecuteColumnar(): RDD[ColumnarBatch]
- Definition Classes
- HashAggregateExecBaseTransformer → SparkPlan
-
def
doNativeValidation(context: SubstraitContext, node: RelNode): ValidationResult
- Attributes
- protected
- Definition Classes
- GlutenPlan
-
def
doPrepare(): Unit
- Attributes
- protected
- Definition Classes
- SparkPlan
-
def
doTransform(context: SubstraitContext): TransformContext
- Definition Classes
- HashAggregateExecBaseTransformer → TransformSupport
-
final
def
doValidate(): ValidationResult
- Definition Classes
- GlutenPlan
-
def
doValidateInternal(): ValidationResult
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer → GlutenPlan
-
lazy val
enableNativeValidation: Boolean
- Attributes
- protected
- Definition Classes
- GlutenPlan
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
execute(): RDD[InternalRow]
- Definition Classes
- SparkPlan
-
final
def
executeBroadcast[T](): Broadcast[T]
- Definition Classes
- SparkPlan
-
def
executeCollect(): Array[InternalRow]
- Definition Classes
- SparkPlan
-
def
executeCollectPublic(): Array[Row]
- Definition Classes
- SparkPlan
-
final
def
executeColumnar(): RDD[ColumnarBatch]
- Definition Classes
- SparkPlan
-
final
def
executeQuery[T](query: ⇒ T): T
- Attributes
- protected
- Definition Classes
- SparkPlan
-
def
executeTail(n: Int): Array[InternalRow]
- Definition Classes
- SparkPlan
-
def
executeTake(n: Int): Array[InternalRow]
- Definition Classes
- SparkPlan
-
def
executeToIterator(): Iterator[InternalRow]
- Definition Classes
- SparkPlan
-
final
def
expressions: Seq[Expression]
- Definition Classes
- QueryPlan
-
def
fastEquals(other: TreeNode[_]): Boolean
- Definition Classes
- TreeNode
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
find(f: (SparkPlan) ⇒ Boolean): Option[SparkPlan]
- Definition Classes
- TreeNode
-
def
flatMap[A](f: (SparkPlan) ⇒ TraversableOnce[A]): Seq[A]
- Definition Classes
- TreeNode
-
def
foreach(f: (SparkPlan) ⇒ Unit): Unit
- Definition Classes
- TreeNode
-
def
foreachUp(f: (SparkPlan) ⇒ Unit): Unit
- Definition Classes
- TreeNode
-
def
formattedNodeName: String
- Attributes
- protected
- Definition Classes
- QueryPlan
-
def
generateTreeString(depth: Int, lastChildren: Seq[Boolean], append: (String) ⇒ Unit, verbose: Boolean, prefix: String, addSuffix: Boolean, maxFields: Int, printNodeId: Boolean, indent: Int): Unit
- Definition Classes
- TreeNode
-
def
getAdvancedExtension(validation: Boolean, originalInputAttributes: Seq[Attribute]): AdvancedExtensionNode
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
getAggRel(context: SubstraitContext, operatorId: Long, aggParams: AggregationParams, input: RelNode = null, validation: Boolean = false): RelNode
Create and return the Rel for this aggregation.
- context
the Substrait context
- operatorId
the operator id
- aggParams
the params for aggregation mainly used for metrics updating
- input
the input rel node
- validation
whether this is for native validation
- returns
the rel node for this aggregation
- Attributes
- protected
- Definition Classes
- HashAggregateExecTransformer → HashAggregateExecBaseTransformer
-
def
getAggRelAfterProject(context: SubstraitContext, selections: Seq[Int], filterSelections: Seq[Int], inputRel: RelNode, operatorId: Long): RelNode
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
getAggRelWithPreProjection(context: SubstraitContext, originalInputAttributes: Seq[Attribute], operatorId: Long, input: RelNode, validation: Boolean): RelNode
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
getAggRelWithoutPreProjection(context: SubstraitContext, originalInputAttributes: Seq[Attribute], operatorId: Long, input: RelNode, validation: Boolean): RelNode
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
getAttrForAggregateExpr(exp: AggregateExpression, aggregateAttributeList: Seq[Attribute], aggregateAttr: ListBuffer[Attribute], index: Int): Int
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
getAttrForAggregateExprs(aggregateExpressions: Seq[AggregateExpression], aggregateAttributeList: Seq[Attribute]): List[Attribute]
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getColumnarInputRDDs(plan: SparkPlan): Seq[RDD[ColumnarBatch]]
- Attributes
- protected
- Definition Classes
- TransformSupport
-
def
getDefaultTreePatternBits: BitSet
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
getPartialAggOutTypes: List[TypeNode]
Return the output types after partial aggregation through Velox.
-
def
getTagValue[T](tag: TreeNodeTag[T]): Option[T]
- Definition Classes
- TreeNode
-
def
glutenConf: GlutenConfig
- Attributes
- protected
- Definition Classes
- GlutenPlan
-
val
groupingExpressions: Seq[NamedExpression]
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
def
hasAlias: Boolean
- Attributes
- protected
- Definition Classes
- AliasAwareOutputExpression
-
def
hashCode(): Int
- Definition Classes
- TreeNode → AnyRef → Any
-
val
id: Int
- Definition Classes
- SparkPlan
- val initialInputBufferOffset: Int
-
def
initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
def
initializeLogIfNecessary(isInterpreter: Boolean): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
innerChildren: Seq[QueryPlan[_]]
- Definition Classes
- QueryPlan → TreeNode
-
def
inputAttributes: Seq[Attribute]
- Attributes
- protected
- Definition Classes
- BaseAggregateExec
-
def
inputSet: AttributeSet
- Definition Classes
- QueryPlan
-
def
isCanonicalizedPlan: Boolean
- Attributes
- protected
- Definition Classes
- QueryPlan
-
def
isCapableForStreamingAggregation: Boolean
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
isRuleIneffective(ruleId: RuleId): Boolean
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
isStreaming: Boolean
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
def
isTraceEnabled(): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
def
jsonFields: List[JField]
- Attributes
- protected
- Definition Classes
- TreeNode
-
final
def
legacyWithNewChildren(newChildren: Seq[SparkPlan]): SparkPlan
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
log: Logger
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logName: String
- Attributes
- protected
- Definition Classes
- Logging
-
def
logOnLevel(level: String, msg: ⇒ String, e: Throwable): Unit
- Attributes
- protected
- Definition Classes
- LogLevelUtil
-
def
logOnLevel(level: String, msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- LogLevelUtil
-
def
logTrace(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logicalLink: Option[LogicalPlan]
- Definition Classes
- SparkPlan
-
def
longMetric(name: String): SQLMetric
- Definition Classes
- SparkPlan
-
def
makeCopy(newArgs: Array[AnyRef]): SparkPlan
- Definition Classes
- SparkPlan → TreeNode
-
def
map[A](f: (SparkPlan) ⇒ A): Seq[A]
- Definition Classes
- TreeNode
-
final
def
mapChildren(f: (SparkPlan) ⇒ SparkPlan): SparkPlan
- Definition Classes
- UnaryLike
-
def
mapExpressions(f: (Expression) ⇒ Expression): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
mapProductIterator[B](f: (Any) ⇒ B)(implicit arg0: ClassTag[B]): Array[B]
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
markRuleAsIneffective(ruleId: RuleId): Unit
- Attributes
- protected
- Definition Classes
- TreeNode
-
lazy val
metrics: Map[String, SQLMetric]
- Definition Classes
- HashAggregateExecBaseTransformer → SparkPlan
- Annotations
- @transient()
-
def
metricsUpdater(): MetricsUpdater
- Definition Classes
- HashAggregateExecBaseTransformer → TransformSupport
-
final
def
missingInput: AttributeSet
- Definition Classes
- QueryPlan
-
def
mixedPartialAndMerge: Boolean
Whether this is a mixed aggregation of partial and partial-merge aggregation functions.
- returns
whether partial and partial-merge functions coexist.
-
def
modeToKeyWord(aggregateMode: AggregateMode): String
- Attributes
- protected
- Definition Classes
- HashAggregateExecTransformer → HashAggregateExecBaseTransformer
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
needsPostProjection(aggOutAttributes: List[Attribute]): Boolean
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
needsPreProjection: Boolean
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer
-
def
nodeName: String
- Definition Classes
- TreeNode
-
val
nodePatterns: Seq[TreePattern]
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
normalizeExpression(exp: Expression): Expression
- Attributes
- protected
- Definition Classes
- AliasAwareOutputExpression
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
numShufflePartitions: Option[Int]
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
def
numberedTreeString: String
- Definition Classes
- TreeNode
-
val
origin: Origin
- Definition Classes
- TreeNode
-
def
otherCopyArgs: Seq[AnyRef]
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
output: Seq[Attribute]
- Definition Classes
- BaseAggregateExec → QueryPlan
-
def
outputExpressions: Seq[NamedExpression]
- Attributes
- protected
- Definition Classes
- HashAggregateExecBaseTransformer → BaseAggregateExec → AliasAwareOutputExpression
-
def
outputOrdering: Seq[SortOrder]
- Definition Classes
- SparkPlan
-
final
def
outputPartitioning: Partitioning
- Definition Classes
- AliasAwareOutputPartitioning → SparkPlan
-
lazy val
outputSet: AttributeSet
- Definition Classes
- QueryPlan
- Annotations
- @transient()
-
def
p(number: Int): SparkPlan
- Definition Classes
- TreeNode
-
final
def
prepare(): Unit
- Definition Classes
- SparkPlan
-
def
prepareSubqueries(): Unit
- Attributes
- protected
- Definition Classes
- SparkPlan
-
def
prettyJson: String
- Definition Classes
- TreeNode
-
def
printSchema(): Unit
- Definition Classes
- QueryPlan
-
def
producedAttributes: AttributeSet
- Definition Classes
- BaseAggregateExec → QueryPlan
-
lazy val
references: AttributeSet
- Definition Classes
- QueryPlan
- Annotations
- @transient()
-
def
requiredChildDistribution: List[Distribution]
- Definition Classes
- BaseAggregateExec → SparkPlan
-
val
requiredChildDistributionExpressions: Option[Seq[Expression]]
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
def
requiredChildOrdering: Seq[Seq[SortOrder]]
- Definition Classes
- SparkPlan
-
def
resetMetrics(): Unit
- Definition Classes
- SparkPlan
-
val
resultExpressions: Seq[NamedExpression]
- Definition Classes
- HashAggregateExecTransformer → BaseAggregateExec
-
def
rewriteAttrs(attrMap: AttributeMap[Attribute]): SparkPlan
- Definition Classes
- QueryPlan
-
final
def
sameResult(other: SparkPlan): Boolean
- Definition Classes
- QueryPlan
-
lazy val
schema: StructType
- Definition Classes
- QueryPlan
-
def
schemaString: String
- Definition Classes
- QueryPlan
-
final
def
semanticHash(): Int
- Definition Classes
- QueryPlan
-
final
val
session: SparkSession
- Definition Classes
- SparkPlan
-
def
setLogicalLink(logicalPlan: LogicalPlan): Unit
- Definition Classes
- SparkPlan
-
def
setTagValue[T](tag: TreeNodeTag[T], value: T): Unit
- Definition Classes
- TreeNode
-
def
simpleString(maxFields: Int): String
- Definition Classes
- HashAggregateExecBaseTransformer → QueryPlan → TreeNode
-
def
simpleStringWithNodeId(): String
- Definition Classes
- QueryPlan → TreeNode
-
def
sparkContext: SparkContext
- Attributes
- protected
- Definition Classes
- SparkPlan
-
def
statePrefix: String
- Attributes
- protected
- Definition Classes
- QueryPlan
-
def
stringArgs: Iterator[Any]
- Attributes
- protected
- Definition Classes
- TreeNode
-
def
subqueries: Seq[SparkPlan]
- Definition Classes
- QueryPlan
-
def
subqueriesAll: Seq[SparkPlan]
- Definition Classes
- QueryPlan
-
final
lazy val
supportsColumnar: Boolean
- Definition Classes
- TransformSupport → SparkPlan
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toJSON: String
- Definition Classes
- TreeNode
-
def
toString(): String
- Definition Classes
- TreeNode → AnyRef → Any
-
def
transform(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformAllExpressions(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformAllExpressionsWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformDown(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformDownWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformDownWithSubqueries(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- QueryPlan
-
def
transformDownWithSubqueriesAndPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- QueryPlan
-
def
transformExpressions(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformExpressionsDown(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformExpressionsDownWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformExpressionsUp(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformExpressionsUpWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformExpressionsWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): HashAggregateExecTransformer.this.type
- Definition Classes
- QueryPlan
-
def
transformUp(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformUpWithBeforeAndAfterRuleOnChildren(cond: (SparkPlan) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[(SparkPlan, SparkPlan), SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformUpWithNewOutput(rule: PartialFunction[SparkPlan, (SparkPlan, Seq[(Attribute, Attribute)])], skipCond: (SparkPlan) ⇒ Boolean, canGetOutput: (SparkPlan) ⇒ Boolean): SparkPlan
- Definition Classes
- QueryPlan
-
def
transformUpWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformUpWithSubqueries(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- QueryPlan
-
def
transformWithPruning(cond: (TreePatternBits) ⇒ Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
def
transformWithSubqueries(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
- Definition Classes
- QueryPlan
-
lazy val
treePatternBits: BitSet
- Definition Classes
- QueryPlan → TreeNode → TreePatternBits
-
def
treeString(append: (String) ⇒ Unit, verbose: Boolean, addSuffix: Boolean, maxFields: Int, printOperatorId: Boolean): Unit
- Definition Classes
- TreeNode
-
final
def
treeString(verbose: Boolean, addSuffix: Boolean, maxFields: Int, printOperatorId: Boolean): String
- Definition Classes
- TreeNode
-
final
def
treeString: String
- Definition Classes
- TreeNode
-
def
unsetTagValue[T](tag: TreeNodeTag[T]): Unit
- Definition Classes
- TreeNode
-
def
updateOuterReferencesInSubquery(plan: SparkPlan, attrMap: AttributeMap[Attribute]): SparkPlan
- Attributes
- protected
- Definition Classes
- QueryPlan
-
def
vectorTypes: Option[Seq[String]]
- Definition Classes
- SparkPlan
-
def
verboseString(maxFields: Int): String
- Definition Classes
- HashAggregateExecBaseTransformer → QueryPlan → TreeNode
-
def
verboseStringWithOperatorId(): String
- Definition Classes
- BaseAggregateExec → UnaryExecNode → QueryPlan
-
def
verboseStringWithSuffix(maxFields: Int): String
- Definition Classes
- TreeNode
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
waitForSubqueries(): Unit
- Attributes
- protected
- Definition Classes
- SparkPlan
-
def
withNewChildInternal(newChild: SparkPlan): HashAggregateExecTransformer
- Attributes
- protected
- Definition Classes
- HashAggregateExecTransformer → UnaryLike
-
final
def
withNewChildren(newChildren: Seq[SparkPlan]): SparkPlan
- Definition Classes
- TreeNode
-
final
def
withNewChildrenInternal(newChildren: IndexedSeq[SparkPlan]): SparkPlan
- Definition Classes
- UnaryLike