object ExprChecks
- Alphabetic
- By Inheritance
- ExprChecks
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
aggNotGroupByOrReduction(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
An aggregation check where window operations are supported by the plugin, but Spark also supports group by and reduction on these.
An aggregation check where window operations are supported by the plugin, but Spark also supports group by and reduction on these. This is really just for 'collect_list', which the plugin only supports as a window operation.
-
def
aggNotWindow(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
An aggregation check where group by and reduction are supported by the plugin, but Spark also supports window operations on these.
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
binaryProjectNotLambda(outputCheck: TypeSig, sparkOutputSig: TypeSig, param1: (String, TypeSig, TypeSig), param2: (String, TypeSig, TypeSig)): ExprChecks
Helper function for a binary expression where the plugin only supports project but Spark supports lambda too.
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
fullAgg(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
Aggregate operation where window, reduction, and group by agg are all supported in the same way.
-
def
fullAggAndProject(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
For a generic expression that can work as both an aggregation and in the project context.
For a generic expression that can work as both an aggregation and in the project context. This is really just for PythonUDF.
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
val
mathUnary: ExprChecks
Math unary checks where input and output are both DoubleType.
Math unary checks where input and output are both DoubleType. Spark supports these for both project and lambda, but the plugin only supports project.
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
projectNotLambda(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
A check for an expression that only supports project in the plugin, but Spark also supports this expression in lambda.
-
def
projectOnly(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
A check for an expression that only supports project, both in Spark and in the plugin.
-
def
reductionAndGroupByAgg(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
Aggregate operation where only group by agg and reduction are supported, both in the plugin and in Spark.
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
def
unaryProject(outputCheck: TypeSig, sparkOutputSig: TypeSig, inputCheck: TypeSig, sparkInputSig: TypeSig): ExprChecks
A check for a unary expression that only supports project, both in Spark and in the plugin.
-
def
unaryProjectNotLambda(outputCheck: TypeSig, sparkOutputSig: TypeSig, inputCheck: TypeSig, sparkInputSig: TypeSig): ExprChecks
A check for a unary expression that only supports project, but Spark also supports this expression in lambda.
-
def
unaryProjectNotLambdaInputMatchesOutput(check: TypeSig, sparkSig: TypeSig): ExprChecks
Unary expression checks for project where the input matches the output, but Spark also supports this expression in lambda mode.
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
def
windowOnly(outputCheck: TypeSig, sparkOutputSig: TypeSig, paramCheck: Seq[ParamCheck] = Seq.empty, repeatingParamCheck: Option[RepeatingParamCheck] = None): ExprChecks
Window only operations.
Window only operations. Spark does not support these operations as anything but a window operation.