object GpuTypeShims
- Alphabetic
- By Inheritance
- GpuTypeShims
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
additionalArithmeticSupportedTypes: TypeSig
Get additional arithmetic supported types for this Shim
-
def
additionalCommonOperatorSupportedTypes: TypeSig
Get additional common operator supported types for this Shim (filter, sample, project, alias, table scan, ...)
Get additional common operator supported types for this Shim (filter, sample, project, alias, table scan, ...) which the GPU supports from Spark 3.3.0
-
def
additionalCsvSupportedTypes: TypeSig
Get additional Csv supported types for this Shim
-
def
additionalParquetSupportedTypes: TypeSig
Get additional Parquet supported types for this Shim
-
def
additionalPredicateSupportedTypes: TypeSig
Get additional predicate supported types for this Shim
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
columnarCopy(cv: ColumnVector, b: RapidsHostColumnBuilder, dataType: DataType, rows: Int): Unit
Copy a column for computing on GPU.
Copy a column for computing on GPU. Check first that the type is supported by calling 'isColumnarCopySupportedForType'
Data type is passed explicitly to allow overriding the reported type from the column vector. There are cases where the type reported by the column vector does not match the data. See https://github.com/apache/iceberg/issues/6116.
- def csvRead(cv: ColumnVector, dt: DataType): ColumnVector
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getConverterForType(t: DataType, nullable: Boolean): TypeConverter
Get the TypeConverter of the data type for this Shim. Note: hasConverterForType should be called first
Get the TypeConverter of the data type for this Shim. Note: hasConverterForType should be called first
- t
the data type
- nullable
is nullable
- returns
the row to column convert for the data type
-
def
hasConverterForType(otherType: DataType): Boolean
Whether the Shim supports the data type for the row-to-column converter
Whether the Shim supports the data type for the row-to-column converter
- otherType
the data type that should be checked in the Shim
- returns
true if the Shim supports the otherType, false otherwise.
- def hasSideEffectsIfCastFloatToTimestamp: Boolean
- def hasSideEffectsIfCastIntToDayTime(dt: DataType): Boolean
- def hasSideEffectsIfCastIntToYearMonth(ym: DataType): Boolean
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
isColumnarCopySupportedForType(colType: DataType): Boolean
Whether the Shim supports columnar copy for the given type
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def isParquetColumnarWriterSupportedForType(colType: DataType): Boolean
-
def
isSupportedDayTimeType(dt: DataType): Boolean
Whether the Shim supports the day-time interval type for specific operators (Alias, Add, Subtract, Positive, ...)
Whether the Shim supports the day-time interval type for specific operators (Alias, Add, Subtract, Positive, ...); these operators do not support the day-time interval type on this Shim. Note: Spark 3.2.x does support DayTimeIntervalType; this check is for the GPU operators.
-
def
isSupportedYearMonthType(dt: DataType): Boolean
Whether the Shim supports the year-month interval type for specific operators (Alias, Add, Subtract, Positive, ...)
Whether the Shim supports the year-month interval type for specific operators (Alias, Add, Subtract, Positive, ...); these operators do not support the year-month interval type.
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def supportCsvRead(dt: DataType): Boolean
-
def
supportToScalarForType(t: DataType): Boolean
Whether the Shim supports converting the given type to GPU Scalar
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toRapidsOrNull(t: DataType): DType
Get the cuDF type for the Spark data type
Get the cuDF type for the Spark data type
- t
the Spark data type
- returns
the cuDF type if the Shim supports the Spark data type (null otherwise, per the method name)
-
def
toScalarForType(t: DataType, v: Any): Nothing
Convert the given value to Scalar
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()