Packages

c

org.apache.spark.sql.rapids

GpuDataSource

case class GpuDataSource(sparkSession: SparkSession, className: String, paths: Seq[String] = Nil, userSpecifiedSchema: Option[StructType] = None, partitionColumns: Seq[String] = Seq.empty, bucketSpec: Option[BucketSpec] = None, options: Map[String, String] = Map.empty, catalogTable: Option[CatalogTable] = None, origProvider: Class[_], gpuFileFormat: ColumnarFileFormat) extends GpuDataSourceBase with Product with Serializable

Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. GpuDataSource
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. GpuDataSourceBase
  7. Logging
  8. AnyRef
  9. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new GpuDataSource(sparkSession: SparkSession, className: String, paths: Seq[String] = Nil, userSpecifiedSchema: Option[StructType] = None, partitionColumns: Seq[String] = Seq.empty, bucketSpec: Option[BucketSpec] = None, options: Map[String, String] = Map.empty, catalogTable: Option[CatalogTable] = None, origProvider: Class[_], gpuFileFormat: ColumnarFileFormat)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. val bucketSpec: Option[BucketSpec]
  6. val caseInsensitiveOptions: CaseInsensitiveMap[String]
    Attributes
    protected
    Definition Classes
    GpuDataSourceBase
  7. val catalogTable: Option[CatalogTable]
  8. val className: String
  9. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  10. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  11. val equality: Resolver
    Attributes
    protected
    Definition Classes
    GpuDataSourceBase
  12. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  13. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  14. def globPaths: Boolean

Whether or not paths should be globbed before being used to access files.

    Definition Classes
    GpuDataSourceBase
  15. val gpuFileFormat: ColumnarFileFormat
  16. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  17. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  18. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  19. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  20. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  21. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  22. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  23. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  24. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  25. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  26. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  27. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  28. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  29. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  30. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  31. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  32. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  33. def newHadoopConfiguration(): Configuration
    Attributes
    protected
    Definition Classes
    GpuDataSourceBase
  34. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  35. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  36. val options: Map[String, String]
  37. val origProvider: Class[_]
  38. def originalProvidingInstance(): Any
    Attributes
    protected
    Definition Classes
    GpuDataSourceBase
  39. val partitionColumns: Seq[String]
  40. val paths: Seq[String]
  41. def resolveRelation(checkFilesExist: Boolean = true): BaseRelation

Create a resolved BaseRelation that can be used to read data from or write data into this DataSource.

    checkFilesExist

    Whether to confirm that the files exist when generating the non-streaming file based datasource. StructuredStreaming jobs already list file existence, and when generating incremental jobs, the batch is considered as a non-streaming file based data source. Since we know that files already exist, we don't need to check them again.

    Definition Classes
    GpuDataSourceBase
  42. val sparkSession: SparkSession
  43. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  44. val userSpecifiedSchema: Option[StructType]
  45. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  46. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  47. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  48. def writeAndRead(mode: SaveMode, data: LogicalPlan, outputColumnNames: Seq[String], physicalPlan: SparkPlan, useStableSort: Boolean, concurrentWriterPartitionFlushSize: Long): BaseRelation

Writes the given LogicalPlan out to this DataSource and returns a BaseRelation for the following reading.

    mode

    The save mode for this writing.

    data

    The input query plan that produces the data to be written. Note that this plan is analyzed and optimized.

    outputColumnNames

    The original output column names of the input query plan. The optimizer may not preserve the output column's names' case, so we need this parameter instead of data.output.

    physicalPlan

    The physical plan of the input query plan. We should run the writing command with this physical plan instead of creating a new physical plan, so that the metrics can be correctly linked to the given physical plan and shown in the web UI.

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from GpuDataSourceBase

Inherited from Logging

Inherited from AnyRef

Inherited from Any

Ungrouped