class GeoParquetFileFormat
Package: org.apache.spark.sql.execution.datasources.parquet
Companion: object GeoParquetFileFormat
class GeoParquetFileFormat extends ParquetFileFormat with GeoParquetFileFormatBase with FileFormat with DataSourceRegister with Logging with Serializable
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- GeoParquetFileFormat
- GeoParquetFileFormatBase
- ParquetFileFormat
- Serializable
- Logging
- DataSourceRegister
- FileFormat
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- Protected
Instance Constructors
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def buildReader(sparkSession: SparkSession, dataSchema: StructType, partitionSchema: StructType, requiredSchema: StructType, filters: Seq[Filter], options: Map[String, String], hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow]
- Attributes
- protected
- Definition Classes
- FileFormat
- def buildReaderWithPartitionValues(sparkSession: SparkSession, dataSchema: StructType, partitionSchema: StructType, requiredSchema: StructType, filters: Seq[Filter], options: Map[String, String], hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow]
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → FileFormat
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(other: Any): Boolean
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → AnyRef → Any
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def hashCode(): Int
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → AnyRef → Any
- def inferSchema(sparkSession: SparkSession, parameters: Map[String, String], files: Seq[FileStatus]): Option[StructType]
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → FileFormat
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- Attributes
- protected
- Definition Classes
- Logging
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
- Attributes
- protected
- Definition Classes
- Logging
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def isSplitable(sparkSession: SparkSession, options: Map[String, String], path: Path): Boolean
- Definition Classes
- ParquetFileFormat → FileFormat
- def isTraceEnabled(): Boolean
- Attributes
- protected
- Definition Classes
- Logging
- def log: Logger
- Attributes
- protected
- Definition Classes
- Logging
- def logDebug(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logDebug(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logError(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logError(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logInfo(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logInfo(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logName: String
- Attributes
- protected
- Definition Classes
- Logging
- def logTrace(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logTrace(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logWarning(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logWarning(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def prepareWrite(sparkSession: SparkSession, job: Job, options: Map[String, String], dataSchema: StructType): OutputWriterFactory
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → FileFormat
- def shortName(): String
- Definition Classes
- GeoParquetFileFormatBase → DataSourceRegister
- val spatialFilter: Option[GeoParquetSpatialFilter]
- def supportBatch(sparkSession: SparkSession, schema: StructType): Boolean
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → FileFormat
- def supportDataType(dataType: DataType): Boolean
- Definition Classes
- GeoParquetFileFormat → ParquetFileFormat → FileFormat
- def supportFieldName(name: String): Boolean
- Definition Classes
- FileFormat
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- ParquetFileFormat → AnyRef → Any
- def vectorTypes(requiredSchema: StructType, partitionSchema: StructType, sqlConf: SQLConf): Option[Seq[String]]
- Definition Classes
- ParquetFileFormat → FileFormat
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- def withSpatialPredicates(spatialFilter: GeoParquetSpatialFilter): GeoParquetFileFormat
- Definition Classes
- GeoParquetFileFormat → GeoParquetFileFormatBase