public class Spark24HoodieVectorizedParquetRecordReader
extends org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader
Nested classes inherited from class org.apache.spark.sql.execution.datasources.parquet.SpecificParquetRecordReaderBase:
org.apache.spark.sql.execution.datasources.parquet.SpecificParquetRecordReaderBase.NullIntIterator, org.apache.spark.sql.execution.datasources.parquet.SpecificParquetRecordReaderBase.RLEIntIterator, org.apache.spark.sql.execution.datasources.parquet.SpecificParquetRecordReaderBase.ValuesReaderIntIterator

| Constructor and Description |
|---|
Spark24HoodieVectorizedParquetRecordReader(TimeZone convertTz,
boolean useOffHeap,
int capacity,
Map<Integer,org.apache.hudi.common.util.collection.Pair<org.apache.spark.sql.types.DataType,org.apache.spark.sql.types.DataType>> typeChangeInfos) |
| Modifier and Type | Method and Description |
|---|---|
void |
close() |
void |
enableReturningBatches() |
Object |
getCurrentValue() |
void |
initBatch(org.apache.spark.sql.types.StructType partitionColumns,
org.apache.spark.sql.catalyst.InternalRow partitionValues) |
void |
initialize(org.apache.hadoop.mapreduce.InputSplit inputSplit,
org.apache.hadoop.mapreduce.TaskAttemptContext taskAttemptContext) |
boolean |
nextBatch() |
boolean |
nextKeyValue() |
org.apache.spark.sql.vectorized.ColumnarBatch |
resultBatch() |
Methods inherited from superclass: getProgress, initialize

Constructor Detail:

public Spark24HoodieVectorizedParquetRecordReader(TimeZone convertTz, boolean useOffHeap, int capacity, Map<Integer,org.apache.hudi.common.util.collection.Pair<org.apache.spark.sql.types.DataType,org.apache.spark.sql.types.DataType>> typeChangeInfos)
public void initBatch(org.apache.spark.sql.types.StructType partitionColumns,
org.apache.spark.sql.catalyst.InternalRow partitionValues)
Overrides: initBatch in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader

public void initialize(org.apache.hadoop.mapreduce.InputSplit inputSplit,
org.apache.hadoop.mapreduce.TaskAttemptContext taskAttemptContext)
throws IOException,
InterruptedException,
UnsupportedOperationException
Overrides: initialize in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader
Throws: IOException, InterruptedException, UnsupportedOperationException

public void close()
throws IOException
Specified by: close in interface Closeable
Specified by: close in interface AutoCloseable
Overrides: close in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader
Throws: IOException

public org.apache.spark.sql.vectorized.ColumnarBatch resultBatch()
Overrides: resultBatch in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader

public boolean nextBatch()
throws IOException
Overrides: nextBatch in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader
Throws: IOException

public void enableReturningBatches()
Overrides: enableReturningBatches in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader

public Object getCurrentValue()
Overrides: getCurrentValue in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader

public boolean nextKeyValue()
throws IOException
Overrides: nextKeyValue in class org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader
Throws: IOException

Copyright © 2023 The Apache Software Foundation. All rights reserved.