org.apache.spark.sql.execution.datasources.FilePartition filePartition
org.apache.log4j.Logger log
org.apache.spark.sql.hudi.SparkAdapter sparkAdapter
boolean bitmap$0

HoodieBaseRelation.BaseFileReader:
scala.Function1<T1,R> read
org.apache.spark.sql.types.StructType schema
private Object readResolve()
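BaseFileReader couples a per-file read function with the schema of the rows that function produces. A minimal sketch of that shape, using a stand-in case class rather than Hudi's nested type (the stub read function and empty schema are placeholders):

    import org.apache.spark.sql.catalyst.InternalRow
    import org.apache.spark.sql.execution.datasources.PartitionedFile
    import org.apache.spark.sql.types.StructType

    // Stand-in mirroring the two serialized fields above: a function that
    // turns one file into rows, plus the schema of those rows.
    case class BaseFileReader(read: PartitionedFile => Iterator[InternalRow],
                              schema: StructType)

    // A stub that yields no rows; a real reader would wrap e.g. a Parquet reader.
    val emptyReader = BaseFileReader(
      read = (_: PartitionedFile) => Iterator.empty,
      schema = StructType(Nil))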

scala.collection.immutable.List<A> files
private Object readResolve()

HoodieBootstrapPartition:
int index
HoodieBootstrapSplit split
private Object readResolve()

HoodieBootstrapRDD:
scala.Function1<T1,R> dataReadFunction
scala.Function1<T1,R> skeletonReadFunction
scala.Function1<T1,R> regularReadFunction
org.apache.spark.sql.types.StructType dataSchema
org.apache.spark.sql.types.StructType skeletonSchema
String[] requiredColumns
HoodieBootstrapTableState tableState

HoodieBootstrapSplit:
org.apache.spark.sql.execution.datasources.PartitionedFile dataFile
scala.Option<A> skeletonFile
private Object readResolve()

HoodieBootstrapTableState:
scala.collection.immutable.List<A> files
private Object readResolve()
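Taken together, HoodieBootstrapRDD, HoodieBootstrapSplit, and HoodieBootstrapTableState describe the bootstrap read path: each split pairs a data file with an optional skeleton file (the Hudi metadata columns written at bootstrap time), and the table state is just the list of splits. A sketch with stand-in case classes mirroring the fields above:

    import org.apache.spark.sql.execution.datasources.PartitionedFile

    // Stand-ins with the same fields as the serialized entries above.
    case class HoodieBootstrapSplit(dataFile: PartitionedFile,
                                    skeletonFile: Option[PartitionedFile])
    case class HoodieBootstrapTableState(files: List[HoodieBootstrapSplit])

    // A fully bootstrapped file carries both sides; a regular file only data.
    def toTableState(pairs: Seq[(PartitionedFile, Option[PartitionedFile])]) =
      HoodieBootstrapTableState(pairs.map { case (data, skeleton) =>
        HoodieBootstrapSplit(data, skeleton)
      }.toList)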

HoodieFileIndex:
org.apache.spark.sql.SparkSession spark
HoodieTableMetaClient metaClient
scala.Option<A> schemaSpec
scala.collection.immutable.Map<K,V> options

HoodieFileIndex.DataSkippingFailureMode:
String configName
HoodieFileIndex$DataSkippingFailureMode$Val Fallback
HoodieFileIndex$DataSkippingFailureMode$Val Strict
private Object readResolve()

HoodieFileIndex.DataSkippingFailureMode.Val:
String value
private Object readResolve()
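Fallback and Strict are the two failure modes for data skipping (file pruning via column statistics): either fall back to a full file listing or fail the query. A read sketch that opts in; hoodie.enable.data.skipping is the documented option key, while the failure-mode key and value below are assumptions based on the configName and value fields above (spark is an active SparkSession):

    val df = spark.read
      .format("hudi")
      // prune files using column-stats metadata where available
      .option("hoodie.enable.data.skipping", "true")
      // assumed key/value: fail the query rather than silently scanning everything
      .option("hoodie.fileindex.data.skipping.failure.mode", "strict")
      .load("/tmp/hudi_table")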

HoodieMergeOnReadBaseFileReaders:
HoodieBaseRelation.BaseFileReader fullSchemaReader
HoodieBaseRelation.BaseFileReader requiredSchemaReader
HoodieBaseRelation.BaseFileReader requiredSchemaReaderSkipMerging
private Object readResolve()

HoodieMergeOnReadFileSplit:
scala.Option<A> dataFile
scala.collection.immutable.List<A> logFiles
private Object readResolve()

HoodieMergeOnReadPartition:
int index
HoodieMergeOnReadFileSplit split
private Object readResolve()

HoodieMergeOnReadRDD:
HoodieMergeOnReadBaseFileReaders fileReaders
HoodieTableSchema org$apache$hudi$HoodieMergeOnReadRDD$$tableSchema
HoodieTableSchema org$apache$hudi$HoodieMergeOnReadRDD$$requiredSchema
HoodieTableState org$apache$hudi$HoodieMergeOnReadRDD$$tableState
String mergeType
long maxCompactionMemoryInBytes
org.apache.spark.broadcast.Broadcast<T> confBroadcast
Properties org$apache$hudi$HoodieMergeOnReadRDD$$payloadProps
scala.collection.immutable.Set<A> whitelistedPayloadClasses
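mergeType carries the documented read option hoodie.datasource.merge.type: payload_combine merges log records into base-file records through the payload class, while skip_merge returns base-file records as-is. maxCompactionMemoryInBytes caps the spillable map used during the merge; the hoodie.memory.merge.max.size key below is an assumption:

    val morDf = spark.read
      .format("hudi")
      // snapshot query on a MERGE_ON_READ table
      .option("hoodie.datasource.query.type", "snapshot")
      // combine log records with base-file records via the record payload
      .option("hoodie.datasource.merge.type", "payload_combine")
      // assumed key: bound the merge-time spillable map (bytes)
      .option("hoodie.memory.merge.max.size", (1024L * 1024 * 1024).toString)
      .load("/tmp/hudi_mor_table")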

org.apache.hadoop.fs.Path basePath
String instantTime
String commitActionType
WriteOperationType operation
private Object readResolve()
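commitActionType records the timeline action the operation produces: copy-on-write writes commit as "commit", merge-on-read writes as "deltacommit". Both constants live on HoodieTimeline:

    import org.apache.hudi.common.model.WriteOperationType
    import org.apache.hudi.common.table.timeline.HoodieTimeline

    val op = WriteOperationType.UPSERT
    val actionForCow = HoodieTimeline.COMMIT_ACTION        // "commit"
    val actionForMor = HoodieTimeline.DELTA_COMMIT_ACTION  // "deltacommit"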

HoodieStreamingSink:
org.apache.spark.sql.SQLContext sqlContext
scala.collection.immutable.Map<K,V> options
long latestBatchId
org.apache.log4j.Logger org$apache$hudi$HoodieStreamingSink$$log
int retryCnt
long retryIntervalMs
boolean ignoreFailedBatch
boolean org$apache$hudi$HoodieStreamingSink$$isAsyncCompactorServiceShutdownAbnormally
boolean org$apache$hudi$HoodieStreamingSink$$isAsyncClusteringServiceShutdownAbnormally
org.apache.spark.sql.SaveMode mode
AsyncCompactService asyncCompactorService
AsyncClusteringService asyncClusteringService
scala.Option<A> writeClient
scala.Option<A> hoodieTableConfig
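retryCnt, retryIntervalMs, and ignoreFailedBatch are populated from the streaming write options, so micro-batch retry behavior is set per query. A minimal structured-streaming write (df is a streaming DataFrame; paths and field names are placeholders):

    val query = df.writeStream
      .format("hudi")
      .option("hoodie.table.name", "events")
      .option("hoodie.datasource.write.recordkey.field", "uuid")
      .option("hoodie.datasource.write.precombine.field", "ts")
      // retry a failed micro-batch 3 times, 2s apart, then fail the query
      .option("hoodie.datasource.write.streaming.retry.count", "3")
      .option("hoodie.datasource.write.streaming.retry.interval.ms", "2000")
      .option("hoodie.datasource.write.streaming.ignore.failed.batch", "false")
      .option("checkpointLocation", "/tmp/checkpoints/events")
      .start("/tmp/hudi_events")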

HoodieTableSchema:
org.apache.spark.sql.types.StructType structTypeSchema
String avroSchemaStr
scala.Option<A> internalSchema
private Object readResolve()
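HoodieTableSchema carries the same schema in both Catalyst and Avro form. The Avro side can be derived from a StructType with the spark-avro module's SchemaConverters, for example:

    import org.apache.spark.sql.avro.SchemaConverters
    import org.apache.spark.sql.types._

    val structTypeSchema = StructType(Seq(
      StructField("uuid", StringType, nullable = false),
      StructField("ts", LongType, nullable = false)))

    // The Avro representation kept alongside the Catalyst one.
    val avroSchemaStr =
      SchemaConverters.toAvroType(structTypeSchema, nullable = false).toString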

HoodieTableState:
String tablePath
String latestCommitTimestamp
String recordKeyField
scala.Option<A> preCombineFieldOpt
boolean usesVirtualKeys
String recordPayloadClassName
HoodieMetadataConfig metadataConfig
private Object readResolve()
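recordKeyField, preCombineFieldOpt, and recordPayloadClassName mirror the corresponding write options, so the writer fixes the state later readers see. For example:

    df.write
      .format("hudi")
      .option("hoodie.table.name", "events")
      // becomes recordKeyField / preCombineFieldOpt in the read-path table state
      .option("hoodie.datasource.write.recordkey.field", "uuid")
      .option("hoodie.datasource.write.precombine.field", "ts")
      // payload class used when merging records (recordPayloadClassName)
      .option("hoodie.datasource.write.payload.class",
        "org.apache.hudi.common.model.OverwriteWithLatestAvroPayload")
      .mode("append")
      .save("/tmp/hudi_events")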

org.apache.spark.sql.catalyst.encoders.ExpressionEncoder<T> encoder
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.Deserializer<T> deserializer
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.Serializer<T> serializer
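Caching the encoder's Serializer and Deserializer is the Spark 3 pattern for converting between JVM objects and InternalRow without re-deriving expressions per record. A self-contained round trip (the Event case class is illustrative):

    import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder

    case class Event(uuid: String, ts: Long)

    // resolveAndBind is required before the deserializer can run on raw rows
    val encoder = ExpressionEncoder[Event]().resolveAndBind()
    val serializer = encoder.createSerializer()
    val deserializer = encoder.createDeserializer()

    val row  = serializer(Event("a-1", 42L)) // Event -> InternalRow
    val back = deserializer(row)             // InternalRow -> Event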

BootstrapExecutorUtils:
BootstrapExecutorUtils.Config cfg
TypedProperties props
org.apache.hadoop.conf.Configuration configuration
HoodieWriteConfig bootstrapConfig
String bootstrapBasePath

String command
String srcPath
String targetPath
String tableName
String tableType
String rowKey
String partitionKey
int parallelism
String schemaFile
int retry
String propsFilePath
List<E> configs
TypedProperties props
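These entries funnel a properties file plus ad-hoc overrides into a single TypedProperties object, which extends java.util.Properties with typed getters. A sketch (the getInteger accessor is an assumption):

    import org.apache.hudi.common.config.TypedProperties

    val props = new TypedProperties()
    // the equivalent of entries in a props file or per-job config overrides
    props.setProperty("hoodie.datasource.write.recordkey.field", "uuid")
    props.setProperty("hoodie.upsert.shuffle.parallelism", "4")

    // assumed typed getter with a default
    val parallelism = props.getInteger("hoodie.upsert.shuffle.parallelism", 2)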

TypedProperties config
org.apache.spark.api.java.JavaSparkContext jssc
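These last two fields match the shape of the hudi-utilities SchemaProvider base class, which ingestion jobs use to resolve source and target Avro schemas. A hedged sketch of a custom provider, assuming the two-argument constructor and the getSourceSchema contract:

    import org.apache.avro.Schema
    import org.apache.hudi.common.config.TypedProperties
    import org.apache.hudi.utilities.schema.SchemaProvider
    import org.apache.spark.api.java.JavaSparkContext

    // Returns a fixed schema; a real provider would consult a registry or file.
    class StaticSchemaProvider(props: TypedProperties, jssc: JavaSparkContext)
        extends SchemaProvider(props, jssc) {

      private val source =
        """{"type":"record","name":"Event","fields":[
          |  {"name":"uuid","type":"string"},
          |  {"name":"ts","type":"long"}
          |]}""".stripMargin

      override def getSourceSchema: Schema = new Schema.Parser().parse(source)
    }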