public class HoodieBulkInsertDataInternalWriter extends Object implements org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>
Implementation of DataWriter. This is used in the data source implementation for bulk insert.

| Constructor and Description |
|---|
HoodieBulkInsertDataInternalWriter(org.apache.hudi.table.HoodieTable hoodieTable,
org.apache.hudi.config.HoodieWriteConfig writeConfig,
String instantTime,
int taskPartitionId,
long taskId,
long taskEpochId,
org.apache.spark.sql.types.StructType structType,
boolean populateMetaFields,
boolean arePartitionRecordsSorted) |
| Modifier and Type | Method and Description |
|---|---|
void |
abort() |
org.apache.spark.sql.sources.v2.writer.WriterCommitMessage |
commit() |
void |
write(org.apache.spark.sql.catalyst.InternalRow record) |
public HoodieBulkInsertDataInternalWriter(org.apache.hudi.table.HoodieTable hoodieTable, org.apache.hudi.config.HoodieWriteConfig writeConfig, String instantTime, int taskPartitionId, long taskId, long taskEpochId, org.apache.spark.sql.types.StructType structType, boolean populateMetaFields, boolean arePartitionRecordsSorted)
public void write(org.apache.spark.sql.catalyst.InternalRow record)
throws IOException
Specified by: write in interface org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>
Throws: IOException

public org.apache.spark.sql.sources.v2.writer.WriterCommitMessage commit()
throws IOException
Specified by: commit in interface org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>
Throws: IOException

public void abort()
Specified by: abort in interface org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>

Copyright © 2023 The Apache Software Foundation. All rights reserved.