public class HoodieBulkInsertDataInternalWriter extends Object implements org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>
Implementation of Spark's DataWriter. This is used in the data source implementation for bulk insert.

| 构造器和说明 |
|---|
HoodieBulkInsertDataInternalWriter(HoodieTable hoodieTable,
HoodieWriteConfig writeConfig,
String instantTime,
int taskPartitionId,
long taskId,
long taskEpochId,
org.apache.spark.sql.types.StructType structType,
boolean populateMetaFields,
boolean arePartitionRecordsSorted) |
public HoodieBulkInsertDataInternalWriter(HoodieTable hoodieTable, HoodieWriteConfig writeConfig, String instantTime, int taskPartitionId, long taskId, long taskEpochId, org.apache.spark.sql.types.StructType structType, boolean populateMetaFields, boolean arePartitionRecordsSorted)
public void write(org.apache.spark.sql.catalyst.InternalRow record)
throws IOException
write 在接口中 org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>

抛出: IOException

public org.apache.spark.sql.sources.v2.writer.WriterCommitMessage commit()
throws IOException
commit 在接口中 org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>

抛出: IOException

public void abort()
abort 在接口中 org.apache.spark.sql.sources.v2.writer.DataWriter<org.apache.spark.sql.catalyst.InternalRow>

Copyright © 2023 The Apache Software Foundation. All rights reserved.