public class BigQueryIndirectDataSourceWriterContext extends Object implements DataSourceWriterContext
| Constructor and Description |
|---|
BigQueryIndirectDataSourceWriterContext(BigQueryClient bigQueryClient,
SparkBigQueryConfig config,
org.apache.hadoop.conf.Configuration hadoopConfiguration,
org.apache.spark.sql.types.StructType sparkSchema,
String writeUUID,
org.apache.spark.sql.SaveMode saveMode,
org.apache.hadoop.fs.Path gcsPath,
Optional<IntermediateDataCleaner> intermediateDataCleaner,
org.apache.spark.SparkContext sparkContext) |
| Modifier and Type | Method and Description |
|---|---|
void |
abort(WriterCommitMessageContext[] messages) |
void |
commit(WriterCommitMessageContext[] messages) |
DataWriterContextFactory<org.apache.spark.sql.catalyst.InternalRow> |
createWriterContextFactory() |
void |
setTableInfo(com.google.cloud.bigquery.TableInfo tableInfo) |
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface DataSourceWriterContext: create, onDataWriterCommit, useCommitCoordinator

Constructor Detail:
public BigQueryIndirectDataSourceWriterContext(BigQueryClient bigQueryClient, SparkBigQueryConfig config, org.apache.hadoop.conf.Configuration hadoopConfiguration, org.apache.spark.sql.types.StructType sparkSchema, String writeUUID, org.apache.spark.sql.SaveMode saveMode, org.apache.hadoop.fs.Path gcsPath, Optional<IntermediateDataCleaner> intermediateDataCleaner, org.apache.spark.SparkContext sparkContext)
public DataWriterContextFactory<org.apache.spark.sql.catalyst.InternalRow> createWriterContextFactory()
Specified by: createWriterContextFactory in interface DataSourceWriterContext

public void commit(WriterCommitMessageContext[] messages)
Specified by: commit in interface DataSourceWriterContext

public void abort(WriterCommitMessageContext[] messages)
Specified by: abort in interface DataSourceWriterContext

public void setTableInfo(com.google.cloud.bigquery.TableInfo tableInfo)
Specified by: setTableInfo in interface DataSourceWriterContext

Copyright © 2024. All rights reserved.