public class DirectBigQueryRelation extends BigQueryRelation implements org.apache.spark.sql.sources.TableScan, org.apache.spark.sql.sources.PrunedScan, org.apache.spark.sql.sources.PrunedFilteredScan, org.apache.spark.sql.sources.InsertableRelation
| Modifier and Type | Field and Description |
|---|---|
| static int | emptyRowRDDsCreated |
| Constructor and Description |
|---|
| DirectBigQueryRelation(SparkBigQueryConfig options, com.google.cloud.bigquery.TableInfo table, BigQueryClient bigQueryClient, BigQueryClientFactory bigQueryReadClientFactory, BigQueryTracerFactory bigQueryTracerFactory, org.apache.spark.sql.SQLContext sqlContext) |
| Modifier and Type | Method and Description |
|---|---|
| org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> | buildScan() |
| org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> | buildScan(String[] requiredColumns) |
| org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> | buildScan(String[] requiredColumns, org.apache.spark.sql.sources.Filter[] filters) |
| boolean | equals(Object o) |
| BigQueryRDDFactory | getBigQueryRDDFactory() |
| int | hashCode() |
| void | insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> data, boolean overwrite) |
| boolean | needConversion() |
| long | sizeInBytes() |
| String | toString() |
| org.apache.spark.sql.sources.Filter[] | unhandledFilters(org.apache.spark.sql.sources.Filter[] filters) |
getTableId, getTableName, getTableNameForLogging, schema, sqlContext

public DirectBigQueryRelation(SparkBigQueryConfig options, com.google.cloud.bigquery.TableInfo table, BigQueryClient bigQueryClient, BigQueryClientFactory bigQueryReadClientFactory, BigQueryTracerFactory bigQueryTracerFactory, org.apache.spark.sql.SQLContext sqlContext)
public boolean needConversion()
needConversion in class org.apache.spark.sql.sources.BaseRelation

public long sizeInBytes()
sizeInBytes in class org.apache.spark.sql.sources.BaseRelation

public org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> buildScan()
buildScan in interface org.apache.spark.sql.sources.TableScan

public org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> buildScan(String[] requiredColumns)
buildScan in interface org.apache.spark.sql.sources.PrunedScan

public org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> buildScan(String[] requiredColumns, org.apache.spark.sql.sources.Filter[] filters)
buildScan in interface org.apache.spark.sql.sources.PrunedFilteredScan

public org.apache.spark.sql.sources.Filter[] unhandledFilters(org.apache.spark.sql.sources.Filter[] filters)
unhandledFilters in class org.apache.spark.sql.sources.BaseRelation

public BigQueryRDDFactory getBigQueryRDDFactory()
public void insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> data, boolean overwrite)

insert in interface org.apache.spark.sql.sources.InsertableRelation

Copyright © 2024. All rights reserved.