| Modifier and Type | Field and Description |
|---|---|
| static HiveType | HiveColumnHandle.BUCKET_HIVE_TYPE |
| static HiveType | HiveType.HIVE_BINARY |
| static HiveType | HiveType.HIVE_BOOLEAN |
| static HiveType | HiveType.HIVE_BYTE |
| static HiveType | HiveType.HIVE_DATE |
| static HiveType | HiveType.HIVE_DOUBLE |
| static HiveType | HiveType.HIVE_FLOAT |
| static HiveType | HiveType.HIVE_INT |
| static HiveType | HiveType.HIVE_LONG |
| static HiveType | HiveType.HIVE_SHORT |
| static HiveType | HiveType.HIVE_STRING |
| static HiveType | HiveType.HIVE_TIMESTAMP |
| static HiveType | HiveColumnHandle.PATH_HIVE_TYPE |
| Modifier and Type | Method and Description |
|---|---|
| HiveType | HiveColumnHandle.getHiveType() |
| HiveType | HiveTypeName.toHiveType() |
| static HiveType | HiveType.toHiveType(TypeTranslator typeTranslator, Type type) |
| static HiveType | HiveType.valueOf(String hiveTypeName) |
| Modifier and Type | Method and Description |
|---|---|
| static List<HiveType> | HiveUtil.extractStructFieldTypes(HiveType hiveType) |
| List<HiveType> | HivePageSourceProvider.BucketAdaptation.getBucketColumnHiveTypes() |
| Optional<HiveType> | HivePageSourceProvider.ColumnMapping.getCoercionFrom() |
| Map<Integer,HiveType> | HiveSplit.getColumnCoercions() |
| List<HiveType> | HivePartitioningHandle.getHiveTypes() |
| static List<HiveType> | HiveType.toHiveTypes(String hiveTypes) |
| Modifier and Type | Method and Description |
|---|---|
| boolean | CoercionPolicy.canCoerce(HiveType fromType, HiveType toType) |
| boolean | HiveCoercionPolicy.canCoerce(HiveType fromHiveType, HiveType toHiveType) |
| static List<HiveType> | HiveUtil.extractStructFieldTypes(HiveType hiveType) |
| static Optional<DecimalType> | HiveUtil.getDecimalType(HiveType hiveType) |
| static boolean | HiveUtil.isStructuralType(HiveType hiveType) |
| static boolean | HiveWriteUtils.isWritableType(HiveType hiveType) |
| Modifier and Type | Method and Description |
|---|---|
| static List<HivePageSourceProvider.ColumnMapping> | HivePageSourceProvider.ColumnMapping.buildColumnMappings(List<HivePartitionKey> partitionKeys, List<HiveColumnHandle> columns, List<HiveColumnHandle> requiredInterimColumns, Map<Integer,HiveType> columnCoercions, org.apache.hadoop.fs.Path path, OptionalInt bucketNumber) |
| static Optional<ConnectorPageSource> | HivePageSourceProvider.createHivePageSource(Set<HiveRecordCursorProvider> cursorProviders, Set<HivePageSourceFactory> pageSourceFactories, org.apache.hadoop.conf.Configuration configuration, ConnectorSession session, org.apache.hadoop.fs.Path path, OptionalInt bucketNumber, long start, long length, long fileSize, Properties schema, TupleDomain<HiveColumnHandle> effectivePredicate, List<HiveColumnHandle> hiveColumns, List<HivePartitionKey> partitionKeys, org.joda.time.DateTimeZone hiveStorageTimeZone, TypeManager typeManager, Map<Integer,HiveType> columnCoercions, Optional<HiveSplit.BucketConversion> bucketConversion, boolean s3SelectPushdownEnabled) |
| static HivePageSourceProvider.ColumnMapping | HivePageSourceProvider.ColumnMapping.prefilled(HiveColumnHandle hiveColumnHandle, String prefilledValue, Optional<HiveType> coerceFrom) |
| static HivePageSourceProvider.ColumnMapping | HivePageSourceProvider.ColumnMapping.regular(HiveColumnHandle hiveColumnHandle, int index, Optional<HiveType> coerceFrom) |
| Constructor and Description |
|---|
| HiveColumnHandle(String name, HiveType hiveType, TypeSignature typeSignature, int hiveColumnIndex, HiveColumnHandle.ColumnType columnType, Optional<String> comment) |
| Constructor and Description |
|---|
| BucketAdaptation(int[] bucketColumnIndices, List<HiveType> bucketColumnHiveTypes, int tableBucketCount, int partitionBucketCount, int bucketToKeep) |
| HiveBucketAdapterRecordCursor(int[] bucketColumnIndices, List<HiveType> bucketColumnHiveTypes, int tableBucketCount, int partitionBucketCount, int bucketToKeep, TypeManager typeManager, RecordCursor delegate) |
| HiveBucketFunction(int bucketCount, List<HiveType> hiveTypes) |
| HivePartitioningHandle(int bucketCount, List<HiveType> hiveTypes, OptionalInt maxCompatibleBucketCount) |
| HiveSplit(String database, String table, String partitionName, String path, long start, long length, long fileSize, Properties schema, List<HivePartitionKey> partitionKeys, List<HostAddress> addresses, OptionalInt bucketNumber, boolean forceLocalScheduling, TupleDomain<HiveColumnHandle> effectivePredicate, Map<Integer,HiveType> columnCoercions, Optional<HiveSplit.BucketConversion> bucketConversion, boolean s3SelectPushdownEnabled) |
| Modifier and Type | Method and Description |
|---|---|
| HiveType | Column.getType() |
| Modifier and Type | Method and Description |
|---|---|
| void | RecordingHiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| void | CachingHiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| void | HiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| void | SemiTransactionalHiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| Constructor and Description |
|---|
| Column(String name, HiveType type, Optional<String> comment) |
| Modifier and Type | Method and Description |
|---|---|
| void | FileHiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| Modifier and Type | Method and Description |
|---|---|
| void | GlueHiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| Modifier and Type | Method and Description |
|---|---|
| void | BridgingHiveMetastore.addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) |
| static org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj | ThriftMetastoreUtil.createMetastoreColumnStatistics(String columnName, HiveType columnType, HiveColumnStatistics statistics, OptionalLong rowCount) |
Copyright © 2012–2019. All rights reserved.