Class HiveWriteUtils

java.lang.Object
io.trino.plugin.hive.util.HiveWriteUtils

public final class HiveWriteUtils extends Object

Static utility methods supporting Hive write paths: creating record writers and serializers, building object inspectors for Trino types, formatting partition values, resolving default table locations, and checking filesystem/path properties (existence, S3, ViewFs, HDFS encryption).
  • Method Details

    • createRecordWriter

      public static org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter createRecordWriter(org.apache.hadoop.fs.Path target, org.apache.hadoop.mapred.JobConf conf, Properties properties, String outputFormatName, ConnectorSession session)
    • createRecordWriter

      public static org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter createRecordWriter(org.apache.hadoop.fs.Path target, org.apache.hadoop.mapred.JobConf conf, Properties properties, String outputFormatName, ConnectorSession session, Optional<TextHeaderWriter> textHeaderWriter)
    • initializeSerializer

      public static org.apache.hadoop.hive.serde2.Serializer initializeSerializer(org.apache.hadoop.conf.Configuration conf, Properties properties, String serializerName)
    • getJavaObjectInspector

      public static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector getJavaObjectInspector(Type type)
    • createPartitionValues

      public static List<String> createPartitionValues(List<Type> partitionColumnTypes, Page partitionColumns, int position)
    • getField

      public static Object getField(org.joda.time.DateTimeZone localZone, Type type, Block block, int position)
    • checkTableIsWritable

      public static void checkTableIsWritable(Table table, boolean writesToNonManagedTablesEnabled)
    • checkPartitionIsWritable

      public static void checkPartitionIsWritable(String partitionName, Partition partition)
    • getTableDefaultLocation

      public static Location getTableDefaultLocation(HdfsContext context, SemiTransactionalHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName)
    • getTableDefaultLocation

      public static Location getTableDefaultLocation(Database database, HdfsContext context, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName)
    • pathExists

      public static boolean pathExists(HdfsContext context, HdfsEnvironment hdfsEnvironment, org.apache.hadoop.fs.Path path)
    • isS3FileSystem

      public static boolean isS3FileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, org.apache.hadoop.fs.Path path)
    • isViewFileSystem

      public static boolean isViewFileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, org.apache.hadoop.fs.Path path)
    • isHdfsEncrypted

      public static boolean isHdfsEncrypted(HdfsContext context, HdfsEnvironment hdfsEnvironment, org.apache.hadoop.fs.Path path)
    • isFileCreatedByQuery

      public static boolean isFileCreatedByQuery(String fileName, String queryId)
    • createTemporaryPath

      public static Location createTemporaryPath(HdfsContext context, HdfsEnvironment hdfsEnvironment, org.apache.hadoop.fs.Path targetPath, String temporaryStagingDirectoryPath)
    • createDirectory

      public static void createDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, org.apache.hadoop.fs.Path path)
    • checkedDelete

      public static void checkedDelete(org.apache.hadoop.fs.FileSystem fileSystem, org.apache.hadoop.fs.Path file, boolean recursive) throws IOException
      Throws:
      IOException - if the file or directory cannot be deleted or an I/O error occurs
    • isWritableType

      public static boolean isWritableType(HiveType hiveType)
    • getRowColumnInspectors

      public static List<org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector> getRowColumnInspectors(List<Type> types)
    • getRowColumnInspector

      public static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector getRowColumnInspector(Type type)
    • getHiveDecimal

      public static org.apache.hadoop.hive.common.type.HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)