| Modifier and Type | Method and Description |
|---|---|
WriteEntity |
QueryPlan.getAcidAnalyzeTable() |
| Modifier and Type | Method and Description |
|---|---|
Map<LoadTableDesc,WriteEntity> |
Context.getLoadTableOutputMap() |
Map<WriteEntity,List<HiveLockObj>> |
Context.getOutputLockObjects() |
Set<WriteEntity> |
QueryPlan.getOutputs() |
| Modifier and Type | Method and Description |
|---|---|
void |
QueryPlan.setOutputs(HashSet<WriteEntity> outputs) |
| Modifier and Type | Method and Description |
|---|---|
Set<WriteEntity> |
DDLWork.getOutputs() |
| Modifier and Type | Method and Description |
|---|---|
static boolean |
DDLUtils.addIfAbsentByName(WriteEntity newWriteEntity,
DDLOperationContext context) |
static boolean |
DDLUtils.addIfAbsentByName(WriteEntity newWriteEntity,
Set<WriteEntity> outputs)
There are many places where "duplicate" Read/WriteEntity objects are added.
|
| Modifier and Type | Method and Description |
|---|---|
static void |
DDLUtils.addDbAndTableToOutputs(org.apache.hadoop.hive.metastore.api.Database database,
TableName tableName,
TableType type,
boolean isTemporary,
Map<String,String> properties,
Set<WriteEntity> outputs) |
static boolean |
DDLUtils.addIfAbsentByName(WriteEntity newWriteEntity,
Set<WriteEntity> outputs)
There are many places where "duplicate" Read/WriteEntity objects are added.
|
static void |
DDLUtils.addServiceOutput(HiveConf conf,
Set<WriteEntity> outputs) |
| Constructor and Description |
|---|
DDLWork(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
DDLDesc ddlDesc) |
DDLWork(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
DDLDesc ddlDesc,
boolean isReplication,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
DDLWork(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
DDLDesc ddlDesc,
boolean isReplication,
String dumpDirectory,
ReplicationMetricCollector metricCollector,
boolean executeInParallel) |
| Modifier and Type | Method and Description |
|---|---|
static void |
PartitionUtils.addTablePartsOutputs(Hive db,
Set<WriteEntity> outputs,
Table table,
List<Map<String,String>> partitionSpecs,
boolean allowMany,
WriteEntity.WriteType writeType)
Add the table partitions to be modified to the output, so that they are available for the pre-execution hook.
|
| Modifier and Type | Method and Description |
|---|---|
Set<WriteEntity> |
HookContext.getOutputs() |
| Modifier and Type | Method and Description |
|---|---|
void |
PostExecutePrinter.run(QueryState queryState,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LineageInfo linfo,
org.apache.hadoop.security.UserGroupInformation ugi) |
void |
PreExecutePrinter.run(QueryState queryState,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.security.UserGroupInformation ugi) |
void |
EnforceReadOnlyTables.run(SessionState sess,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.security.UserGroupInformation ugi,
boolean isExplain) |
void |
HookContext.setOutputs(Set<WriteEntity> outputs) |
| Modifier and Type | Method and Description |
|---|---|
static boolean |
AcidUtils.isExclusiveCTAS(Set<WriteEntity> outputs,
HiveConf conf) |
static List<org.apache.hadoop.hive.metastore.api.LockComponent> |
AcidUtils.makeLockComponents(Set<WriteEntity> outputs,
Set<ReadEntity> inputs,
Context.Operation operation,
HiveConf conf)
Create lock components from write/read entities.
|
| Modifier and Type | Method and Description |
|---|---|
default org.apache.hadoop.hive.metastore.api.LockType |
HiveStorageHandler.getLockType(WriteEntity writeEntity) |
| Modifier and Type | Method and Description |
|---|---|
Set<WriteEntity> |
GenMRProcContext.getOutputs()
Get the output set.
|
| Constructor and Description |
|---|
GenMRProcContext(HiveConf conf,
HashMap<Operator<? extends OperatorDesc>,Task<?>> opTaskMap,
ParseContext parseCtx,
List<Task<MoveWork>> mvTask,
List<Task<?>> rootTasks,
LinkedHashMap<Operator<? extends OperatorDesc>,GenMRProcContext.GenMapRedCtx> mapCurrCtx,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
| Modifier and Type | Field and Description |
|---|---|
Set<WriteEntity> |
GenTezProcContext.outputs |
protected Set<WriteEntity> |
BaseSemanticAnalyzer.outputs
List of WriteEntities that are passed to the hooks.
|
| Modifier and Type | Method and Description |
|---|---|
WriteEntity |
SemanticAnalyzer.getAcidAnalyzeTable() |
WriteEntity |
BaseSemanticAnalyzer.getAcidAnalyzeTable() |
protected WriteEntity |
BaseSemanticAnalyzer.toWriteEntity(org.apache.hadoop.fs.Path location) |
static WriteEntity |
BaseSemanticAnalyzer.toWriteEntity(org.apache.hadoop.fs.Path location,
HiveConf conf) |
protected WriteEntity |
BaseSemanticAnalyzer.toWriteEntity(String location) |
| Modifier and Type | Method and Description |
|---|---|
Set<WriteEntity> |
SemanticAnalyzer.getAllOutputs() |
Set<WriteEntity> |
LoadSemanticAnalyzer.getAllOutputs() |
Set<WriteEntity> |
BaseSemanticAnalyzer.getAllOutputs() |
Set<WriteEntity> |
HiveSemanticAnalyzerHookContext.getOutputs() |
Set<WriteEntity> |
HiveSemanticAnalyzerHookContextImpl.getOutputs() |
Set<WriteEntity> |
EximUtil.SemanticAnalyzerWrapperContext.getOutputs() |
Set<WriteEntity> |
BaseSemanticAnalyzer.getOutputs() |
| Modifier and Type | Method and Description |
|---|---|
void |
TaskCompiler.compile(ParseContext pCtx,
List<Task<?>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected abstract void |
TaskCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
MapReduceCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
| Constructor and Description |
|---|
GenTezProcContext(HiveConf conf,
ParseContext parseContext,
List<Task<MoveWork>> moveTask,
List<Task<?>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
SemanticAnalyzerWrapperContext(HiveConf conf,
Hive db,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
List<Task<?>> tasks,
org.slf4j.Logger LOG,
Context ctx) |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
HiveAuthorizationTaskFactory.createCreateRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createCreateRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createDropRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createDropRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createGrantRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createGrantRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createGrantTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createGrantTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createRevokeRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createRevokeRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createRevokeTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createRevokeTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createSetRoleTask(String roleName,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createSetRoleTask(String roleName,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowCurrentRoleTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowCurrentRoleTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<?> |
HiveAuthorizationTaskFactory.createShowGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowRoleGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowRoleGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
| Modifier and Type | Field and Description |
|---|---|
Set<WriteEntity> |
TableExport.AuthEntities.outputs |
| Modifier and Type | Method and Description |
|---|---|
Set<WriteEntity> |
MessageHandler.writeEntities() |
| Modifier and Type | Field and Description |
|---|---|
protected Set<WriteEntity> |
MoveWork.outputs
List of WriteEntities that are passed to the hooks.
|
| Modifier and Type | Method and Description |
|---|---|
Set<WriteEntity> |
ExplainWork.getOutputs() |
Set<WriteEntity> |
MoveWork.getOutputs() |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
ImportTableDesc.getCreateTableTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
HiveConf conf) |
Task<?> |
ImportTableDesc.getCreateTableTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
HiveConf conf,
boolean isReplication,
String dumpRoot,
ReplicationMetricCollector metricCollector,
boolean executeInParallel) |
void |
ExplainWork.setOutputs(Set<WriteEntity> outputs) |
void |
MoveWork.setOutputs(Set<WriteEntity> outputs) |
| Constructor and Description |
|---|
MoveWork(boolean isCTAS,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LoadTableDesc loadTableWork,
LoadFileDesc loadFileWork,
boolean checkFileFormat) |
MoveWork(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LoadTableDesc loadTableWork,
LoadFileDesc loadFileWork,
boolean checkFileFormat) |
MoveWork(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LoadTableDesc loadTableWork,
LoadFileDesc loadFileWork,
boolean checkFileFormat,
boolean srcLocal) |
MoveWork(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LoadTableDesc loadTableWork,
LoadFileDesc loadFileWork,
boolean checkFileFormat,
String dumpRoot,
ReplicationMetricCollector metricCollector,
boolean isReplication) |
| Modifier and Type | Method and Description |
|---|---|
void |
MetaStoreAuthzAPIAuthorizerEmbedOnly.authorizeDbLevelOperations(Privilege[] readRequiredPriv,
Privilege[] writeRequiredPriv,
Collection<ReadEntity> inputs,
Collection<WriteEntity> outputs) |
void |
HiveAuthorizationProvider.authorizeDbLevelOperations(Privilege[] readRequiredPriv,
Privilege[] writeRequiredPriv,
Collection<ReadEntity> inputs,
Collection<WriteEntity> outputs)
Authorizes user-level privileges for database-level operations.
|
void |
BitSetCheckedAuthorizationProvider.authorizeDbLevelOperations(Privilege[] inputRequiredPriv,
Privilege[] outputRequiredPriv,
Collection<ReadEntity> inputs,
Collection<WriteEntity> outputs) |
void |
StorageBasedAuthorizationProvider.authorizeDbLevelOperations(Privilege[] readRequiredPriv,
Privilege[] writeRequiredPriv,
Collection<ReadEntity> inputs,
Collection<WriteEntity> outputs) |
Copyright © 2024 The Apache Software Foundation. All rights reserved.