| Modifier and Type | Method and Description |
|---|---|
Task<?> |
TaskQueue.getRunnable(int maxthreads) |
| Modifier and Type | Method and Description |
|---|---|
List<Task<?>> |
QueryPlan.getRootTasks() |
| Modifier and Type | Method and Description |
|---|---|
boolean |
TaskQueue.addToRunnable(Task<?> tsk) |
static boolean |
TaskQueue.isLaunchable(Task<?> tsk)
Checks if a task can be launched.
|
void |
TaskQueue.remove(Task<?> task) |
void |
QueryDisplay.TaskDisplay.updateStatus(Task<?> tTask) |
void |
QueryDisplay.updateTaskStatus(Task<?> tTask) |
| Modifier and Type | Method and Description |
|---|---|
void |
HookRunner.runPostAnalyzeHooks(HiveSemanticAnalyzerHookContext hookCtx,
List<Task<?>> allRootTasks) |
void |
QueryPlan.setRootTasks(List<Task<?>> rootTasks) |
| Constructor and Description |
|---|
TaskDisplay(Task task) |
| Modifier and Type | Class and Description |
|---|---|
class |
DDLTask
DDLTask implementation.
|
| Modifier and Type | Method and Description |
|---|---|
protected void |
AlterTableAddPartitionAnalyzer.postProcess(TableName tableName,
Table table,
AlterTableAddPartitionDesc desc,
Task<DDLWork> ddlTask)
Adding a partition to a transactional table needs to add (copy/rename) the data so that it lands
in a delta_x_x/ folder in the partition directory.
|
protected void |
AlterViewAddPartitionAnalyzer.postProcess(TableName tableName,
Table table,
AlterTableAddPartitionDesc desc,
Task<DDLWork> ddlTask) |
| Modifier and Type | Class and Description |
|---|---|
class |
ColumnStatsUpdateTask
ColumnStatsUpdateTask implementation.
|
class |
ConditionalTask
Conditional Task implementation.
|
class |
CopyTask
CopyTask implementation.
|
class |
DependencyCollectionTask
DependencyCollectionTask.
|
class |
ExplainSQRewriteTask |
class |
ExplainTask
ExplainTask implementation.
|
class |
ExportTask |
class |
FetchTask
FetchTask implementation.
|
class |
MoveTask
MoveTask implementation.
|
class |
ReplCopyTask |
class |
ReplTxnTask
ReplTxnTask.
|
class |
StatsTask
StatsTask implementation.
|
| Modifier and Type | Field and Description |
|---|---|
protected Task<?> |
Task.backupTask |
protected Task<?> |
TaskRunner.tsk |
| Modifier and Type | Field and Description |
|---|---|
protected List<Task<?>> |
Task.backupChildrenTasks |
protected List<Task<?>> |
Task.childTasks |
protected List<Task<?>> |
Task.feedSubscribers |
protected List<Task<?>> |
Task.parentTasks |
Class<? extends Task<T>> |
TaskFactory.TaskTuple.taskClass |
| Modifier and Type | Method and Description |
|---|---|
static <T extends Serializable> |
TaskFactory.get(T work) |
static <T extends Serializable> |
TaskFactory.get(T work,
HiveConf conf) |
Task<?> |
Task.getAndInitBackupTask() |
static <T extends Serializable> |
TaskFactory.getAndMakeChild(T work,
HiveConf conf,
Task<? extends Serializable>... tasklist) |
Task<?> |
Task.getBackupTask() |
static Task<?> |
ReplCopyTask.getDumpCopyTask(ReplicationSpec replicationSpec,
org.apache.hadoop.fs.Path srcPath,
org.apache.hadoop.fs.Path dstPath,
HiveConf conf,
boolean readSourceAsFileList,
boolean overWrite,
boolean deleteDestination,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
static Task<?> |
ReplCopyTask.getDumpCopyTask(ReplicationSpec replicationSpec,
org.apache.hadoop.fs.Path srcPath,
org.apache.hadoop.fs.Path dstPath,
HiveConf conf,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
static Task<?> |
ReplCopyTask.getLoadCopyTask(ReplicationSpec replicationSpec,
org.apache.hadoop.fs.Path srcPath,
org.apache.hadoop.fs.Path dstPath,
HiveConf conf,
boolean isAutoPurge,
boolean needRecycle,
boolean readSourceAsFileList,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
static Task<?> |
ReplCopyTask.getLoadCopyTask(ReplicationSpec replicationSpec,
org.apache.hadoop.fs.Path srcPath,
org.apache.hadoop.fs.Path dstPath,
HiveConf conf,
boolean readSourceAsFileList,
boolean overWrite,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
static Task<?> |
ReplCopyTask.getLoadCopyTask(ReplicationSpec replicationSpec,
org.apache.hadoop.fs.Path srcPath,
org.apache.hadoop.fs.Path dstPath,
HiveConf conf,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
Task<?> |
TaskRunner.getTask() |
| Modifier and Type | Method and Description |
|---|---|
static List<Task<?>> |
Task.findLeafs(List<Task<?>> rootTasks) |
List<Task<?>> |
Task.getBackupChildrenTasks() |
List<Task<?>> |
Task.getChildTasks() |
List<Task<?>> |
ConditionalTask.getDependentTasks() |
List<Task<?>> |
Task.getDependentTasks()
The default dependent tasks are just child tasks, but different types could implement their own
(e.g. ConditionalTask).
|
List<Task<?>> |
Task.getFeedSubscribers() |
List<Task<?>> |
ConditionalTask.getListTasks() |
List<Task<?>> |
Task.getParentTasks() |
| Modifier and Type | Method and Description |
|---|---|
boolean |
ConditionalTask.addDependentTask(Task<?> dependent)
Add a dependent task on the current conditional task.
|
boolean |
Task.addDependentTask(Task<?> dependent)
Add a dependent task on the current task.
|
static <T extends Serializable> |
TaskFactory.getAndMakeChild(T work,
HiveConf conf,
Task<? extends Serializable>... tasklist) |
org.json.JSONObject |
ExplainTask.getJSONPlan(PrintStream out,
List<Task<?>> tasks,
Task<?> fetchTask,
boolean jsonOutput,
boolean isExtended,
boolean appendTaskType,
String cboInfo,
String cboPlan,
String optimizedSQL) |
org.json.JSONObject |
ExplainTask.getJSONPlan(PrintStream out,
List<Task<?>> tasks,
Task<?> fetchTask,
boolean jsonOutput,
boolean isExtended,
boolean appendTaskType,
String cboInfo,
String cboPlan,
String optimizedSQL,
String stageIdRearrange) |
static void |
TaskFactory.makeChild(Task<?> ret,
Task<?>... tasklist) |
static void |
TaskFactory.makeChild(Task<?> ret,
Task<?>... tasklist) |
void |
Task.removeDependentTask(Task<?> dependent)
Remove the dependent task.
|
static void |
Utilities.reworkMapRedWork(Task<?> task,
boolean reworkMapredWork,
HiveConf conf)
The check here is not entirely clean.
|
void |
Task.setBackupTask(Task<?> backupTask) |
void |
Task.subscribeFeed(Task<?> publisher)
Subscribe the feed of publisher.
|
| Modifier and Type | Method and Description |
|---|---|
static List<Task<?>> |
Task.findLeafs(List<Task<?>> rootTasks) |
org.json.JSONObject |
ExplainTask.getJSONPlan(PrintStream out,
List<Task<?>> tasks,
Task<?> fetchTask,
boolean jsonOutput,
boolean isExtended,
boolean appendTaskType,
String cboInfo,
String cboPlan,
String optimizedSQL) |
org.json.JSONObject |
ExplainTask.getJSONPlan(PrintStream out,
List<Task<?>> tasks,
Task<?> fetchTask,
boolean jsonOutput,
boolean isExtended,
boolean appendTaskType,
String cboInfo,
String cboPlan,
String optimizedSQL,
String stageIdRearrange) |
static List<ExecDriver> |
Utilities.getMRTasks(List<Task<?>> tasks) |
static int |
Utilities.getNumClusterJobs(List<Task<?>> tasks) |
static List<TezTask> |
Utilities.getTezTasks(List<Task<?>> tasks) |
static <T> void |
NodeUtils.iterateTask(Collection<Task<?>> tasks,
Class<T> clazz,
NodeUtils.Function<T> function) |
org.json.JSONObject |
ExplainTask.outputDependencies(PrintStream out,
boolean jsonOutput,
boolean appendTaskType,
List<Task> tasks) |
org.json.JSONObject |
ExplainTask.outputStagePlans(PrintStream out,
List<Task> tasks,
boolean jsonOutput,
boolean isExtended) |
void |
Task.setBackupChildrenTasks(List<Task<?>> backupChildrenTasks) |
void |
Task.setChildTasks(List<Task<?>> childTasks) |
void |
Task.setFeedSubscribers(List<Task<?>> s) |
void |
ConditionalTask.setListTasks(List<Task<?>> listTasks) |
void |
Task.setParentTasks(List<Task<?>> parentTasks) |
| Constructor and Description |
|---|
TaskRunner(Task<?> tsk,
TaskQueue taskQueue) |
| Constructor and Description |
|---|
TaskTuple(Class<T> workClass,
Class<? extends Task<T>> taskClass) |
| Modifier and Type | Class and Description |
|---|---|
class |
ExecDriver
ExecDriver is the central class in co-ordinating execution of any map-reduce task.
|
class |
MapredLocalTask
MapredLocalTask represents any local work (i.e., client-side work) that Hive needs to
execute.
|
class |
MapRedTask
Extension of ExecDriver:
- can optionally spawn a map-reduce task from a separate jvm
- will make last minute adjustments to map-reduce job parameters, viz:
* estimating number of reducers
* estimating whether job should run locally
|
| Modifier and Type | Field and Description |
|---|---|
protected Task<?> |
HadoopJobExecHelper.task |
| Constructor and Description |
|---|
HadoopJobExecHelper(org.apache.hadoop.mapred.JobConf job,
SessionState.LogHelper console,
Task<?> task,
HadoopJobExecHook hookCallBack) |
| Modifier and Type | Class and Description |
|---|---|
class |
AckTask
AckTask.
|
class |
AtlasDumpTask
Atlas Metadata Replication Dump Task.
|
class |
AtlasLoadTask
Atlas Metadata Replication Load Task.
|
class |
DirCopyTask
DirCopyTask, mainly to be used to copy External table data.
|
class |
RangerDenyTask
RangerDenyTask.
|
class |
RangerDumpTask
RangerDumpTask.
|
class |
RangerLoadTask
RangerLoadTask.
|
class |
ReplDumpTask |
class |
ReplLoadTask |
class |
ReplStateLogTask
ReplStateLogTask.
|
| Modifier and Type | Method and Description |
|---|---|
static Task<?> |
ReplLoadTask.createViewTask(MetaData metaData,
String dbNameToLoadIn,
HiveConf conf,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
Task<?> |
ReplLoadWork.getRootTask() |
| Modifier and Type | Method and Description |
|---|---|
List<Task<?>> |
ReplLoadWork.externalTableCopyTasks(TaskTracker tracker,
HiveConf conf) |
List<Task<?>> |
ReplDumpWork.externalTableCopyTasks(TaskTracker tracker,
HiveConf conf) |
List<Task<?>> |
ReplDumpWork.functionsBinariesCopyTasks(TaskTracker tracker,
HiveConf conf) |
List<Task<?>> |
ReplDumpWork.managedTableCopyTasks(TaskTracker tracker,
HiveConf conf) |
| Modifier and Type | Method and Description |
|---|---|
void |
ReplLoadWork.setRootTask(Task<?> rootTask) |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
IncrementalLoadTasksBuilder.build(Context context,
Hive hive,
org.slf4j.Logger log,
TaskTracker tracker) |
| Modifier and Type | Method and Description |
|---|---|
static Task<?> |
ReplUtils.getTableCheckpointTask(ImportTableDesc tableDesc,
HashMap<String,String> partSpec,
String dumpRoot,
ReplicationMetricCollector metricCollector,
HiveConf conf) |
static Task<?> |
ReplUtils.getTableReplLogTask(ImportTableDesc tableDesc,
ReplLogger replLogger,
HiveConf conf,
ReplicationMetricCollector metricCollector,
String dumpRoot) |
| Modifier and Type | Method and Description |
|---|---|
static List<Task<?>> |
ReplUtils.addChildTask(Task<?> childTask) |
static List<Task<?>> |
ReplUtils.addTasksForLoadingColStats(org.apache.hadoop.hive.metastore.api.ColumnStatistics colStats,
HiveConf conf,
UpdatedMetaDataTracker updatedMetadata,
org.apache.hadoop.hive.metastore.api.Table tableObj,
long writeId,
String nonRecoverableMarkPath,
ReplicationMetricCollector metricCollector) |
List<Task<?>> |
TaskTracker.tasks() |
| Modifier and Type | Method and Description |
|---|---|
static List<Task<?>> |
ReplUtils.addChildTask(Task<?> childTask) |
void |
TaskTracker.addDependentTask(Task<?> dependent) |
void |
TaskTracker.addTask(Task<?> task)
this method is used to identify all the tasks in a graph.
|
void |
AddDependencyToLeaves.process(Task<?> task) |
boolean |
AddDependencyToLeaves.skipProcessing(Task<?> task) |
| Modifier and Type | Method and Description |
|---|---|
static void |
ReplUtils.addLoggerTask(ReplLogger replLogger,
List<Task<?>> tasks,
HiveConf conf)
Adds a logger task at the end of the tasks passed.
|
void |
TaskTracker.addTaskList(List<Task<?>> taskList) |
| Constructor and Description |
|---|
AddDependencyToLeaves(Task<?> postDependencyTask) |
| Constructor and Description |
|---|
AddDependencyToLeaves(List<Task<?>> postDependencyCollectionTasks) |
| Modifier and Type | Class and Description |
|---|---|
class |
ScheduledQueryMaintenanceTask
Scheduled query maintenance task.
|
| Modifier and Type | Class and Description |
|---|---|
class |
TezTask
TezTask handles the execution of TezWork.
|
| Modifier and Type | Method and Description |
|---|---|
void |
DAGTraversal.Function.process(Task<?> task) |
boolean |
DAGTraversal.Function.skipProcessing(Task<?> task) |
| Modifier and Type | Method and Description |
|---|---|
static void |
DAGTraversal.traverse(List<Task<?>> tasks,
DAGTraversal.Function function) |
| Modifier and Type | Method and Description |
|---|---|
void |
HiveHistory.endTask(String queryId,
Task<?> task)
Called at the end of a task.
|
void |
HiveHistoryImpl.endTask(String queryId,
Task<?> task) |
void |
HiveHistory.progressTask(String queryId,
Task<?> task)
Logs the progress of a task if ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS is
set to true.
|
void |
HiveHistoryImpl.progressTask(String queryId,
Task<?> task) |
void |
HiveHistory.startTask(String queryId,
Task<?> task,
String taskName)
Called at the start of a task.
|
void |
HiveHistoryImpl.startTask(String queryId,
Task<?> task,
String taskName) |
| Modifier and Type | Method and Description |
|---|---|
void |
AccurateEstimatesCheckerHook.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<?>> rootTasks) |
| Modifier and Type | Class and Description |
|---|---|
class |
MergeFileTask
Task for fast merging of ORC and RC files.
|
| Modifier and Type | Class and Description |
|---|---|
class |
ColumnTruncateTask |
| Modifier and Type | Method and Description |
|---|---|
static Task<MoveWork> |
GenMapRedUtils.findMoveTaskForFsopOutput(List<Task<MoveWork>> mvTasks,
org.apache.hadoop.fs.Path fsopFinalDir,
boolean isMmFsop,
boolean isDirectInsert,
String fsoMoveTaskId,
AcidUtils.Operation acidOperation) |
Task<?> |
GenMRProcContext.getCurrTask() |
Task<?> |
GenMRProcContext.GenMapRedCtx.getCurrTask() |
Task<?> |
GenMRProcContext.GenMRUnionCtx.getUTask() |
| Modifier and Type | Method and Description |
|---|---|
Map<FileSinkDesc,Task<?>> |
GenMRProcContext.getLinkedFileDescTasks() |
List<Task<MoveWork>> |
GenMRProcContext.getMvTask() |
HashMap<Operator<? extends OperatorDesc>,Task<?>> |
GenMRProcContext.getOpTaskMap() |
List<Task<?>> |
GenMRProcContext.getRootTasks() |
| Modifier and Type | Method and Description |
|---|---|
static void |
GenMapRedUtils.addDependentMoveTasks(Task<MoveWork> mvTask,
HiveConf hconf,
Task<?> parentTask,
DependencyCollectionTask dependencyTask)
Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask.
|
static void |
GenMapRedUtils.addDependentMoveTasks(Task<MoveWork> mvTask,
HiveConf hconf,
Task<?> parentTask,
DependencyCollectionTask dependencyTask)
Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask.
|
boolean |
GenMRProcContext.addRootIfPossible(Task<?> task) |
void |
GenMRProcContext.addSeenOp(Task task,
Operator operator) |
static void |
GenMapRedUtils.addStatsTask(FileSinkOperator nd,
MoveTask mvTask,
Task<?> currTask,
HiveConf hconf)
Add the StatsTask as a dependent task of the MoveTask
because StatsTask will change the Table/Partition metadata.
|
static org.apache.hadoop.fs.Path |
GenMapRedUtils.createMoveTask(Task<?> currTask,
boolean chDir,
FileSinkOperator fsOp,
ParseContext parseCtx,
List<Task<MoveWork>> mvTasks,
HiveConf hconf,
DependencyCollectionTask dependencyTask)
Create and add any dependent move tasks
|
static void |
GenMapRedUtils.createMRWorkForMergingFiles(FileSinkOperator fsInput,
org.apache.hadoop.fs.Path finalName,
DependencyCollectionTask dependencyTask,
List<Task<MoveWork>> mvTasks,
HiveConf conf,
Task<?> currTask,
LineageState lineageState) |
static void |
GenMapRedUtils.initUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
Task<?> currTask,
boolean local) |
static void |
GenMapRedUtils.initUnionPlan(ReduceSinkOperator op,
UnionOperator currUnionOp,
GenMRProcContext opProcCtx,
Task<?> unionTask)
Initialize the current union plan.
|
static boolean |
GenMapRedUtils.isMergeRequired(List<Task<MoveWork>> mvTasks,
HiveConf hconf,
FileSinkOperator fsOp,
Task<?> currTask,
boolean isInsertTable)
Returns true iff the fsOp requires a merge
|
boolean |
GenMRProcContext.isSeenOp(Task task,
Operator operator) |
static void |
GenMapRedUtils.joinPlan(Task<?> currTask,
Task<?> oldTask,
GenMRProcContext opProcCtx)
Merge the current task into the old task for the reducer
|
static void |
GenMapRedUtils.joinPlan(Task<?> currTask,
Task<?> oldTask,
GenMRProcContext opProcCtx)
Merge the current task into the old task for the reducer
|
static void |
GenMapRedUtils.joinUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
Task<?> currentUnionTask,
Task<?> existingTask,
boolean local) |
static void |
GenMapRedUtils.joinUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
Task<?> currentUnionTask,
Task<?> existingTask,
boolean local) |
void |
GenMRProcContext.setCurrTask(Task<?> currTask) |
static void |
GenMapRedUtils.setKeyAndValueDescForTaskTree(Task<?> task)
Set the key and value description for all the tasks rooted at the given
task.
|
static void |
GenMapRedUtils.setTaskPlan(String alias_id,
TableScanOperator topOp,
Task<?> task,
boolean local,
GenMRProcContext opProcCtx)
set the current task in the mapredWork.
|
static void |
GenMapRedUtils.setTaskPlan(String alias_id,
TableScanOperator topOp,
Task<?> task,
boolean local,
GenMRProcContext opProcCtx,
PrunedPartitionList pList)
set the current task in the mapredWork.
|
| Modifier and Type | Method and Description |
|---|---|
static org.apache.hadoop.fs.Path |
GenMapRedUtils.createMoveTask(Task<?> currTask,
boolean chDir,
FileSinkOperator fsOp,
ParseContext parseCtx,
List<Task<MoveWork>> mvTasks,
HiveConf hconf,
DependencyCollectionTask dependencyTask)
Create and add any dependent move tasks
|
static void |
GenMapRedUtils.createMRWorkForMergingFiles(FileSinkOperator fsInput,
org.apache.hadoop.fs.Path finalName,
DependencyCollectionTask dependencyTask,
List<Task<MoveWork>> mvTasks,
HiveConf conf,
Task<?> currTask,
LineageState lineageState) |
static void |
GenMapRedUtils.finalMapWorkChores(List<Task<?>> tasks,
org.apache.hadoop.conf.Configuration conf,
com.google.common.collect.Interner<TableDesc> interner)
Called at the end of TaskCompiler::compile.
This currently does the following for each map work:
1.
|
static Task<MoveWork> |
GenMapRedUtils.findMoveTaskForFsopOutput(List<Task<MoveWork>> mvTasks,
org.apache.hadoop.fs.Path fsopFinalDir,
boolean isMmFsop,
boolean isDirectInsert,
String fsoMoveTaskId,
AcidUtils.Operation acidOperation) |
static boolean |
GenMapRedUtils.isMergeRequired(List<Task<MoveWork>> mvTasks,
HiveConf hconf,
FileSinkOperator fsOp,
Task<?> currTask,
boolean isInsertTable)
Returns true iff the fsOp requires a merge
|
void |
GenMRProcContext.setLinkedFileDescTasks(Map<FileSinkDesc,Task<?>> linkedFileDescTasks) |
void |
GenMRProcContext.setMvTask(List<Task<MoveWork>> mvTask) |
void |
GenMRProcContext.setOpTaskMap(HashMap<Operator<? extends OperatorDesc>,Task<?>> opTaskMap) |
void |
GenMRProcContext.setRootTasks(List<Task<?>> rootTasks) |
| Constructor and Description |
|---|
GenMapRedCtx(Task<?> currTask,
String currAliasId) |
GenMRUnionCtx(Task<?> uTask) |
| Modifier and Type | Field and Description |
|---|---|
protected Task<?> |
PhysicalContext.fetchTask |
| Modifier and Type | Field and Description |
|---|---|
protected List<Task<?>> |
PhysicalContext.rootTasks |
protected Set<Task<?>> |
StageIDsRearranger.TaskTraverse.traversed |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
MapJoinResolver.LocalMapJoinProcCtx.getCurrentTask() |
Task<?> |
SkewJoinResolver.SkewJoinProcCtx.getCurrentTask() |
Task<?> |
PhysicalContext.getFetchTask() |
abstract Task<?> |
AbstractJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
Task<?> |
CommonJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
Task<?> |
SortMergeJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
| Modifier and Type | Method and Description |
|---|---|
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.getChildTasks(Task<?> task) |
static List<Task> |
StageIDsRearranger.getExplainOrder(List<Task<?>> tasks,
String stageIdRearrange) |
static List<Task> |
StageIDsRearranger.getFetchSources(List<Task<?>> tasks) |
List<Task<?>> |
PhysicalContext.getRootTasks() |
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.next(Task<?> task) |
| Modifier and Type | Method and Description |
|---|---|
protected void |
StageIDsRearranger.TaskTraverse.accepted(Task<?> task) |
void |
PhysicalContext.addToRootTask(Task<?> tsk) |
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.getChildTasks(Task<?> task) |
protected boolean |
StageIDsRearranger.TaskTraverse.isReady(Task<?> task) |
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.next(Task<?> task) |
static void |
GenMRSkewJoinProcessor.processSkewJoin(JoinOperator joinOp,
Task<?> currTask,
ParseContext parseCtx)
Create tasks for processing skew joins.
|
protected void |
StageIDsRearranger.TaskTraverse.rejected(Task<?> child) |
void |
PhysicalContext.removeFromRootTask(Task<?> tsk) |
protected void |
AbstractJoinTaskDispatcher.replaceTask(Task<?> currTask,
Task<?> newTask) |
protected void |
AbstractJoinTaskDispatcher.replaceTask(Task<?> currTask,
Task<?> newTask) |
protected void |
AbstractJoinTaskDispatcher.replaceTaskWithConditionalTask(Task<?> currTask,
ConditionalTask cndTsk) |
void |
MapJoinResolver.LocalMapJoinProcCtx.setCurrentTask(Task<?> currentTask) |
void |
SkewJoinResolver.SkewJoinProcCtx.setCurrentTask(Task<?> currentTask) |
void |
PhysicalContext.setFetchTask(Task<?> fetchTask) |
void |
StageIDsRearranger.TaskTraverse.traverse(Task<?> task) |
| Modifier and Type | Method and Description |
|---|---|
static List<Task> |
StageIDsRearranger.getExplainOrder(List<Task<?>> tasks,
String stageIdRearrange) |
static List<Task> |
StageIDsRearranger.getFetchSources(List<Task<?>> tasks) |
void |
PhysicalContext.setRootTasks(List<Task<?>> rootTasks) |
| Constructor and Description |
|---|
LocalMapJoinProcCtx(Task<?> task,
ParseContext parseCtx) |
PhysicalContext(HiveConf conf,
ParseContext parseContext,
Context context,
List<Task<?>> rootTasks,
Task<?> fetchTask) |
SkewJoinProcCtx(Task<?> task,
ParseContext parseCtx) |
| Constructor and Description |
|---|
PhysicalContext(HiveConf conf,
ParseContext parseContext,
Context context,
List<Task<?>> rootTasks,
Task<?> fetchTask) |
| Modifier and Type | Field and Description |
|---|---|
List<Task<MoveWork>> |
GenTezProcContext.moveTask |
List<Task<?>> |
GenTezProcContext.rootTasks |
protected List<Task<?>> |
BaseSemanticAnalyzer.rootTasks |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
EximUtil.SemanticAnalyzerWrapperContext.getOpenTxnTask() |
| Modifier and Type | Method and Description |
|---|---|
List<Task<?>> |
SemanticAnalyzer.getAllRootTasks() |
List<Task<?>> |
BaseSemanticAnalyzer.getAllRootTasks() |
List<Task<?>> |
BaseSemanticAnalyzer.getRootTasks() |
List<Task<?>> |
EximUtil.SemanticAnalyzerWrapperContext.getTasks() |
| Modifier and Type | Method and Description |
|---|---|
void |
ParseContext.replaceRootTask(Task<?> rootTask,
List<? extends Task<?>> tasks) |
protected abstract void |
TaskCompiler.setInputFormat(Task<?> rootTask) |
protected void |
TezCompiler.setInputFormat(Task<?> task) |
protected void |
MapReduceCompiler.setInputFormat(Task<?> task) |
void |
EximUtil.SemanticAnalyzerWrapperContext.setOpenTxnTask(Task<?> openTxnTask) |
| Modifier and Type | Method and Description |
|---|---|
void |
TaskCompiler.compile(ParseContext pCtx,
List<Task<?>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected abstract void |
TaskCompiler.decideExecMode(List<Task<?>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
TezCompiler.decideExecMode(List<Task<?>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
MapReduceCompiler.decideExecMode(List<Task<?>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected abstract void |
TaskCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected abstract void |
TaskCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
MapReduceCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
MapReduceCompiler.generateTaskTree(List<Task<?>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
ParseContext |
TaskCompiler.getParseContext(ParseContext pCtx,
List<Task<?>> rootTasks)
Create a clone of the parse context
|
protected abstract void |
TaskCompiler.optimizeTaskPlan(List<Task<?>> rootTasks,
ParseContext pCtx,
Context ctx) |
protected void |
TezCompiler.optimizeTaskPlan(List<Task<?>> rootTasks,
ParseContext pCtx,
Context ctx) |
protected void |
MapReduceCompiler.optimizeTaskPlan(List<Task<?>> rootTasks,
ParseContext pCtx,
Context ctx) |
void |
HiveSemanticAnalyzerHook.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<?>> rootTasks)
Invoked after Hive performs its own semantic analysis on a
statement (including optimization).
|
void |
AbstractSemanticAnalyzerHook.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<?>> rootTasks) |
void |
ParseContext.replaceRootTask(Task<?> rootTask,
List<? extends Task<?>> tasks) |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
HiveAuthorizationTaskFactory.createCreateRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createCreateRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createDropRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createDropRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createGrantRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createGrantRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createGrantTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createGrantTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createRevokeRoleTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createRevokeRoleTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createRevokeTask(ASTNode node,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createRevokeTask(ASTNode ast,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createSetRoleTask(String roleName,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createSetRoleTask(String roleName,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowCurrentRoleTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowCurrentRoleTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<?> |
HiveAuthorizationTaskFactory.createShowGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowRoleGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowRoleGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactory.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Task<?> |
HiveAuthorizationTaskFactoryImpl.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
| Modifier and Type | Field and Description |
|---|---|
Task<?> |
MessageHandler.Context.precursor |
| Modifier and Type | Method and Description |
|---|---|
List<Task<?>> |
AddCheckConstraintHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
CommitTxnHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DeleteTableColStatHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
OpenTxnHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AddUniqueConstraintHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AllocWriteIdHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AddDefaultConstraintHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DropDatabaseHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DropTableHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
UpdatePartColStatHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
TruncatePartitionHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
RenamePartitionHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
RenameTableHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
CreateFunctionHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DeletePartColStatHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AddPrimaryKeyHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DropConstraintHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AlterDatabaseHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DropPartitionHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
UpdateTableColStatHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
CreateDatabaseHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AddNotNullConstraintHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
MessageHandler.handle(MessageHandler.Context withinContext) |
List<Task<?>> |
TableHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
AbortTxnHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
TruncateTableHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DefaultHandler.handle(MessageHandler.Context withinContext) |
List<Task<?>> |
AddForeignKeyHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
DropFunctionHandler.handle(MessageHandler.Context context) |
List<Task<?>> |
InsertHandler.handle(MessageHandler.Context withinContext) |
| Constructor and Description |
|---|
Context(String dbName,
String location,
Task<?> precursor,
DumpMetaData dmd,
HiveConf hiveConf,
Hive db,
Context nestedContext,
org.slf4j.Logger log) |
Context(String dbName,
String location,
Task<?> precursor,
DumpMetaData dmd,
HiveConf hiveConf,
Hive db,
Context nestedContext,
org.slf4j.Logger log,
String dumpDirectory,
ReplicationMetricCollector metricCollector) |
| Modifier and Type | Method and Description |
|---|---|
Task<?> |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.getCommonJoinTask() |
Task<?> |
ImportTableDesc.getCreateTableTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
HiveConf conf) |
Task<?> |
ImportTableDesc.getCreateTableTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
HiveConf conf,
boolean isReplication,
String dumpRoot,
ReplicationMetricCollector metricCollector,
boolean executeInParallel) |
Task<?> |
ExplainWork.getFetchTask() |
Task |
BasicStatsWork.getSourceTask() |
Task |
StatsWork.getSourceTask() |
protected Task<?> |
ConditionalResolverCommonJoin.resolveMapJoinTask(ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx ctx,
HiveConf conf) |
| Modifier and Type | Method and Description |
|---|---|
HashMap<org.apache.hadoop.fs.Path,Task<?>> |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.getDirToTaskMap() |
List<Task<?>> |
ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx.getListTasks() |
List<Task<?>> |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.getNoSkewTask() |
List<Task<?>> |
ExplainWork.getRootTasks() |
List<Task<?>> |
ConditionalResolver.getTasks(HiveConf conf,
Object ctx)
Returns the tasks selected for execution by this resolver; all conditional resolvers implement this interface.
|
List<Task<?>> |
ConditionalResolverMergeFiles.getTasks(HiveConf conf,
Object objCtx) |
List<Task<?>> |
ConditionalResolverSkewJoin.getTasks(HiveConf conf,
Object objCtx) |
List<Task<?>> |
ConditionalResolverCommonJoin.getTasks(HiveConf conf,
Object objCtx) |
HashMap<Task<?>,Set<String>> |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.getTaskToAliases() |
| Modifier and Type | Method and Description |
|---|---|
void |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.setCommonJoinTask(Task<?> commonJoinTask) |
void |
ExplainWork.setFetchTask(Task<?> fetchTask) |
void |
BasicStatsWork.setSourceTask(Task sourceTask) |
void |
StatsWork.setSourceTask(Task<?> sourceTask) |
| Modifier and Type | Method and Description |
|---|---|
void |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.setDirToTaskMap(HashMap<org.apache.hadoop.fs.Path,Task<?>> dirToTaskMap) |
void |
ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx.setListTasks(List<Task<?>> listTasks) |
void |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.setNoSkewTask(List<Task<?>> noSkewTask) |
void |
ExplainWork.setRootTasks(List<Task<?>> rootTasks) |
void |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.setTaskToAliases(HashMap<Task<?>,Set<String>> taskToAliases) |
| Constructor and Description |
|---|
ExplainWork(org.apache.hadoop.fs.Path resFile,
ParseContext pCtx,
List<Task<?>> rootTasks,
Task<?> fetchTask,
ASTNode astTree,
BaseSemanticAnalyzer analyzer,
ExplainConfiguration config,
String cboInfo,
String optimizedSQL,
String cboPlan) |
| Constructor and Description |
|---|
ConditionalResolverMergeFilesCtx(List<Task<?>> listTasks,
String dir) |
ConditionalResolverSkewJoinCtx(HashMap<org.apache.hadoop.fs.Path,Task<?>> dirToTaskMap,
List<Task<?>> noSkewTask) |
ConditionalResolverSkewJoinCtx(HashMap<org.apache.hadoop.fs.Path,Task<?>> dirToTaskMap,
List<Task<?>> noSkewTask) |
ExplainWork(org.apache.hadoop.fs.Path resFile,
ParseContext pCtx,
List<Task<?>> rootTasks,
Task<?> fetchTask,
ASTNode astTree,
BaseSemanticAnalyzer analyzer,
ExplainConfiguration config,
String cboInfo,
String optimizedSQL,
String cboPlan) |
| Modifier and Type | Method and Description |
|---|---|
Task |
StatsCollectionContext.getTask() |
| Modifier and Type | Method and Description |
|---|---|
void |
StatsCollectionContext.setTask(Task task) |
Copyright © 2024 The Apache Software Foundation. All rights reserved.