Package org.apache.druid.indexer
Class HadoopTuningConfig
- java.lang.Object
-
- org.apache.druid.indexer.HadoopTuningConfig
-
- All Implemented Interfaces:
org.apache.druid.segment.indexing.TuningConfig
public class HadoopTuningConfig extends Object implements org.apache.druid.segment.indexing.TuningConfig
-
-
Field Summary
Fields Modifier and Type Field Description static int DEFAULT_DETERMINE_PARTITIONS_SAMPLING_FACTOR
Fields inherited from interface org.apache.druid.segment.indexing.TuningConfig
DEFAULT_APPENDABLE_INDEX, DEFAULT_AWAIT_SEGMENT_AVAILABILITY_TIMEOUT_MILLIS, DEFAULT_LOG_PARSE_EXCEPTIONS, DEFAULT_MAX_PARSE_EXCEPTIONS, DEFAULT_MAX_ROWS_IN_MEMORY_BATCH, DEFAULT_MAX_ROWS_IN_MEMORY_REALTIME, DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS, DEFAULT_SKIP_BYTES_IN_MEMORY_OVERHEAD_CHECK
-
-
Constructor Summary
Constructors Constructor Description HadoopTuningConfig(String workingPath, String version, org.apache.druid.indexer.partitions.DimensionBasedPartitionsSpec partitionsSpec, Map<Long,List<HadoopyShardSpec>> shardSpecs, org.apache.druid.segment.IndexSpec indexSpec, org.apache.druid.segment.IndexSpec indexSpecForIntermediatePersists, org.apache.druid.segment.incremental.AppendableIndexSpec appendableIndexSpec, Integer maxRowsInMemory, Long maxBytesInMemory, boolean leaveIntermediate, Boolean cleanupOnFailure, boolean overwriteFiles, Boolean ignoreInvalidRows, Map<String,String> jobProperties, boolean combineText, Boolean useCombiner, Integer maxRowsInMemoryCOMPAT, Integer numBackgroundPersistThreads, boolean forceExtendableShardSpecs, boolean useExplicitVersion, List<String> allowedHadoopPrefix, Boolean logParseExceptions, Integer maxParseExceptions, Boolean useYarnRMJobStatusFallback, Long awaitSegmentAvailabilityTimeoutMillis, Integer determinePartitionsSamplingFactor)
-
Method Summary
-
-
-
Field Detail
-
DEFAULT_DETERMINE_PARTITIONS_SAMPLING_FACTOR
public static final int DEFAULT_DETERMINE_PARTITIONS_SAMPLING_FACTOR
- See Also:
- Constant Field Values
-
-
Constructor Detail
-
HadoopTuningConfig
public HadoopTuningConfig(@Nullable String workingPath, @Nullable String version, @Nullable org.apache.druid.indexer.partitions.DimensionBasedPartitionsSpec partitionsSpec, @Nullable Map<Long,List<HadoopyShardSpec>> shardSpecs, @Nullable org.apache.druid.segment.IndexSpec indexSpec, @Nullable org.apache.druid.segment.IndexSpec indexSpecForIntermediatePersists, @Nullable org.apache.druid.segment.incremental.AppendableIndexSpec appendableIndexSpec, @Nullable Integer maxRowsInMemory, @Nullable Long maxBytesInMemory, boolean leaveIntermediate, @Nullable Boolean cleanupOnFailure, boolean overwriteFiles, @Deprecated @Nullable Boolean ignoreInvalidRows, @Nullable Map<String,String> jobProperties, boolean combineText, @Nullable Boolean useCombiner, @Nullable Integer maxRowsInMemoryCOMPAT, @Nullable Integer numBackgroundPersistThreads, boolean forceExtendableShardSpecs, boolean useExplicitVersion, @Nullable List<String> allowedHadoopPrefix, @Nullable Boolean logParseExceptions, @Nullable Integer maxParseExceptions, @Nullable Boolean useYarnRMJobStatusFallback, @Nullable Long awaitSegmentAvailabilityTimeoutMillis, @Nullable Integer determinePartitionsSamplingFactor)
-
-
Method Detail
-
makeDefaultTuningConfig
public static HadoopTuningConfig makeDefaultTuningConfig()
-
getVersion
public String getVersion()
-
getPartitionsSpec
public org.apache.druid.indexer.partitions.DimensionBasedPartitionsSpec getPartitionsSpec()
- Specified by:
getPartitionsSpec in interface org.apache.druid.segment.indexing.TuningConfig
-
getShardSpecs
public Map<Long,List<HadoopyShardSpec>> getShardSpecs()
-
getIndexSpec
public org.apache.druid.segment.IndexSpec getIndexSpec()
- Specified by:
getIndexSpec in interface org.apache.druid.segment.indexing.TuningConfig
-
getIndexSpecForIntermediatePersists
public org.apache.druid.segment.IndexSpec getIndexSpecForIntermediatePersists()
- Specified by:
getIndexSpecForIntermediatePersists in interface org.apache.druid.segment.indexing.TuningConfig
-
getAppendableIndexSpec
public org.apache.druid.segment.incremental.AppendableIndexSpec getAppendableIndexSpec()
- Specified by:
getAppendableIndexSpec in interface org.apache.druid.segment.indexing.TuningConfig
-
getMaxRowsInMemory
public int getMaxRowsInMemory()
- Specified by:
getMaxRowsInMemory in interface org.apache.druid.segment.indexing.TuningConfig
-
getMaxBytesInMemory
public long getMaxBytesInMemory()
- Specified by:
getMaxBytesInMemory in interface org.apache.druid.segment.indexing.TuningConfig
-
isLeaveIntermediate
public boolean isLeaveIntermediate()
-
isCleanupOnFailure
public Boolean isCleanupOnFailure()
-
isOverwriteFiles
public boolean isOverwriteFiles()
-
isIgnoreInvalidRows
public Boolean isIgnoreInvalidRows()
-
isCombineText
public boolean isCombineText()
-
getUseCombiner
public boolean getUseCombiner()
-
getNumBackgroundPersistThreads
public int getNumBackgroundPersistThreads()
-
isForceExtendableShardSpecs
public boolean isForceExtendableShardSpecs()
-
isUseExplicitVersion
public boolean isUseExplicitVersion()
-
isLogParseExceptions
public boolean isLogParseExceptions()
-
getMaxParseExceptions
public int getMaxParseExceptions()
-
isUseYarnRMJobStatusFallback
public boolean isUseYarnRMJobStatusFallback()
-
getAwaitSegmentAvailabilityTimeoutMillis
public long getAwaitSegmentAvailabilityTimeoutMillis()
-
getDeterminePartitionsSamplingFactor
public int getDeterminePartitionsSamplingFactor()
-
withWorkingPath
public HadoopTuningConfig withWorkingPath(String path)
-
withVersion
public HadoopTuningConfig withVersion(String ver)
-
withShardSpecs
public HadoopTuningConfig withShardSpecs(Map<Long,List<HadoopyShardSpec>> specs)
-
-