Packages

class SQLConf extends Serializable with Logging

A class that enables the setting and getting of mutable config parameters/hints.

In the presence of a SQLContext, these can be set and queried by passing SET commands into Spark SQL's query functions (i.e. sql()). Otherwise, users of this class can modify the hints by programmatically calling the setters and getters of this class.

SQLConf is thread-safe (internally synchronized, so safe to be used in multiple threads).

Linear Supertypes
Logging, Serializable, Serializable, AnyRef, Any
Known Subclasses
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. SQLConf
  2. Logging
  3. Serializable
  4. Serializable
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new SQLConf()

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def adaptiveExecutionEnabled: Boolean
  5. def adaptiveExecutionLogLevel: String
  6. def addSingleFileInAddFile: Boolean
  7. def advancedPartitionPredicatePushdownEnabled: Boolean
  8. def allowAutoGeneratedAliasForView: Boolean
  9. def allowNegativeScaleOfDecimalEnabled: Boolean
  10. def allowNonEmptyLocationInCTAS: Boolean
  11. def allowStarWithSingleTableIdentifierInCount: Boolean
  12. def allowsTempViewCreationWithMultipleNameparts: Boolean
  13. def analyzerMaxIterations: Int

    (Section: Spark SQL Params/Hints)

  14. def ansiEnabled: Boolean
  15. def ansiRelationPrecedence: Boolean
  16. def arrowLocalRelationThreshold: Long
  17. def arrowMaxRecordsPerBatch: Int
  18. def arrowPySparkEnabled: Boolean
  19. def arrowPySparkFallbackEnabled: Boolean
  20. def arrowPySparkSelfDestructEnabled: Boolean
  21. def arrowSafeTypeConversion: Boolean
  22. def arrowSparkREnabled: Boolean
  23. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  24. def autoBroadcastJoinThreshold: Long
  25. def autoBucketedScanEnabled: Boolean
  26. def autoSizeUpdateEnabled: Boolean
  27. def avroCompressionCodec: String
  28. def avroDeflateLevel: Int
  29. def avroFilterPushDown: Boolean
  30. def broadcastHashJoinOutputPartitioningExpandLimit: Int
  31. def broadcastTimeout: Long
  32. def bucketingEnabled: Boolean
  33. def bucketingMaxBuckets: Int
  34. def cacheVectorizedReaderEnabled: Boolean
  35. def cartesianProductExecBufferInMemoryThreshold: Int
  36. def cartesianProductExecBufferSpillThreshold: Int
  37. def caseSensitiveAnalysis: Boolean
  38. def caseSensitiveInferenceMode: SQLConf.HiveCaseSensitiveInferenceMode.Value
  39. def castDatetimeToString: Boolean
  40. def cboEnabled: Boolean
  41. def charVarcharAsString: Boolean
  42. def checkpointLocation: Option[String]
  43. def checkpointRenamedFileCheck: Boolean
  44. def clear(): Unit
  45. def cliPrintHeader: Boolean
  46. def clone(): SQLConf
    Definition Classes
    SQLConf → AnyRef
  47. def coalesceBucketsInJoinEnabled: Boolean
  48. def coalesceBucketsInJoinMaxBucketRatio: Int
  49. def coalesceShufflePartitionsEnabled: Boolean
  50. def codegenCacheMaxEntries: Int
  51. def codegenComments: Boolean
  52. def codegenFallback: Boolean
  53. def codegenSplitAggregateFunc: Boolean
  54. def columnBatchSize: Int
  55. def columnNameOfCorruptRecord: String
  56. def concatBinaryAsString: Boolean
  57. def constraintPropagationEnabled: Boolean
  58. def contains(key: String): Boolean

    Return whether a given key is set in this SQLConf.

  59. def continuousStreamingEpochBacklogQueueSize: Int
  60. def continuousStreamingExecutorPollIntervalMs: Long
  61. def continuousStreamingExecutorQueueSize: Int
  62. def convertCTAS: Boolean
  63. def copy(entries: (ConfigEntry[_], Any)*): SQLConf
  64. def crossJoinEnabled: Boolean
  65. def csvColumnPruning: Boolean
  66. def csvEnableDateTimeParsingFallback: Option[Boolean]
  67. def csvExpressionOptimization: Boolean
  68. def csvFilterPushDown: Boolean
  69. def dataFramePivotMaxValues: Int
  70. def dataFrameRetainGroupColumns: Boolean
  71. def dataFrameSelfJoinAutoResolveAmbiguity: Boolean
  72. def datetimeJava8ApiEnabled: Boolean
  73. def decimalOperationsAllowPrecisionLoss: Boolean
  74. def decorrelateInnerQueryEnabled: Boolean
  75. def defaultColumnAllowedProviders: String
  76. def defaultDataSourceName: String
  77. def defaultDatabase: String
  78. def defaultNumShufflePartitions: Int
  79. def defaultSizeInBytes: Long
  80. def disabledJdbcConnectionProviders: String
  81. def disabledV2StreamingMicroBatchReaders: String
  82. def disabledV2StreamingWriters: String
  83. def doubleQuotedIdentifiers: Boolean
  84. def dynamicPartitionPruningEnabled: Boolean
  85. def dynamicPartitionPruningFallbackFilterRatio: Double
  86. def dynamicPartitionPruningReuseBroadcastOnly: Boolean
  87. def dynamicPartitionPruningUseStats: Boolean
  88. def eltOutputAsString: Boolean
  89. def enableDefaultColumns: Boolean
  90. def enableRadixSort: Boolean
  91. def enableTwoLevelAggMap: Boolean
  92. def enableVectorizedHashMap: Boolean
  93. def enforceReservedKeywords: Boolean
  94. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  95. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  96. def errorMessageFormat: ErrorMessageFormat.Value
  97. def escapedStringLiterals: Boolean
  98. def exchangeReuseEnabled: Boolean
  99. def exponentLiteralAsDecimalEnabled: Boolean
  100. def fallBackToHdfsForStatsEnabled: Boolean
  101. def fastFailFileFormatOutput: Boolean
  102. def fastHashAggregateRowMaxCapacityBit: Int
  103. def fetchShuffleBlocksInBatch: Boolean
  104. def fileCommitProtocolClass: String
  105. def fileCompressionFactor: Double
  106. def fileSinkLogCleanupDelay: Long
  107. def fileSinkLogCompactInterval: Int
  108. def fileSinkLogDeletion: Boolean
  109. def fileSourceLogCleanupDelay: Long
  110. def fileSourceLogCompactInterval: Int
  111. def fileSourceLogDeletion: Boolean
  112. def fileStreamSinkMetadataIgnored: Boolean
  113. def filesMaxPartitionBytes: Long
  114. def filesMinPartitionNum: Option[Int]
  115. def filesOpenCostInBytes: Long
  116. def filesourcePartitionFileCacheSize: Long
  117. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  118. def gatherFastStats: Boolean
  119. def getAllConfs: Map[String, String]

    Return all the configuration properties that have been set (i.e. not the default).

    Return all the configuration properties that have been set (i.e. not the default). This creates a new copy of the config properties in the form of a Map.

  120. def getAllDefinedConfs: Seq[(String, String, String, String)]

    Return all the configuration definitions that have been defined in SQLConf.

    Return all the configuration definitions that have been defined in SQLConf. Each definition contains key, defaultValue and doc.

  121. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  122. def getConf[T](entry: OptionalConfigEntry[T]): Option[T]

    Return the value of an optional Spark SQL configuration property for the given key.

    Return the value of an optional Spark SQL configuration property for the given key. If the key is not set yet, returns None.

  123. def getConf[T](entry: ConfigEntry[T]): T

    Return the value of Spark SQL configuration property for the given key.

    Return the value of Spark SQL configuration property for the given key. If the key is not set yet, return defaultValue in ConfigEntry.

  124. def getConf[T](entry: ConfigEntry[T], defaultValue: T): T

    Return the value of Spark SQL configuration property for the given key.

    Return the value of Spark SQL configuration property for the given key. If the key is not set yet, return defaultValue. This is useful when defaultValue in ConfigEntry is not the desired one.

  125. def getConfString(key: String, defaultValue: String): String

    Return the string value of Spark SQL configuration property for the given key.

    Return the string value of Spark SQL configuration property for the given key. If the key is not set yet, return defaultValue.

  126. def getConfString(key: String): String

    Return the value of Spark SQL configuration property for the given key.

    Return the value of Spark SQL configuration property for the given key.

    Annotations
    @throws( "if key is not set" )
  127. def groupByAliases: Boolean
  128. def groupByOrdinal: Boolean
  129. def groupingIdWithAppendedUserGroupByEnabled: Boolean
  130. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  131. def hintErrorHandler: HintErrorHandler

    Returns the error handler for handling hint errors.

  132. def histogramEnabled: Boolean
  133. def histogramNumBins: Int
  134. def histogramNumericPropagateInputType: Boolean
  135. def hiveThriftServerSingleSession: Boolean
  136. def hugeMethodLimit: Int
  137. def ignoreCorruptFiles: Boolean
  138. def ignoreDataLocality: Boolean
  139. def ignoreMissingFiles: Boolean
  140. def ignoreMissingParquetFieldId: Boolean
  141. def inMemoryPartitionPruning: Boolean
  142. def inMemoryTableScanStatisticsEnabled: Boolean
  143. def inferDictAsStruct: Boolean
  144. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  145. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  146. def integerGroupingIdEnabled: Boolean
  147. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  148. def isModifiable(key: String): Boolean
  149. def isOrcSchemaMergingEnabled: Boolean
  150. def isParquetBinaryAsString: Boolean
  151. def isParquetINT96AsTimestamp: Boolean
  152. def isParquetINT96TimestampConversion: Boolean
  153. def isParquetSchemaMergingEnabled: Boolean
  154. def isParquetSchemaRespectSummaries: Boolean
  155. def isReplEagerEvalEnabled: Boolean
  156. def isStateSchemaCheckEnabled: Boolean
  157. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  158. def isUnsupportedOperationCheckEnabled: Boolean
  159. def joinReorderCardWeight: Double
  160. def joinReorderDPStarFilter: Boolean
  161. def joinReorderDPThreshold: Int
  162. def joinReorderEnabled: Boolean
  163. def jsonEnableDateTimeParsingFallback: Option[Boolean]
  164. def jsonEnablePartialResults: Boolean
  165. def jsonExpressionOptimization: Boolean
  166. def jsonFilterPushDown: Boolean
  167. def jsonGeneratorIgnoreNullFields: Boolean
  168. def jsonWriteNullIfWithDefaultValue: Boolean
  169. def legacyInferArrayTypeFromFirstElement: Boolean
  170. def legacyIntervalEnabled: Boolean
  171. def legacyMsSqlServerNumericMappingEnabled: Boolean
  172. def legacyNegativeIndexInArrayInsert: Boolean
  173. def legacyParquetNanosAsLong: Boolean
  174. def legacyPathOptionBehavior: Boolean
  175. def legacySizeOfNull: Boolean
  176. def legacyStatisticalAggregate: Boolean
  177. def legacyTimeParserPolicy: SQLConf.LegacyBehaviorPolicy.Value
  178. def limitInitialNumPartitions: Int
  179. def limitScaleUpFactor: Int
  180. def literalPickMinimumPrecision: Boolean
  181. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  182. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  183. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  184. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  185. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  186. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  187. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  188. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  189. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  190. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  191. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  192. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  193. def loggingMaxLinesForCodegen: Int
  194. def manageFilesourcePartitions: Boolean
  195. def maxBatchesToRetainInMemory: Int
  196. def maxConcurrentOutputFileWriters: Int
  197. def maxMetadataStringLength: Int
  198. def maxNestedViewDepth: Int
  199. def maxPlanStringLength: Int
  200. def maxRecordsPerFile: Long
  201. def maxToStringFields: Int
  202. def metadataCacheTTL: Long
  203. def metastoreDropPartitionsByName: Boolean
  204. def metastorePartitionPruning: Boolean
  205. def metastorePartitionPruningFallbackOnException: Boolean
  206. def metastorePartitionPruningFastFallback: Boolean
  207. def metastorePartitionPruningInSetThreshold: Int
  208. def methodSplitThreshold: Int
  209. def minBatchesToRetain: Int
  210. def nameNonStructGroupingKeyAsValue: Boolean
  211. def ndvMaxError: Double
  212. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  213. def nestedPruningOnExpressions: Boolean
  214. def nestedSchemaPruningEnabled: Boolean
  215. def nonEmptyPartitionRatioForBroadcastJoin: Double
  216. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  217. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  218. def numShufflePartitions: Int
  219. def objectAggSortBasedFallbackThreshold: Int
  220. def offHeapColumnVectorEnabled: Boolean
  221. def optimizeNullAwareAntiJoin: Boolean
  222. def optimizerExcludedRules: Option[String]
  223. def optimizerInSetConversionThreshold: Int
  224. def optimizerInSetSwitchThreshold: Int
  225. def optimizerMaxIterations: Int
  226. def optimizerMetadataOnly: Boolean
  227. def orcAggregatePushDown: Boolean
  228. def orcCompressionCodec: String
  229. def orcFilterPushDown: Boolean
  230. def orcVectorizedReaderBatchSize: Int
  231. def orcVectorizedReaderEnabled: Boolean
  232. def orcVectorizedReaderNestedColumnEnabled: Boolean
  233. def orcVectorizedWriterBatchSize: Int
  234. def orderByOrdinal: Boolean
  235. def pandasGroupedMapAssignColumnsByName: Boolean
  236. def pandasUDFBufferSize: Int
  237. def parallelFileListingInStatsComputation: Boolean
  238. def parallelPartitionDiscoveryParallelism: Int
  239. def parallelPartitionDiscoveryThreshold: Int
  240. def parquetAggregatePushDown: Boolean
  241. def parquetCompressionCodec: String
  242. def parquetFieldIdReadEnabled: Boolean
  243. def parquetFieldIdWriteEnabled: Boolean
  244. def parquetFilterPushDown: Boolean
  245. def parquetFilterPushDownDate: Boolean
  246. def parquetFilterPushDownDecimal: Boolean
  247. def parquetFilterPushDownInFilterThreshold: Int
  248. def parquetFilterPushDownStringPredicate: Boolean
  249. def parquetFilterPushDownTimestamp: Boolean
  250. def parquetInferTimestampNTZEnabled: Boolean
  251. def parquetOutputCommitterClass: String
  252. def parquetOutputTimestampType: SQLConf.ParquetOutputTimestampType.Value
  253. def parquetRecordFilterEnabled: Boolean
  254. def parquetVectorizedReaderBatchSize: Int
  255. def parquetVectorizedReaderEnabled: Boolean
  256. def parquetVectorizedReaderNestedColumnEnabled: Boolean
  257. def partitionColumnTypeInferenceEnabled: Boolean
  258. def partitionOverwriteMode: SQLConf.PartitionOverwriteMode.Value
  259. def percentileAccuracy: Int
  260. def planChangeBatches: Option[String]
  261. def planChangeLogLevel: String
  262. def planChangeRules: Option[String]
  263. def planStatsEnabled: Boolean
  264. def plannedWriteEnabled: Boolean
  265. def preferSortMergeJoin: Boolean
  266. def pysparkJVMStacktraceEnabled: Boolean
  267. def pysparkSimplifiedTraceback: Boolean
  268. def rangeExchangeSampleSizePerPartition: Int
  269. def readSideCharPadding: Boolean
  270. val reader: ConfigReader
    Attributes
    protected
  271. def redactOptions[K, V](options: Seq[(K, V)]): Seq[(K, V)]

    Redacts the given option map according to the description of SQL_OPTIONS_REDACTION_PATTERN.

  272. def redactOptions[K, V](options: Map[K, V]): Map[K, V]

    Redacts the given option map according to the description of SQL_OPTIONS_REDACTION_PATTERN.

  273. def replEagerEvalMaxNumRows: Int
  274. def replEagerEvalTruncate: Int
  275. def replaceDatabricksSparkAvroEnabled: Boolean
  276. def replaceExceptWithFilter: Boolean
  277. def resolver: Resolver

    Returns the Resolver for the current configuration, which can be used to determine if two identifiers are equal.

  278. def runSQLonFile: Boolean
  279. def runtimeFilterBloomFilterEnabled: Boolean
  280. def runtimeFilterCreationSideThreshold: Long
  281. def runtimeFilterSemiJoinReductionEnabled: Boolean
  282. def runtimeRowLevelOperationGroupFilterEnabled: Boolean
  283. def serializerNestedSchemaPruningEnabled: Boolean
  284. def sessionLocalTimeZone: String
  285. def sessionWindowBufferInMemoryThreshold: Int
  286. def sessionWindowBufferSpillThreshold: Int
  287. def setCommandRejectsSparkCoreConfs: Boolean
  288. def setConf[T](entry: ConfigEntry[T], value: T): Unit

    Set the given Spark SQL configuration property.

  289. def setConf(props: Properties): Unit

    Set Spark SQL configuration properties.

  290. def setConfString(key: String, value: String): Unit

    Set the given Spark SQL configuration property using a string value.

  291. def setConfWithCheck(key: String, value: String): Unit
    Attributes
    protected
  292. def setOpsPrecedenceEnforced: Boolean
  293. val settings: Map[String, String]

    Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap.

    Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap.

    Attributes
    protected[spark]
  294. def sortBeforeRepartition: Boolean
  295. def sortMergeJoinExecBufferInMemoryThreshold: Int
  296. def sortMergeJoinExecBufferSpillThreshold: Int
  297. def starSchemaDetection: Boolean
  298. def starSchemaFTRatio: Double
  299. def stateStoreCompressionCodec: String
  300. def stateStoreFormatValidationEnabled: Boolean
  301. def stateStoreMinDeltasForSnapshot: Int
  302. def stateStoreProviderClass: String
  303. def stateStoreSkipNullsForStreamStreamJoins: Boolean
  304. def statefulOperatorCorrectnessCheckEnabled: Boolean
  305. def storeAnalyzedPlanForView: Boolean
  306. def storeAssignmentPolicy: SQLConf.StoreAssignmentPolicy.Value
  307. def streamingFileCommitProtocolClass: String
  308. def streamingMaintenanceInterval: Long
  309. def streamingMetricsEnabled: Boolean
  310. def streamingNoDataMicroBatchesEnabled: Boolean
  311. def streamingNoDataProgressEventInterval: Long
  312. def streamingPollingDelay: Long
  313. def streamingProgressRetention: Int
  314. def streamingSchemaInference: Boolean
  315. def streamingSessionWindowMergeSessionInLocalPartition: Boolean
  316. def stringRedactionPattern: Option[Regex]
  317. def subexpressionEliminationCacheMaxEntries: Int
  318. def subexpressionEliminationEnabled: Boolean
  319. def subqueryReuseEnabled: Boolean
  320. def supportQuotedRegexColumnName: Boolean
  321. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  322. def tableRelationCacheSize: Int
  323. def timestampType: AtomicType
  324. def toString(): String
    Definition Classes
    AnyRef → Any
  325. def topKSortFallbackThreshold: Int
  326. def truncateTableIgnorePermissionAcl: Boolean
  327. def uiExplainMode: String
  328. def unsetConf(entry: ConfigEntry[_]): Unit
  329. def unsetConf(key: String): Unit
  330. def useCompression: Boolean
  331. def useCurrentSQLConfigsForView: Boolean
  332. def useDeprecatedKafkaOffsetFetching: Boolean
  333. def useNullsForMissingDefaultColumnValues: Boolean
  334. def useObjectHashAggregation: Boolean
  335. def useV1Command: Boolean
  336. def v2BucketingEnabled: Boolean
  337. def v2BucketingPartiallyClusteredDistributionEnabled: Boolean
  338. def v2BucketingPushPartValuesEnabled: Boolean
  339. def validatePartitionColumns: Boolean
  340. def variableSubstituteEnabled: Boolean
  341. def verifyPartitionPath: Boolean
  342. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  343. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  344. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  345. def warehousePath: String
  346. def wholeStageEnabled: Boolean
  347. def wholeStageMaxNumFields: Int
  348. def wholeStageSplitConsumeFuncByOperator: Boolean
  349. def wholeStageUseIdInClassName: Boolean
  350. def windowExecBufferInMemoryThreshold: Int
  351. def windowExecBufferSpillThreshold: Int
  352. def writeLegacyParquetFormat: Boolean

Inherited from Logging

Inherited from Serializable

Inherited from Serializable

Inherited from AnyRef

Inherited from Any

Ungrouped