object Utils
Various arbitrary helper functions
- Alphabetic
- By Inheritance
- Utils
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
val
CERTIFIED_JDBC_VERSION: String
The certified JDBC version to work with this spark connector version.
-
val
PROPERTY_NAME_OF_CONNECTOR_VERSION: String
Important: Never change the value of PROPERTY_NAME_OF_CONNECTOR_VERSION.
Important: Never change the value of PROPERTY_NAME_OF_CONNECTOR_VERSION. Changing it will cause the Spark connector to stop working in some cases.
-
val
SNOWFLAKE_SOURCE_NAME: String
Literal to be used with the Spark DataFrame's .format method
-
val
SNOWFLAKE_SOURCE_SHORT_NAME: String
Short literal name of SNOWFLAKE_SOURCE_NAME
- val VERSION: String
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
checkFileSystem(uri: URI, hadoopConfig: Configuration): Unit
Given a URI, verify that the Hadoop FileSystem for that URI is not the S3 block FileSystem.
Given a URI, verify that the Hadoop FileSystem for that URI is not the S3 block FileSystem.
spark-snowflakedb cannot use this FileSystem because the files written to it will not be readable by Snowflake (and vice versa). -
def
checkThatBucketHasObjectLifecycleConfiguration(tempDir: String, tempDirStorageType: FSType, s3Client: AmazonS3Client): Unit
Checks whether the S3 bucket for the given URI has an object lifecycle configuration to ensure cleanup of temporary files.
Checks whether the S3 bucket for the given URI has an object lifecycle configuration to ensure cleanup of temporary files. If no applicable configuration is found, this method logs a helpful warning for the user.
- def classForName(className: String): Class[_]
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
containVariant(schema: StructType): Boolean
Check whether the given DataFrame contains a variant type or not
-
def
ensureQuoted(name: String): String
Ensure a name is wrapped with double quotes
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
fixS3Url(url: String): String
Snowflake COPY and UNLOAD commands don't support s3n or s3a, but users may wish to use them for data loads.
Snowflake COPY and UNLOAD commands don't support s3n or s3a, but users may wish to use them for data loads. This function converts the URL back to the s3:// format.
-
def
fixUrlForCopyCommand(url: String): String
Converts url for the copy command.
Converts url for the copy command. For S3, convert s3a|s3n to s3. For Azure, convert the wasb: url to azure: url.
- url
the url to be used in hadoop/spark
- returns
the url to be used in Snowflake
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getClientInfoJson(): ObjectNode
- def getClientInfoString(): String
- def getJDBCConnection(params: Map[String, String]): Connection
- def getJDBCConnection(params: Map[String, String]): Connection
- def getLastCopyLoad: String
- def getLastCopyUnload: String
- def getLastGetCommand: String
- def getLastPutCommand: String
- def getLastSelect: String
- def getSizeString(size: Long): String
- def getTimeString(milliSeconds: Long): String
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
isQuoted(name: String): Boolean
check whether a name is quoted
-
def
joinUrls(a: String, b: String): String
Joins prefix URL a to path suffix b, and appends a trailing /, in order to create a temp directory path for S3.
-
def
makeTempPath(tempRoot: String): String
Creates a randomly named temp directory path for intermediate data
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
parseMap(source: String): Map[String, String]
Create a map from a string for column mapping
- def printQuery(params: Map[String, String], query: String): Unit
-
def
quotedName(name: String): String
wrap a name with double quotes
-
def
quotedNameIgnoreCase(name: String): String
wrap a name with double quotes without capitalizing letters
- def readMapFromFile(sc: SparkContext, file: String): Map[String, String]
-
def
readMapFromString(string: String): Map[String, String]
Same as readMapFromFile, but accepts the file content as an argument
-
def
removeCredentialsFromURI(uri: URI): URI
Returns a copy of the given URI with the user credentials removed.
- def runQuery(params: Map[String, String], query: String): ResultSet
- def runQuery(params: Map[String, String], query: String): ResultSet
-
def
sanitizeQueryText(q: String): String
Removes (hopefully :)) sensitive content from a query string
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()