case class ImpalaWaimakJDBCConnector(context: SparkFlowContext, database: String, cluster: String = "default", properties: Properties = new java.util.Properties(), secureProperties: Map[String, String] = Map.empty) extends ImpalaDBConnector with WaimakJDBCConnector with Product with Serializable
Impala database connector that is constructed using the Waimak JDBC template defined in the Spark configuration
- context
The flow context object containing the SparkSession and FileSystem
- database
name of the database to connect to
- cluster
the cluster label in the JDBC template string
- properties
Key value pairs passed as connection arguments to the DriverManager during connection
- secureProperties
Key-value map of parameters used to resolve JDBC property values from a secure jceks file at CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH. Each map key is the name of the parameter in the jceks file, and the corresponding map value is the name under which that parameter should appear in the JDBC properties
- Alphabetic
- By Inheritance
- ImpalaWaimakJDBCConnector
- Serializable
- Serializable
- Product
- Equals
- WaimakJDBCConnector
- JDBCConnector
- ImpalaDBConnector
- HadoopDBConnector
- DBConnector
- Logging
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
ImpalaWaimakJDBCConnector(context: SparkFlowContext, database: String, cluster: String = "default", properties: Properties = new java.util.Properties(), secureProperties: Map[String, String] = Map.empty)
- context
The flow context object containing the SparkSession and FileSystem
- database
name of the database to connect to
- cluster
the cluster label in the JDBC template string
- properties
Key value pairs passed as connection arguments to the DriverManager during connection
- secureProperties
Key-value map of parameters used to resolve JDBC property values from a secure jceks file at CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH. Each map key is the name of the parameter in the jceks file, and the corresponding map value is the name under which that parameter should appear in the JDBC properties
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
val
cluster: String
- Definition Classes
- ImpalaWaimakJDBCConnector → WaimakJDBCConnector
-
val
context: SparkFlowContext
- Definition Classes
- ImpalaWaimakJDBCConnector → HadoopDBConnector
-
val
database: String
- Definition Classes
- ImpalaWaimakJDBCConnector → WaimakJDBCConnector
-
val
driverName: String
- Definition Classes
- ImpalaWaimakJDBCConnector → JDBCConnector
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
forceRecreateTables: Boolean
- Definition Classes
- HadoopDBConnector
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hadoopConfiguration: Configuration
- Definition Classes
- ImpalaWaimakJDBCConnector → JDBCConnector
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
isTraceEnabled(): Boolean
- Attributes
- protected
- Definition Classes
- Logging
-
def
jdbcString: String
- Definition Classes
- WaimakJDBCConnector → JDBCConnector
-
def
logAndReturn[A](a: A, msg: String, level: Level): A
- Definition Classes
- Logging
-
def
logAndReturn[A](a: A, message: (A) ⇒ String, level: Level): A
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logName: String
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
val
properties: Properties
- Definition Classes
- ImpalaWaimakJDBCConnector → JDBCConnector
-
def
recreateTableFromParquetDDLs(tableName: String, path: String, partitionColumns: Seq[String]): Seq[String]
- Definition Classes
- HadoopDBConnector
-
val
secureProperties: Map[String, String]
- Definition Classes
- ImpalaWaimakJDBCConnector → JDBCConnector
-
val
service: String
- Definition Classes
- ImpalaWaimakJDBCConnector → WaimakJDBCConnector
-
val
sparkConf: SparkConf
- Definition Classes
- ImpalaWaimakJDBCConnector → WaimakJDBCConnector
-
def
submitAtomicResultlessQueries(ddls: Seq[String]): Unit
- Definition Classes
- DBConnector
-
def
submitResultlessQuery(ddl: String): Unit
- Definition Classes
- DBConnector
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
updateTableParquetLocationDDLs(tableName: String, path: String, partitionColumns: Seq[String]): Seq[String]
- Definition Classes
- HadoopDBConnector
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()