public class SnowflakeS3Client extends Object implements SnowflakeStorageClient
| Constructor and Description |
|---|
SnowflakeS3Client(Map stageCredentials,
com.amazonaws.ClientConfiguration clientConfig,
net.snowflake.common.core.RemoteStoreFileEncryptionMaterial encMat,
String stageRegion) |
| Modifier and Type | Method and Description |
|---|---|
void |
addDigestMetadata(StorageObjectMetadata meta,
String digest)
Adds digest metadata to the StorageObjectMetadata object
|
void |
addEncryptionMetadata(StorageObjectMetadata meta,
MatDesc matDesc,
byte[] ivData,
byte[] encKeK,
long contentLength)
Adds encryption metadata to the StorageObjectMetadata object
|
void |
download(SFSession connection,
String command,
String localLocation,
String destFileName,
int parallelism,
String remoteStorageLocation,
String stageFilePath,
String stageRegion)
Download a file from S3.
|
InputStream |
downloadToStream(SFSession connection,
String command,
int parallelism,
String remoteStorageLocation,
String stageFilePath,
String stageRegion)
Download a file from remote storage
|
String |
getDigestMetadata(StorageObjectMetadata meta)
Gets digest metadata from the StorageObjectMetadata object
|
int |
getEncryptionKeySize() |
String |
getMatdescKey()
Returns the material descriptor key
|
int |
getMaxRetries() |
StorageObjectMetadata |
getObjectMetadata(String remoteStorageLocation,
String prefix)
Returns the metadata properties for a remote storage object
|
int |
getRetryBackoffMaxExponent()
Returns the max exponent for multiplying backoff with the power of 2; a value
of 4 gives 16 seconds as the maximum time to sleep before retrying
|
int |
getRetryBackoffMin() |
void |
handleStorageException(Exception ex,
int retryCount,
String operation,
SFSession connection,
String command)
Handles exceptions thrown by the remote storage provider
|
boolean |
isEncrypting() |
StorageObjectSummaryCollection |
listObjects(String remoteStorageLocation,
String prefix)
For a set of remote storage objects under a remote location and a given prefix/path
returns their properties wrapped in ObjectSummary objects
|
void |
renew(Map stageCredentials)
Renew the S3 client with fresh AWS credentials/access token
|
void |
shutdown()
Shuts down the client
|
void |
upload(SFSession connection,
String command,
int parallelism,
boolean uploadFromStream,
String remoteStorageLocation,
File srcFile,
String destFileName,
InputStream inputStream,
FileBackedOutputStream fileBackedOutputStream,
StorageObjectMetadata meta,
String stageRegion)
Upload a file (-stream) to S3.
|
public SnowflakeS3Client(Map stageCredentials, com.amazonaws.ClientConfiguration clientConfig, net.snowflake.common.core.RemoteStoreFileEncryptionMaterial encMat, String stageRegion) throws SnowflakeSQLException
SnowflakeSQLException

public int getMaxRetries()
getMaxRetries in interface SnowflakeStorageClientpublic int getRetryBackoffMaxExponent()
SnowflakeStorageClientgetRetryBackoffMaxExponent in interface SnowflakeStorageClientpublic int getRetryBackoffMin()
getRetryBackoffMin in interface SnowflakeStorageClientpublic boolean isEncrypting()
isEncrypting in interface SnowflakeStorageClientpublic int getEncryptionKeySize()
getEncryptionKeySize in interface SnowflakeStorageClientpublic void renew(Map stageCredentials) throws SnowflakeSQLException
renew in interface SnowflakeStorageClientstageCredentials - a Map of new AWS credential properties, to refresh the client with (as returned by GS)SnowflakeSQLException - if any error occurspublic void shutdown()
SnowflakeStorageClientshutdown in interface SnowflakeStorageClientpublic StorageObjectSummaryCollection listObjects(String remoteStorageLocation, String prefix) throws StorageProviderException
SnowflakeStorageClientlistObjects in interface SnowflakeStorageClientremoteStorageLocation - location, i.e. bucket for S3prefix - the prefix to listStorageProviderException - cloud storage provider errorpublic StorageObjectMetadata getObjectMetadata(String remoteStorageLocation, String prefix) throws StorageProviderException
SnowflakeStorageClientgetObjectMetadata in interface SnowflakeStorageClientremoteStorageLocation - location, i.e. bucket for S3prefix - the prefix/path of the object to retrieveStorageProviderException - cloud storage provider errorpublic void download(SFSession connection, String command, String localLocation, String destFileName, int parallelism, String remoteStorageLocation, String stageFilePath, String stageRegion) throws SnowflakeSQLException
download in interface SnowflakeStorageClientconnection - connection objectcommand - command to download filelocalLocation - local file pathdestFileName - destination file nameparallelism - number of threads for parallel downloadingremoteStorageLocation - s3 bucket namestageFilePath - stage file pathstageRegion - region name where the stage persistsSnowflakeSQLException - if download failed without an exceptionSnowflakeSQLException - if failed to decrypt downloaded fileSnowflakeSQLException - if file metadata is incompletepublic InputStream downloadToStream(SFSession connection, String command, int parallelism, String remoteStorageLocation, String stageFilePath, String stageRegion) throws SnowflakeSQLException
downloadToStream in interface SnowflakeStorageClientconnection - connection objectcommand - command to download fileparallelism - number of threads for parallel downloadingremoteStorageLocation - remote storage location, i.e. bucket for s3stageFilePath - stage file pathstageRegion - region name where the stage persistsSnowflakeSQLException - when the download failspublic void upload(SFSession connection, String command, int parallelism, boolean uploadFromStream, String remoteStorageLocation, File srcFile, String destFileName, InputStream inputStream, FileBackedOutputStream fileBackedOutputStream, StorageObjectMetadata meta, String stageRegion) throws SnowflakeSQLException
upload in interface SnowflakeStorageClientconnection - connection objectcommand - upload commandparallelism - number of threads for parallel uploadinguploadFromStream - true if upload source is streamremoteStorageLocation - s3 bucket namesrcFile - source file if not uploading from a streamdestFileName - file name on s3 after uploadinputStream - stream used for uploading if fileBackedOutputStream is nullfileBackedOutputStream - stream used for uploading if not nullmeta - object meta datastageRegion - region name where the stage persistsSnowflakeSQLException - if upload failed even after retry
SnowflakeStorageClienthandleStorageException in interface SnowflakeStorageClientex - the exception to handleretryCount - current number of retries, incremented by the caller before each calloperation - string that indicates the function/operation that was taking place,
when the exception was raised, for example "upload"connection - the current SFSession object used by the clientcommand - the command attempted at the time of the exceptionSnowflakeSQLException - exceptions that were not handled, or retried past
what the retry policy allows, are propagatedpublic String getMatdescKey()
getMatdescKey in interface SnowflakeStorageClientpublic void addEncryptionMetadata(StorageObjectMetadata meta, MatDesc matDesc, byte[] ivData, byte[] encKeK, long contentLength)
addEncryptionMetadata in interface SnowflakeStorageClientmeta - the storage metadata object to add the encryption info tomatDesc - the material descriptorivData - the initialization vectorencKeK - the key encryption keycontentLength - the length of the encrypted contentpublic void addDigestMetadata(StorageObjectMetadata meta, String digest)
addDigestMetadata in interface SnowflakeStorageClientmeta - the storage metadata object to add the digest todigest - the digest metadata to addpublic String getDigestMetadata(StorageObjectMetadata meta)
getDigestMetadata in interface SnowflakeStorageClientmeta - the metadata object to extract the digest metadata from

Copyright © 2019. All rights reserved.