Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Snow 1431870 gcm cloud integration #1838

Draft
wants to merge 1 commit into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1106,6 +1106,7 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
isClientSideEncrypted =
jsonNode.path("data").path("stageInfo").path("isClientSideEncrypted").asBoolean(true);
}
String ciphers = jsonNode.path("data").path("stageInfo").path("ciphers").asText();

// endPoint is currently known to be set for Azure stages or S3. For S3 it will be set
// specifically
Expand Down Expand Up @@ -1166,7 +1167,8 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
stageRegion,
endPoint,
stgAcct,
isClientSideEncrypted);
isClientSideEncrypted,
ciphers);

// Setup pre-signed URL into stage info if pre-signed URL is returned.
if (stageInfo.getStageType() == StageInfo.StageType.GCS) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
package net.snowflake.client.jdbc.cloud.storage;

import com.google.common.base.Strings;
import java.io.File;
import java.io.InputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeSQLLoggedException;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial;
import net.snowflake.common.core.SqlState;

/**
 * Holds the encryption material (wrapped key, IVs, and AADs) parsed from a stage object's
 * metadata and dispatches decryption of downloaded files/streams to the cipher-specific
 * provider: legacy AES-ECB key wrapping + AES-CBC content ({@link EncryptionProvider}) or
 * AES-GCM for both key and content ({@link GcmEncryptionProvider}).
 *
 * <p>Instances are created through the static factories {@link #forCbc} and {@link #forGcm},
 * which validate that the material required by the chosen cipher mode is present.
 */
class DecryptionHelper {
  // NOTE(review): original obtained the logger for SnowflakeGCSClient.class — a copy-paste slip
  // that mislabels log records; use this class as the log category.
  private static final SFLogger logger = SFLoggerFactory.getLogger(DecryptionHelper.class);

  private final String queryId;
  private final SFBaseSession session;
  // Base64-encoded wrapped content-encryption key.
  private final String key;
  // IV used when unwrapping the key (GCM mode only; null for ECB/CBC).
  private final String keyIv;
  // IV for the file content itself.
  private final String dataIv;
  // Additional authenticated data for key unwrap (GCM mode only).
  private final String keyAad;
  // Additional authenticated data for content decryption (GCM mode only).
  private final String dataAad;
  private final StageInfo.Ciphers ciphers;

  private DecryptionHelper(
      String queryId,
      SFBaseSession session,
      String key,
      String keyIv,
      String dataIv,
      String keyAad,
      String dataAad,
      StageInfo.Ciphers ciphers) {
    this.queryId = queryId;
    this.session = session;
    this.key = key;
    this.keyIv = keyIv;
    this.dataIv = dataIv;
    this.keyAad = keyAad;
    this.dataAad = dataAad;
    this.ciphers = ciphers;
  }

  /**
   * Creates a helper for the legacy AES-ECB/AES-CBC scheme.
   *
   * @param queryId query id, for error reporting
   * @param session session, for error reporting
   * @param key Base64-encoded wrapped content key
   * @param contentIv Base64-encoded content IV
   * @throws SnowflakeSQLLoggedException if either {@code key} or {@code contentIv} is missing
   */
  static DecryptionHelper forCbc(
      String queryId, SFBaseSession session, String key, String contentIv)
      throws SnowflakeSQLLoggedException {
    if (Strings.isNullOrEmpty(key) || Strings.isNullOrEmpty(contentIv)) {
      throw exception(queryId, session);
    }
    return new DecryptionHelper(
        queryId, session, key, null, contentIv, null, null, StageInfo.Ciphers.AESECB_AESCBC);
  }

  /**
   * Creates a helper for the AES-GCM/AES-GCM scheme.
   *
   * <p>AADs may be empty but must not be {@code null}; key, key IV and data IV are mandatory.
   *
   * @throws SnowflakeSQLLoggedException if any required piece of material is missing
   */
  static DecryptionHelper forGcm(
      String queryId,
      SFBaseSession session,
      String key,
      String keyIv,
      String dataIv,
      String keyAad,
      String dataAad)
      throws SnowflakeSQLLoggedException {
    if (Strings.isNullOrEmpty(key)
        || Strings.isNullOrEmpty(keyIv)
        || Strings.isNullOrEmpty(dataIv)
        || keyAad == null
        || dataAad == null) {
      throw exception(queryId, session);
    }
    return new DecryptionHelper(
        queryId, session, key, keyIv, dataIv, keyAad, dataAad, StageInfo.Ciphers.AESGCM_AESGCM);
  }

  /**
   * Re-checks that the material needed by the selected cipher mode is present.
   *
   * @throws SnowflakeSQLLoggedException if the metadata is incomplete
   */
  void validate() throws SnowflakeSQLLoggedException {
    if (key == null
        || dataIv == null
        || (ciphers == StageInfo.Ciphers.AESGCM_AESGCM && keyIv == null)) {
      throw exception(queryId, session);
    }
  }

  /**
   * Decrypts {@code file} in place using the cipher mode this helper was built for.
   *
   * @param file the downloaded, still-encrypted file
   * @param encMat the query's encryption material (holds the key-encryption key)
   * @throws SnowflakeSQLLoggedException if decryption fails for any reason
   */
  void decryptFile(File file, RemoteStoreFileEncryptionMaterial encMat)
      throws SnowflakeSQLLoggedException {
    try {
      switch (ciphers) {
        case AESECB_AESCBC:
          EncryptionProvider.decrypt(file, key, dataIv, encMat);
          break; // BUGFIX: original fell through into the GCM case and then into the throw
        case AESGCM_AESGCM:
          GcmEncryptionProvider.decryptFile(file, key, dataIv, keyIv, encMat, dataAad, keyAad);
          break; // BUGFIX: original fell through into the default throw
        default:
          throw new IllegalArgumentException("unsupported ciphers: " + ciphers);
      }
    } catch (Exception ex) {
      logger.error("Error decrypting file", ex);
      throw new SnowflakeSQLLoggedException(
          queryId,
          session,
          ErrorCode.INTERNAL_ERROR.getMessageCode(),
          SqlState.INTERNAL_ERROR,
          "Cannot decrypt file");
    }
  }

  /**
   * Wraps {@code inputStream} in a decrypting stream for the selected cipher mode.
   *
   * @param inputStream the encrypted source stream
   * @param encMat the query's encryption material
   * @return a stream yielding the decrypted content
   */
  InputStream decryptStream(InputStream inputStream, RemoteStoreFileEncryptionMaterial encMat)
      throws NoSuchPaddingException, NoSuchAlgorithmException, InvalidKeyException,
          BadPaddingException, IllegalBlockSizeException, InvalidAlgorithmParameterException {
    switch (ciphers) {
      case AESGCM_AESGCM:
        return GcmEncryptionProvider.decryptStream(
            inputStream, key, dataIv, keyIv, encMat, dataAad, keyAad);
      case AESECB_AESCBC:
        return EncryptionProvider.decryptStream(inputStream, key, dataIv, encMat);
    }
    throw new IllegalArgumentException("unsupported ciphers: " + ciphers);
  }

  // Shared "incomplete metadata" error used by the factories and validate().
  private static SnowflakeSQLLoggedException exception(String queryId, SFBaseSession session) {
    return new SnowflakeSQLLoggedException(
        queryId,
        session,
        ErrorCode.INTERNAL_ERROR.getMessageCode(),
        SqlState.INTERNAL_ERROR,
        "File metadata incomplete");
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,6 @@
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.microsoft.azure.storage.OperationContext;
import com.microsoft.azure.storage.StorageCredentials;
import com.microsoft.azure.storage.StorageCredentialsAnonymous;
Expand All @@ -33,16 +29,13 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import java.util.AbstractMap;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Base64;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.snowflake.client.core.HttpUtil;
import net.snowflake.client.core.ObjectMapperFactory;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFSession;
import net.snowflake.client.core.SFSessionProperty;
Expand Down Expand Up @@ -81,8 +74,9 @@ public class SnowflakeAzureClient implements SnowflakeStorageClient {
private OperationContext opContext = null;
private SFBaseSession session;

private StorageClientHelper storageClientHelper;

private SnowflakeAzureClient() {}
;

/*
* Factory method for a SnowflakeAzureClient object
Expand Down Expand Up @@ -161,6 +155,7 @@ private void setupAzureClient(
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("invalid_azure_credentials");
}
storageClientHelper = new StorageClientHelper(this, encMat, session, stageInfo.getCiphers());
}

// Returns the Max number of retry attempts
Expand Down Expand Up @@ -349,26 +344,17 @@ public void download(

// Get the user-defined BLOB metadata
Map<String, String> userDefinedMetadata = blob.getMetadata();
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

String key = encryptionData.getKey();
String iv = encryptionData.getValue();
DecryptionHelper decryptionHelper =
storageClientHelper.parseEncryptionDataFromJson(
userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
stopwatch.restart();
if (key == null || iv == null) {
throw new SnowflakeSQLLoggedException(
queryId,
session,
ErrorCode.INTERNAL_ERROR.getMessageCode(),
SqlState.INTERNAL_ERROR,
"File metadata incomplete");
}
decryptionHelper.validate();

// Decrypt file
try {
EncryptionProvider.decrypt(localFile, key, iv, this.encMat);
decryptionHelper.decryptFile(localFile, encMat);
stopwatch.stop();
long decryptMillis = stopwatch.elapsedMillis();
logger.info(
Expand Down Expand Up @@ -449,26 +435,15 @@ public InputStream downloadToStream(
long downloadMillis = stopwatch.elapsedMillis();
Map<String, String> userDefinedMetadata = blob.getMetadata();

AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

String key = encryptionData.getKey();

String iv = encryptionData.getValue();
DecryptionHelper decryptionHelper =
storageClientHelper.parseEncryptionDataFromJson(
userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
decryptionHelper.validate();
stopwatch.restart();
if (key == null || iv == null) {
throw new SnowflakeSQLLoggedException(
queryId,
session,
ErrorCode.INTERNAL_ERROR.getMessageCode(),
SqlState.INTERNAL_ERROR,
"File metadata incomplete");
}

try {
InputStream is = EncryptionProvider.decryptStream(stream, key, iv, encMat);
InputStream is = decryptionHelper.decryptStream(stream, encMat);
stopwatch.stop();
long decryptMillis = stopwatch.elapsedMillis();
logger.info(
Expand Down Expand Up @@ -694,7 +669,7 @@ private SFPair<InputStream, Boolean> createUploadStream(
final InputStream stream;
FileInputStream srcFileStream = null;
try {
if (isEncrypting() && getEncryptionKeySize() < 256) {
if (isEncrypting() && getEncryptionKeySize() <= 256) {
try {
final InputStream uploadStream =
uploadFromStream
Expand All @@ -705,9 +680,7 @@ private SFPair<InputStream, Boolean> createUploadStream(
toClose.add(srcFileStream);

// Encrypt
stream =
EncryptionProvider.encrypt(
meta, originalContentLength, uploadStream, this.encMat, this);
stream = storageClientHelper.encrypt(meta, originalContentLength, uploadStream);
uploadFromStream = true;
} catch (Exception ex) {
logger.error("Failed to encrypt input", ex);
Expand Down Expand Up @@ -934,51 +907,6 @@ private static URI buildAzureStorageEndpointURI(String storageEndPoint, String s
return storageEndpoint;
}

/*
* buildEncryptionMetadataJSON
* Takes the base64-encoded iv and key and creates the JSON block to be
* used as the encryptiondata metadata field on the blob.
*/
private String buildEncryptionMetadataJSON(String iv64, String key64) {
return String.format(
"{\"EncryptionMode\":\"FullBlob\",\"WrappedContentKey\""
+ ":{\"KeyId\":\"symmKey1\",\"EncryptedKey\":\"%s\""
+ ",\"Algorithm\":\"AES_CBC_256\"},\"EncryptionAgent\":"
+ "{\"Protocol\":\"1.0\",\"EncryptionAlgorithm\":"
+ "\"AES_CBC_256\"},\"ContentEncryptionIV\":\"%s\""
+ ",\"KeyWrappingMetadata\":{\"EncryptionLibrary\":"
+ "\"Java 5.3.0\"}}",
key64, iv64);
}

/*
* parseEncryptionData
* Takes the json string in the encryptiondata metadata field of the encrypted
* blob and parses out the key and iv. Returns the pair as key = key, iv = value.
*/
private SimpleEntry<String, String> parseEncryptionData(String jsonEncryptionData, String queryId)
throws SnowflakeSQLException {
ObjectMapper mapper = ObjectMapperFactory.getObjectMapper();
JsonFactory factory = mapper.getFactory();
try {
JsonParser parser = factory.createParser(jsonEncryptionData);
JsonNode encryptionDataNode = mapper.readTree(parser);

String iv = encryptionDataNode.get("ContentEncryptionIV").asText();
String key = encryptionDataNode.get("WrappedContentKey").get("EncryptedKey").asText();

return new SimpleEntry<String, String>(key, iv);
} catch (Exception ex) {
throw new SnowflakeSQLLoggedException(
queryId,
session,
SqlState.SYSTEM_ERROR,
ErrorCode.IO_ERROR.getMessageCode(),
ex,
"Error parsing encryption data as json" + ": " + ex.getMessage());
}
}

/** Returns the material descriptor key */
@Override
public String getMatdescKey() {
Expand All @@ -996,12 +924,34 @@ public void addEncryptionMetadata(
meta.addUserMetadata(getMatdescKey(), matDesc.toString());
meta.addUserMetadata(
AZ_ENCRYPTIONDATAPROP,
buildEncryptionMetadataJSON(
storageClientHelper.buildEncryptionMetadataJSONForEcbCbc(
Base64.getEncoder().encodeToString(ivData),
Base64.getEncoder().encodeToString(encryptedKey)));
meta.setContentLength(contentLength);
}

/**
 * Attaches AES-GCM encryption metadata to the blob's user-defined metadata before upload.
 *
 * <p>Stores the material descriptor under the matdesc key and a JSON document (built by
 * {@code storageClientHelper.buildEncryptionMetadataJSONForGcm}) under the Azure
 * encryptiondata property; all binary material is Base64-encoded first.
 *
 * @param meta the storage object metadata to populate
 * @param matDesc material descriptor identifying the encryption key material
 * @param encryptedKey the wrapped (encrypted) content-encryption key
 * @param dataIvBytes IV used to encrypt the file content
 * @param keyIvBytes IV used when wrapping the content key
 * @param keyAad additional authenticated data for the key wrap
 * @param dataAad additional authenticated data for the content encryption
 * @param contentLength length of the (encrypted) content, recorded on the metadata
 */
@Override
public void addEncryptionMetadataForGcm(
StorageObjectMetadata meta,
MatDesc matDesc,
byte[] encryptedKey,
byte[] dataIvBytes,
byte[] keyIvBytes,
byte[] keyAad,
byte[] dataAad,
long contentLength) {
meta.addUserMetadata(getMatdescKey(), matDesc.toString());
meta.addUserMetadata(
AZ_ENCRYPTIONDATAPROP,
storageClientHelper.buildEncryptionMetadataJSONForGcm(
Base64.getEncoder().encodeToString(keyIvBytes),
Base64.getEncoder().encodeToString(encryptedKey),
Base64.getEncoder().encodeToString(dataIvBytes),
Base64.getEncoder().encodeToString(keyAad),
Base64.getEncoder().encodeToString(dataAad)));
meta.setContentLength(contentLength);
}

/** Adds digest metadata to the StorageObjectMetadata object */
@Override
public void addDigestMetadata(StorageObjectMetadata meta, String digest) {
Expand Down
Loading
Loading