SQOOP-2890: Provide tooling to encrypt non-encrypted repository and rotate keys
(Abraham Fine via Jarek Jarcec Cecho)
parent 5b897a46fc
commit c6fc7f95ac

@@ -218,6 +218,13 @@ public enum CommonRepositoryError implements ErrorCode {
COMMON_0058("Resource doesn't exist"),
COMMON_0059("Unable to retrieve master key"),
COMMON_0060("Failed to change the master key for inputs"),
COMMON_0061("Unable to delete Master Key"),
COMMON_0062("Invalid primary key after registering entity"),
;
private final String message;

@@ -114,10 +114,6 @@ public Class getDriverJobConfigurationClass() {
}
private Driver() {
List<MConfig> driverConfig = ConfigUtils.toConfigs(getDriverJobConfigurationClass());
List<MValidator> mValidators = ConfigUtils.getMValidatorsFromConfigurationClass(getDriverJobConfigurationClass());
mDriver = new MDriver(new MDriverConfig(driverConfig, mValidators), DriverBean.CURRENT_DRIVER_VERSION);
// Build upgrader
driverUpgrader = new DriverUpgrader();
}

@@ -130,15 +126,20 @@ public synchronized void initialize() {
public synchronized void initialize(boolean autoUpgrade) {
LOG.trace("Begin Driver initialization");
List<MConfig> driverConfig = ConfigUtils.toConfigs(getDriverJobConfigurationClass());
List<MValidator> mValidators = ConfigUtils.getMValidatorsFromConfigurationClass(getDriverJobConfigurationClass());
// Register driver in repository
mDriver = RepositoryManager.getInstance().getRepository().registerDriver(mDriver, autoUpgrade);
mDriver = RepositoryManager.getInstance().getRepository().registerDriver(
new MDriver(new MDriverConfig(driverConfig, mValidators),
DriverBean.CURRENT_DRIVER_VERSION), autoUpgrade);
SqoopConfiguration.getInstance().getProvider().registerListener(new CoreConfigurationListener(this));
LOG.info("Driver initialized: OK");
}
public synchronized void destroy() {
public synchronized void destroy() {
LOG.trace("Begin Driver destroy");
}

@@ -662,20 +662,20 @@ public Object doIt(Connection conn) {
* {@inheritDoc}
*/
@Override
public MMasterKey getMasterKey() {
public MMasterKey getMasterKey(RepositoryTransaction txn) {
return (MMasterKey) doWithConnection(new DoWithConnection() {
@Override
public Object doIt(Connection conn) throws Exception {
return handler.getMasterKey(conn);
}
});
}, (JdbcRepositoryTransaction) txn);
}
/**
* {@inheritDoc}
*/
@Override
public void createMasterKey(final MMasterKey mMasterKey) {
public void createMasterKey(final MMasterKey mMasterKey, RepositoryTransaction txn) {
doWithConnection(new DoWithConnection() {
@Override
public Object doIt(Connection conn) {

@@ -685,7 +685,35 @@ public Object doIt(Connection conn) {
handler.createMasterKey(mMasterKey, conn);
return null;
}
});
}, (JdbcRepositoryTransaction) txn);
}
/**
* {@inheritDoc}
*/
@Override
public void deleteMasterKey(final long masterKeyId, RepositoryTransaction txn) {
doWithConnection(new DoWithConnection() {
@Override
public Object doIt(Connection conn) {
handler.deleteMasterKey(masterKeyId, conn);
return null;
}
}, (JdbcRepositoryTransaction) txn);
}
/**
* {@inheritDoc}
*/
@Override
public void changeMasterKeyManager(final MasterKeyManager fromMasterKeyManager, final MasterKeyManager toMasterKeyManager, RepositoryTransaction txn) {
doWithConnection(new DoWithConnection() {
@Override
public Object doIt(Connection conn) {
handler.changeMasterKeyManager(fromMasterKeyManager, toMasterKeyManager, conn);
return null;
}
}, (JdbcRepositoryTransaction) txn);
}
@Override

@@ -477,9 +477,28 @@ public abstract class JdbcRepositoryHandler {
/**
* Create the master key in the database
*
* @param mMasterKey MMasterKey representing the Master Key
* @param mMasterKey MMasterKey representing the Master Key, this method populates
* its persistence id
* @param conn Connection to the repository
*/
public abstract void createMasterKey(MMasterKey mMasterKey, Connection conn);
/**
* Delete the master key from the database
*
* @param conn Connection to the repository
*/
public abstract void deleteMasterKey(long masterKeyId, Connection conn);
/**
* Change MasterKeyManager for inputs
*
* @param fromMasterKeyManager The master key manager that currently represents
* the encryption strategy for the inputs
* @param toMasterKeyManager The master key manager that will represent the
* encryption strategy for the inputs
* @param conn Connection to the repository
*/
public abstract void changeMasterKeyManager(MasterKeyManager fromMasterKeyManager, MasterKeyManager toMasterKeyManager, Connection conn);
}

@@ -53,6 +53,9 @@ public class MasterKeyManager {
private int pbkdf2Rounds;
private int ivLength;
private RepositoryTransaction repositoryTransaction;
private MMasterKey mMasterKey;
private SecretKey masterEncryptionKey;
private SecretKey masterHmacKey;

@@ -64,7 +67,7 @@ public class MasterKeyManager {
instance = new MasterKeyManager();
}
private MasterKeyManager() {
public MasterKeyManager() {
}
public static MasterKeyManager getInstance() {

@@ -79,128 +82,168 @@ public void initialize() throws SqoopException {
initialize(true);
}
public synchronized void initialize(boolean createMasterKey) throws SqoopException {
// This is used for the generation of random initialization vectors and salts
random = new SecureRandom();
public void initialize(boolean createMasterKey) throws SqoopException {
initialize(createMasterKey, false, null);
}
public void initialize(boolean createMasterKey, boolean createKeyEvenIfKeyExists, RepositoryTransaction repositoryTransactionArg) throws SqoopException {
MapContext configurationContext = SqoopConfiguration.getInstance().getContext();
if (configurationContext.getBoolean(SecurityConstants.REPO_ENCRYPTION_ENABLED, false)) {
// Grab configuration from the sqoop properties file. All of this configuration is required
// and an exception will be thrown if any of it is missing
hmacAlgorithm = populateStringConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM);
cipherAlgorithm = populateStringConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM);
cipherSpec = populateStringConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC);
cipherKeySize = populateIntConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE);
ivLength = populateIntConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE);
pbkdf2Algorithm = populateStringConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM);
pbkdf2Rounds = populateIntConfiguration(configurationContext, SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS);
String hmacAlgorithm = populateStringConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM);
String cipherAlgorithm = populateStringConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM);
String cipherSpec = populateStringConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC);
int cipherKeySize = populateIntConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE);
int ivLength = populateIntConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE);
String pbkdf2Algorithm = populateStringConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM);
int pbkdf2Rounds = populateIntConfiguration(configurationContext,
SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS);
// The size of the hmac key can be derived from the provided HMAC algorithm
try {
hmacKeySizeBytes = Mac.getInstance(hmacAlgorithm).getMacLength();
} catch (NoSuchAlgorithmException e) {
throw new SqoopException(SecurityError.ENCRYPTION_0011, e);
}
Repository repository = RepositoryManager.getInstance().getRepository();
String password = PasswordUtils.readPassword(configurationContext, SecurityConstants.REPO_ENCRYPTION_PASSWORD,
String password = PasswordUtils.readPassword(configurationContext,
SecurityConstants.REPO_ENCRYPTION_PASSWORD,
SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR);
if (StringUtils.isEmpty(password)) {
throw new SqoopException(SecurityError.ENCRYPTION_0008);
}
MMasterKey existingEncryptedMasterKey = repository.getMasterKey();
String salt;
if (existingEncryptedMasterKey == null) {
// Since the master key does not exist, we can generate a random salt that we will use
// for encryption of the Master Key
// We will use a salt that is the same size as the encryption key
salt = Base64.encodeBase64String(generateRandomByteArray(hmacKeySizeBytes));
} else {
// Since the master key already exists, we will read the salt from the repository
salt = existingEncryptedMasterKey.getSalt();
}
// Derive two keys (that we will be used to encrypt and verify the master key)
// from the configuration provided password and the salt we just read/created.
byte[] keyBytes = getKeysFromPassword(password, salt);
SecretKey passwordEncryptionKey = new SecretKeySpec(keyBytes, 0,
cipherKeySize, cipherAlgorithm);
SecretKey passwordHmacKey = new SecretKeySpec(keyBytes,
cipherKeySize, hmacKeySizeBytes, hmacAlgorithm);
byte[] masterEncryptionKeyBytes;
byte[] masterHmacKeyBytes;
if (existingEncryptedMasterKey == null) {
if (createMasterKey) {
// A master key does not exist so we must create one. We will simply
// use two random byte arrays for the encryption and hmac components.
// The sizes of these keys is determined by the values provided to
// configuration.
masterEncryptionKeyBytes = generateRandomByteArray(cipherKeySize);
masterHmacKeyBytes = generateRandomByteArray(hmacKeySizeBytes);
// The initialization vector for the encryption of the master key is
// randomly generated.
String iv = Base64.encodeBase64String(generateRandomByteArray(ivLength));
// We append our two keys together and encrypt the resulting byte array.
// This is the secret that all of the encryption in the repository depends upon
byte[] secret = ArrayUtils.addAll(masterEncryptionKeyBytes, masterHmacKeyBytes);
String encryptedSecret = encryptToString(passwordEncryptionKey, secret, iv);
// We store our new master key in the repository in its encrypted form
// along with an HMAC to verify the key when we read it, the salt needed to
// generate keys to decrypt it, and the initialization vector used
repository.createMasterKey(new MMasterKey(encryptedSecret, generateHmac(passwordHmacKey,
encryptedSecret), salt, iv));
} else {
// If a master key does not exist and we are trying to initialize the
// manager without allowing it to create a master key, we should fail
throw new SqoopException(SecurityError.ENCRYPTION_0002);
}
} else {
// A master key exists so we need to read it from the repository and
// decrypt it.
String iv = existingEncryptedMasterKey.getIv();
String encryptedSecret = existingEncryptedMasterKey.getEncryptedSecret();
// Before we go about decrypting the master key we should verify the hmac
// to ensure that it has not been tampered with
String hmac = existingEncryptedMasterKey.getHmac();
if (!validHmac(passwordHmacKey, encryptedSecret, hmac)) {
throw new SqoopException(SecurityError.ENCRYPTION_0001);
}
// The master key has not been tampered with, lets decrypt it using the key
// derived from the password and the initialization vector from the repository
byte[] decryptedKey = decryptToBytes(passwordEncryptionKey, encryptedSecret, iv);
// Since the master key is stored as the concatenation of an encryption
// key and an hmac key, we need to split it according to the sizes derived
// from the configuration
masterEncryptionKeyBytes = new byte[cipherKeySize];
masterHmacKeyBytes = new byte[hmacKeySizeBytes];
System.arraycopy(decryptedKey, 0, masterEncryptionKeyBytes, 0,
cipherKeySize);
System.arraycopy(decryptedKey, cipherKeySize,
masterHmacKeyBytes, 0, hmacKeySizeBytes);
}
// Place the master encryption and master hmac key in SecretKey objects
// so we can use them to encrypt and decrypt data
masterEncryptionKey = new SecretKeySpec(masterEncryptionKeyBytes, 0, cipherKeySize, cipherAlgorithm);
masterHmacKey = new SecretKeySpec(masterHmacKeyBytes, 0, hmacKeySizeBytes, hmacAlgorithm);
initialize(createMasterKey, hmacAlgorithm, cipherAlgorithm, cipherSpec, cipherKeySize,
ivLength, pbkdf2Algorithm, pbkdf2Rounds, password, createKeyEvenIfKeyExists, repositoryTransactionArg);
}
}
public synchronized void initialize(boolean createMasterKey, String hmacAlgorithmArg,
String cipherAlgorithmArg, String cipherSpecArg,
int cipherKeySizeArg, int ivLengthArg,
String pbkdf2AlgorithmArg, int pbkdf2RoundsArg,
String password, boolean createKeyEvenIfKeyExists, RepositoryTransaction repositoryTransactionArg) throws SqoopException {
hmacAlgorithm = hmacAlgorithmArg;
cipherAlgorithm = cipherAlgorithmArg;
cipherSpec = cipherSpecArg;
cipherKeySize = cipherKeySizeArg;
ivLength = ivLengthArg;
pbkdf2Algorithm = pbkdf2AlgorithmArg;
pbkdf2Rounds = pbkdf2RoundsArg;
repositoryTransaction = repositoryTransactionArg;
// This is used for the generation of random initialization vectors and salts
random = new SecureRandom();
// The size of the hmac key can be derived from the provided HMAC algorithm
try {
hmacKeySizeBytes = Mac.getInstance(hmacAlgorithm).getMacLength();
} catch (NoSuchAlgorithmException e) {
throw new SqoopException(SecurityError.ENCRYPTION_0011, e);
}
Repository repository = RepositoryManager.getInstance().getRepository();
if (StringUtils.isEmpty(password)) {
throw new SqoopException(SecurityError.ENCRYPTION_0008);
}
MMasterKey existingEncryptedMasterKey = repository.getMasterKey(repositoryTransaction);
String salt;
if (existingEncryptedMasterKey == null || createKeyEvenIfKeyExists) {
// Since the master key does not exist, we can generate a random salt that we will use
// for encryption of the Master Key
// We will use a salt that is the same size as the encryption key
salt = Base64.encodeBase64String(generateRandomByteArray(hmacKeySizeBytes));
} else {
// Since the master key already exists, we will read the salt from the repository
salt = existingEncryptedMasterKey.getSalt();
}
// Derive two keys (that we will be used to encrypt and verify the master key)
// from the configuration provided password and the salt we just read/created.
byte[] keyBytes = getKeysFromPassword(password, salt);
SecretKey passwordEncryptionKey = new SecretKeySpec(keyBytes, 0,
cipherKeySize, cipherAlgorithm);
SecretKey passwordHmacKey = new SecretKeySpec(keyBytes,
cipherKeySize, hmacKeySizeBytes, hmacAlgorithm);
byte[] masterEncryptionKeyBytes;
byte[] masterHmacKeyBytes;
if (existingEncryptedMasterKey == null || createKeyEvenIfKeyExists) {
if (createMasterKey) {
// A master key does not exist so we must create one. We will simply
// use two random byte arrays for the encryption and hmac components.
// The sizes of these keys is determined by the values provided to
// configuration.
masterEncryptionKeyBytes = generateRandomByteArray(cipherKeySize);
masterHmacKeyBytes = generateRandomByteArray(hmacKeySizeBytes);
// The initialization vector for the encryption of the master key is
// randomly generated.
String iv = Base64.encodeBase64String(generateRandomByteArray(ivLength));
// We append our two keys together and encrypt the resulting byte array.
// This is the secret that all of the encryption in the repository depends upon
byte[] secret = ArrayUtils.addAll(masterEncryptionKeyBytes, masterHmacKeyBytes);
String encryptedSecret = encryptToString(passwordEncryptionKey, secret, iv);
// We store our new master key in the repository in its encrypted form
// along with an HMAC to verify the key when we read it, the salt needed to
// generate keys to decrypt it, and the initialization vector used
mMasterKey = new MMasterKey(encryptedSecret, generateHmac(passwordHmacKey, encryptedSecret), salt, iv);
repository.createMasterKey(mMasterKey, repositoryTransaction);
} else {
// If a master key does not exist and we are trying to initialize the
// manager without allowing it to create a master key, we should fail
throw new SqoopException(SecurityError.ENCRYPTION_0002);
}
} else {
// A master key exists so we need to read it from the repository and
// decrypt it.
mMasterKey = existingEncryptedMasterKey;
String iv = existingEncryptedMasterKey.getIv();
String encryptedSecret = existingEncryptedMasterKey.getEncryptedSecret();
// Before we go about decrypting the master key we should verify the hmac
// to ensure that it has not been tampered with
String hmac = existingEncryptedMasterKey.getHmac();
if (!validHmac(passwordHmacKey, encryptedSecret, hmac)) {
throw new SqoopException(SecurityError.ENCRYPTION_0001);
}
// The master key has not been tampered with, lets decrypt it using the key
// derived from the password and the initialization vector from the repository
byte[] decryptedKey = decryptToBytes(passwordEncryptionKey, encryptedSecret, iv);
// Since the master key is stored as the concatenation of an encryption
// key and an hmac key, we need to split it according to the sizes derived
// from the configuration
masterEncryptionKeyBytes = new byte[cipherKeySize];
masterHmacKeyBytes = new byte[hmacKeySizeBytes];
System.arraycopy(decryptedKey, 0, masterEncryptionKeyBytes, 0,
cipherKeySize);
System.arraycopy(decryptedKey, cipherKeySize,
masterHmacKeyBytes, 0, hmacKeySizeBytes);
}
// Place the master encryption and master hmac key in SecretKey objects
// so we can use them to encrypt and decrypt data
masterEncryptionKey = new SecretKeySpec(masterEncryptionKeyBytes, 0, cipherKeySize, cipherAlgorithm);
masterHmacKey = new SecretKeySpec(masterHmacKeyBytes, 0, hmacKeySizeBytes, hmacAlgorithm);
}
public synchronized void destroy() {
hmacAlgorithm = null;
hmacKeySizeBytes = 0;
cipherAlgorithm = null;
cipherKeySize = 0;
cipherSpec = null;
pbkdf2Algorithm = null;
pbkdf2Rounds = 0;
ivLength = 0;
repositoryTransaction = null;
mMasterKey = null;
masterEncryptionKey = null;
masterHmacKey = null;

@@ -208,6 +251,10 @@ public synchronized void destroy() {
random = null;
}
public void deleteMasterKeyFromRepository() {
RepositoryManager.getInstance().getRepository().deleteMasterKey(mMasterKey.getPersistenceId(), repositoryTransaction);
}
/**
* Returns a Base64 representation of the encrypted cleartext, using the Master Key
*
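
The initialize() logic above always verifies the HMAC of the stored, encrypted master-key secret before decrypting it. As a standalone illustration only (these are not Sqoop's own helper methods, and whether Sqoop HMACs the Base64 string or the raw bytes is not asserted here), a verify-then-decrypt step can be written with plain JCE and commons-codec like this; the constant-time comparison via MessageDigest.isEqual is an added good-practice assumption:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import javax.crypto.Cipher;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import org.apache.commons.codec.binary.Base64;

public class VerifyThenDecryptSketch {

  // Returns true only if the Base64-encoded HMAC matches the ciphertext under hmacKey.
  static boolean validHmac(SecretKey hmacKey, String ciphertextBase64, String hmacBase64) throws Exception {
    Mac mac = Mac.getInstance(hmacKey.getAlgorithm());
    mac.init(hmacKey);
    byte[] expected = mac.doFinal(ciphertextBase64.getBytes(StandardCharsets.UTF_8));
    return MessageDigest.isEqual(expected, Base64.decodeBase64(hmacBase64));
  }

  // Decrypts a Base64 ciphertext with AES/CBC/PKCS5Padding and the given Base64 IV.
  static byte[] decrypt(SecretKey encryptionKey, String ciphertextBase64, String ivBase64) throws Exception {
    Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
    cipher.init(Cipher.DECRYPT_MODE, encryptionKey, new IvParameterSpec(Base64.decodeBase64(ivBase64)));
    return cipher.doFinal(Base64.decodeBase64(ciphertextBase64));
  }
}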

@@ -33,7 +33,6 @@
import org.apache.sqoop.driver.DriverUpgrader;
import org.apache.sqoop.json.DriverBean;
import org.apache.sqoop.model.ConfigUtils;
import org.apache.sqoop.model.MConfig;
import org.apache.sqoop.model.MConfigList;
import org.apache.sqoop.model.MConnector;
import org.apache.sqoop.model.MDriver;

@@ -332,19 +331,40 @@ public abstract class Repository {
/**
* Get the encrypted master key from the repository
* There should only be one unless the repository encryption tool is in use
*
* @return The encrypted master key, null if no master key exists
*/
public abstract MMasterKey getMasterKey();
public abstract MMasterKey getMasterKey(RepositoryTransaction tx);
/**
* Create the encrypted master key in the repository
*
* @param mMasterKey The encrypted master key
* @param mMasterKey The encrypted master key. Its persistenceId will be populated
*/
public abstract void createMasterKey(MMasterKey mMasterKey);
public abstract void createMasterKey(MMasterKey mMasterKey, RepositoryTransaction tx);
/**
* Delete the master key record for the given id
*
* @param masterKeyId id of the master key to delete
* @param tx Transaction to perform the change within
*/
public abstract void deleteMasterKey(long masterKeyId, RepositoryTransaction tx);
/**
* Change MasterKeyManager for inputs
*
* @param fromMasterKeyManager The master key manager that currently represents
* the encryption strategy for the inputs.
* null if the inputs are not currently encrypted
* @param toMasterKeyManager The master key manager that will represent the
* encryption strategy for the inputs.
* null if we would like to decrypt the inputs
* @param tx Transaction to perform the change within
*/
public abstract void changeMasterKeyManager(MasterKeyManager fromMasterKeyManager, MasterKeyManager toMasterKeyManager, RepositoryTransaction tx);
/*********************Configurable Upgrade APIs ******************************/
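
To make the new transactional signatures above concrete, here is a hypothetical usage sketch of how a caller such as the repository encryption tool might drive them. It is not taken from this commit: the transaction plumbing (getTransaction, begin, commit, rollback, close) is assumed from the usual Sqoop RepositoryTransaction contract, Sqoop imports are omitted, and MasterKeyManager construction is elided.

// Hypothetical sketch: rotate (or remove) the repository master key inside one transaction.
public void rotateMasterKey(MasterKeyManager fromManager, MasterKeyManager toManager) {
  Repository repository = RepositoryManager.getInstance().getRepository();
  RepositoryTransaction tx = repository.getTransaction();   // assumed accessor
  try {
    tx.begin();
    MMasterKey oldKey = repository.getMasterKey(tx);         // null when the repository is unencrypted
    repository.changeMasterKeyManager(fromManager, toManager, tx); // re-encrypt (or decrypt) all sensitive inputs
    if (oldKey != null) {
      repository.deleteMasterKey(oldKey.getPersistenceId(), tx);
    }
    tx.commit();
  } catch (RuntimeException e) {
    tx.rollback();
    throw e;
  } finally {
    tx.close();
  }
}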

@@ -102,6 +102,12 @@ public enum SecurityError implements ErrorCode {
/** The HMAC algorithm specified in configuration could not be found */
ENCRYPTION_0011("HMAC algorithm not found"),
/** An error occured when running the repository encryption tool */
ENCRYPTION_0012("Failed to run repository encryption tool"),
/** An error occured when running the repository encryption tool */
ENCRYPTION_0013("Repository is encrypted, need configuration to decrypt"),
;

@@ -81,7 +81,7 @@ public void setUp() throws Exception {
jdbcRepoMock = mock(JdbcRepository.class);
when(jdbcRepoMock.getMasterKey()).thenReturn(null);
when(jdbcRepoMock.getMasterKey(null)).thenReturn(null);
when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
masterKeyManager = MasterKeyManager.getInstance();

@@ -106,8 +106,7 @@ public void testInitializeWithoutKeyCreationWithExistingKey() {
ArgumentCaptor<MMasterKey> mMasterKeyArgumentCaptor = ArgumentCaptor
.forClass(MMasterKey.class);
verify(jdbcRepoMock, times(1)).createMasterKey(mMasterKeyArgumentCaptor
.capture());
verify(jdbcRepoMock, times(1)).createMasterKey(mMasterKeyArgumentCaptor.capture(), any(RepositoryTransaction.class));
// Encrypt something with that master key
String secret = "imasecret";

@@ -120,12 +119,12 @@ public void testInitializeWithoutKeyCreationWithExistingKey() {
// Create a new MasterKeyManager instance with existing master key
// coming from the "db"
jdbcRepoMock = mock(JdbcRepository.class);
when(jdbcRepoMock.getMasterKey()).thenReturn(mMasterKeyArgumentCaptor
when(jdbcRepoMock.getMasterKey(null)).thenReturn(mMasterKeyArgumentCaptor
.getValue());
when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
masterKeyManager.initialize();
verify(jdbcRepoMock, times(1)).getMasterKey();
verify(jdbcRepoMock, times(1)).getMasterKey(null);
// Try to decrypt
assertEquals(masterKeyManager.decryptWithMasterKey(encrypted, iv, masterKeyManager.generateHmacWithMasterHmacKey(encrypted)), secret);

@@ -135,7 +134,7 @@ public void testInitializeWithoutKeyCreationWithExistingKey() {
public void testInitializeWithKeyCreationWithoutExistingKey() {
masterKeyManager.initialize();
verify(jdbcRepoMock, times(1)).createMasterKey(any(MMasterKey.class));
verify(jdbcRepoMock, times(1)).createMasterKey(any(MMasterKey.class), any(RepositoryTransaction.class));
}
@Test(

@@ -144,7 +143,7 @@ public void testInitializeWithKeyCreationWithoutExistingKey() {
)
public void testMasterKeyWithInvalidHmac() {
jdbcRepoMock = mock(JdbcRepository.class);
when(jdbcRepoMock.getMasterKey()).thenReturn(new MMasterKey(
when(jdbcRepoMock.getMasterKey(null)).thenReturn(new MMasterKey(
Base64.encodeBase64String(generateRandomByteArray(CIPHER_KEY_SIZE_BYTES)),
Base64.encodeBase64String(generateRandomByteArray(HMAC_KEY_SIZE_BYTES)),
Base64.encodeBase64String(generateRandomByteArray(CIPHER_KEY_SIZE_BYTES)),

dist/src/main/conf/sqoop.properties

@@ -197,9 +197,9 @@ org.apache.sqoop.execution.engine=org.apache.sqoop.execution.mapreduce.Mapreduce
#org.apache.sqoop.security.repo_encryption.password_generator=
#org.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256
#org.apache.sqoop.security.repo_encryption.cipher_algorithm=AES
#org.apache.sqoop.security.repo_encryption.cipher_key_size=128
#org.apache.sqoop.security.repo_encryption.cipher_key_size=16
#org.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding
#org.apache.sqoop.security.repo_encryption.initialization_vector_size=128
#org.apache.sqoop.security.repo_encryption.initialization_vector_size=16
#org.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1
#org.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000

@@ -14,9 +14,9 @@
limitations under the License.

==========
Encryption
==========
===========
API TLS/SSL
===========

Sqoop 2 offers an HTTP REST-like API as the mechanism by which clients can
communicate with the Sqoop 2 server. The Sqoop 2 server and the Sqoop 2 shell

@@ -73,7 +73,7 @@ is configured.
org.apache.sqoop.security.tls.keymanager_password=echo keymanagerpassword

Client/Shell Configuration
--------------------------
==========================

When using TLS on the Sqoop 2 server, especially with a self-signed certificate,
it may be useful to specify a truststore for the client/shell to use.

@@ -99,4 +99,212 @@ You may also use a password generator.
::

  sqoop:000> set truststore --truststore /Users/abefine/keystore/node2.truststore --truststore-password-generator "echo changeme"
  Truststore set successfully
  Truststore set successfully

=====================
Repository Encryption
=====================

Sqoop 2 uses a database to store metadata about the various data sources it talks to; we call this database the repository.

The repository can store passwords and other pieces of information that are security sensitive. Within the context of Sqoop
2, this information is referred to as sensitive inputs. Which inputs are considered sensitive is determined by the connector.

We support encrypting sensitive inputs in the repository using a provided password or password generator. Sqoop 2 uses the
provided password and the provided key generation algorithm (such as PBKDF2) to generate a key to encrypt sensitive inputs
and another HMAC key to verify their integrity.

Only the sensitive inputs are encrypted. If an input is not defined as sensitive by the connector, it is NOT encrypted.
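For illustration only, the following is a minimal sketch (not the actual Sqoop implementation) of how a password and a salt can be turned into one encryption key plus one HMAC key using the standard JCE classes this documentation references (PBKDF2WithHmacSHA1, AES, HmacSHA256); the key and iteration sizes are assumptions mirroring the defaults shown below.

::

  import java.security.SecureRandom;
  import javax.crypto.Mac;
  import javax.crypto.SecretKeyFactory;
  import javax.crypto.spec.PBEKeySpec;
  import javax.crypto.spec.SecretKeySpec;

  public class KeyDerivationSketch {
    public static void main(String[] args) throws Exception {
      char[] password = "supersecret".toCharArray();
      byte[] salt = new byte[32];
      new SecureRandom().nextBytes(salt);          // random salt, stored alongside the encrypted key

      int cipherKeyBytes = 16;                     // 16 bytes = 128-bit AES key
      int hmacKeyBytes = Mac.getInstance("HmacSHA256").getMacLength();

      // Derive enough key material for both keys in a single PBKDF2 pass
      PBEKeySpec spec = new PBEKeySpec(password, salt, 4000,
          (cipherKeyBytes + hmacKeyBytes) * 8);    // key length is given in bits
      byte[] keyBytes = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1")
          .generateSecret(spec).getEncoded();

      // The first part encrypts, the second part authenticates
      SecretKeySpec encryptionKey = new SecretKeySpec(keyBytes, 0, cipherKeyBytes, "AES");
      SecretKeySpec hmacKey = new SecretKeySpec(keyBytes, cipherKeyBytes, hmacKeyBytes, "HmacSHA256");
      System.out.println(encryptionKey.getAlgorithm() + " / " + hmacKey.getAlgorithm());
    }
  }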
Server Configuration
=====================

Note: This configuration will allow a new Sqoop instance to encrypt information or read from an already encrypted repository.
It will not encrypt sensitive inputs in an existing repository. Please see below for instructions on how to encrypt an existing repository.

First, repository encryption must be enabled.

::

  org.apache.sqoop.security.repo_encryption.enabled=true

Then we configure the password:

::

  org.apache.sqoop.security.repo_encryption.password=supersecret

Or the password generator:

::

  org.apache.sqoop.security.repo_encryption.password_generator=echo supersecret

The plaintext password always takes precedence over the password generator if both are present.

Then we can configure the HMAC algorithm. Please find the list of possibilities here:
`Standard Algorithm Name Documentation - Mac <http://docs.oracle.com/javase/7/docs/technotes/guides/security/StandardNames.html#Mac>`_
We can store digests with up to 1024 bits.

::

  org.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256

Then we configure the cipher algorithm. Possibilities can be found here:
`Standard Algorithm Name Documentation - Cipher <http://docs.oracle.com/javase/7/docs/technotes/guides/security/StandardNames.html#Cipher>`_

::

  org.apache.sqoop.security.repo_encryption.cipher_algorithm=AES

Then we configure the key size for the cipher in bytes (for example, 16 bytes corresponds to a 128-bit AES key). We can store up to 1024 bit keys.

::

  org.apache.sqoop.security.repo_encryption.cipher_key_size=16

Next we need to specify the cipher transformation. The options for this field are listed here:
`Cipher (Java Platform SE 7) <http://docs.oracle.com/javase/7/docs/api/javax/crypto/Cipher.html>`_

::

  org.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding

Next we configure the size of the initialization vector to use, in bytes. We support up to 1024 bit initialization vectors.

::

  org.apache.sqoop.security.repo_encryption.initialization_vector_size=16

Next we need to specify the algorithm for secret key generation. Please refer to:
`Standard Algorithm Name Documentation - SecretKeyFactory <http://docs.oracle.com/javase/7/docs/technotes/guides/security/StandardNames.html#SecretKeyFactory>`_

::

  org.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1

Finally, specify the number of rounds/iterations for the generation of a key from a password.

::

  org.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000
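Putting the options above together, a complete repository-encryption block in ``sqoop.properties`` might look like the following (illustrative only; the password value is a placeholder):

::

  org.apache.sqoop.security.repo_encryption.enabled=true
  org.apache.sqoop.security.repo_encryption.password=supersecret
  org.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256
  org.apache.sqoop.security.repo_encryption.cipher_algorithm=AES
  org.apache.sqoop.security.repo_encryption.cipher_key_size=16
  org.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding
  org.apache.sqoop.security.repo_encryption.initialization_vector_size=16
  org.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1
  org.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000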
Repository Encryption Tool
==========================

Sometimes we may want to change the password that is used to encrypt our data, generate a new key for our existing password,
encrypt an existing unencrypted repository, or decrypt an existing encrypted repository. Sqoop 2 provides the
Repository Encryption Tool to allow us to do this.

Before using the tool it is important to shut down the Sqoop 2 server.

All changes that the tool makes occur in a single transaction with the repository, which should prevent leaving the
repository in a bad state.

The Repository Encryption Tool is very simple: it uses the exact same configuration specified above (with the exception
of ``useConf``). Configuration prefixed with a "-F" represents the existing repository state; configuration prefixed with
a "-T" represents the desired repository state. If one of these configuration sets is left out, that side is treated as unencrypted.

Changing the Password
---------------------

In order to change the password, we need to specify the current configuration with the existing password and the desired
configuration with the new password. It looks like this:

::

  sqoop.sh tool repositoryencryption \
    -Forg.apache.sqoop.security.repo_encryption.password=old_password \
    -Forg.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256 \
    -Forg.apache.sqoop.security.repo_encryption.cipher_algorithm=AES \
    -Forg.apache.sqoop.security.repo_encryption.cipher_key_size=16 \
    -Forg.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding \
    -Forg.apache.sqoop.security.repo_encryption.initialization_vector_size=16 \
    -Forg.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1 \
    -Forg.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000 \
    -Torg.apache.sqoop.security.repo_encryption.password=new_password \
    -Torg.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256 \
    -Torg.apache.sqoop.security.repo_encryption.cipher_algorithm=AES \
    -Torg.apache.sqoop.security.repo_encryption.cipher_key_size=16 \
    -Torg.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding \
    -Torg.apache.sqoop.security.repo_encryption.initialization_vector_size=16 \
    -Torg.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1 \
    -Torg.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000

Generate a New Key for the Existing Password
--------------------------------------------

Just like in the previous scenario, you could copy the same configuration twice, like this:

::

  sqoop.sh tool repositoryencryption \
    -Forg.apache.sqoop.security.repo_encryption.password=password \
    -Forg.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256 \
    -Forg.apache.sqoop.security.repo_encryption.cipher_algorithm=AES \
    -Forg.apache.sqoop.security.repo_encryption.cipher_key_size=16 \
    -Forg.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding \
    -Forg.apache.sqoop.security.repo_encryption.initialization_vector_size=16 \
    -Forg.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1 \
    -Forg.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000 \
    -Torg.apache.sqoop.security.repo_encryption.password=password \
    -Torg.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256 \
    -Torg.apache.sqoop.security.repo_encryption.cipher_algorithm=AES \
    -Torg.apache.sqoop.security.repo_encryption.cipher_key_size=16 \
    -Torg.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding \
    -Torg.apache.sqoop.security.repo_encryption.initialization_vector_size=16 \
    -Torg.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1 \
    -Torg.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000

But we do have a shortcut to make this easier:

::

  sqoop.sh tool repositoryencryption -FuseConf -TuseConf

The ``useConf`` option will read whatever configuration is already in the configured sqoop properties file and apply it
for the specified direction.

Encrypting an Existing Unencrypted Repository
---------------------------------------------

::

  sqoop.sh tool repositoryencryption \
    -Torg.apache.sqoop.security.repo_encryption.password=password \
    -Torg.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256 \
    -Torg.apache.sqoop.security.repo_encryption.cipher_algorithm=AES \
    -Torg.apache.sqoop.security.repo_encryption.cipher_key_size=16 \
    -Torg.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding \
    -Torg.apache.sqoop.security.repo_encryption.initialization_vector_size=16 \
    -Torg.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1 \
    -Torg.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000

If the configuration for the encrypted repository has already been written to the sqoop properties file, one can simply
execute:

::

  sqoop.sh tool repositoryencryption -TuseConf

Decrypting an Existing Encrypted Repository
-------------------------------------------

::

  sqoop.sh tool repositoryencryption \
    -Forg.apache.sqoop.security.repo_encryption.password=password \
    -Forg.apache.sqoop.security.repo_encryption.hmac_algorithm=HmacSHA256 \
    -Forg.apache.sqoop.security.repo_encryption.cipher_algorithm=AES \
    -Forg.apache.sqoop.security.repo_encryption.cipher_key_size=16 \
    -Forg.apache.sqoop.security.repo_encryption.cipher_spec=AES/CBC/PKCS5Padding \
    -Forg.apache.sqoop.security.repo_encryption.initialization_vector_size=16 \
    -Forg.apache.sqoop.security.repo_encryption.pbkdf2_algorithm=PBKDF2WithHmacSHA1 \
    -Forg.apache.sqoop.security.repo_encryption.pbkdf2_rounds=4000

If the configuration for the encrypted repository has not yet been removed from the sqoop properties file, one can simply
execute:

::

  sqoop.sh tool repositoryencryption -FuseConf
@ -1164,12 +1164,15 @@ public MMasterKey getMasterKey(Connection conn) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String aesKey = rs.getString(1);
|
||||
String hmac = rs.getString(2);
|
||||
String salt = rs.getString(3);
|
||||
String iv = rs.getString(4);
|
||||
String aesKey = rs.getString(2);
|
||||
String hmac = rs.getString(3);
|
||||
String salt = rs.getString(4);
|
||||
String iv = rs.getString(5);
|
||||
|
||||
return new MMasterKey(aesKey, hmac, salt, iv);
|
||||
MMasterKey mMasterKey = new MMasterKey(aesKey, hmac, salt, iv);
|
||||
mMasterKey.setPersistenceId(rs.getLong(1));
|
||||
|
||||
return mMasterKey;
|
||||
}
|
||||
} catch (SQLException ex) {
|
||||
logException(ex);
|
||||
@ -1183,8 +1186,7 @@ public MMasterKey getMasterKey(Connection conn) {
|
||||
@Override
|
||||
public void createMasterKey(MMasterKey mMasterKey, Connection conn) {
|
||||
int result;
|
||||
try (PreparedStatement preparedStatement = conn.prepareStatement(crudQueries.getStmtInsertSqMasterKey(),
|
||||
Statement.RETURN_GENERATED_KEYS)) {
|
||||
try (PreparedStatement preparedStatement = conn.prepareStatement(crudQueries.getStmtInsertSqMasterKey(), Statement.RETURN_GENERATED_KEYS)) {
|
||||
preparedStatement.setString(1, mMasterKey.getEncryptedSecret());
|
||||
preparedStatement.setString(2, mMasterKey.getHmac());
|
||||
preparedStatement.setString(3, mMasterKey.getSalt());
|
||||
@ -1195,12 +1197,60 @@ public void createMasterKey(MMasterKey mMasterKey, Connection conn) {
|
||||
throw new SqoopException(CommonRepositoryError.COMMON_0009,
|
||||
Integer.toString(result));
|
||||
}
|
||||
|
||||
long masterKeyId = -1;
|
||||
try(ResultSet primaryKeyResultSet = preparedStatement.getGeneratedKeys()) {
|
||||
if (primaryKeyResultSet.next()) {
|
||||
masterKeyId = primaryKeyResultSet.getLong(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (masterKeyId < 0) {
|
||||
throw new SqoopException(CommonRepositoryError.COMMON_0062);
|
||||
}
|
||||
|
||||
mMasterKey.setPersistenceId(masterKeyId);
|
||||
|
||||
} catch (SQLException ex) {
|
||||
logException(ex, mMasterKey);
|
||||
throw new SqoopException(CommonRepositoryError.COMMON_0031, ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public void deleteMasterKey(long masterKeyId, Connection conn) {
|
||||
try (PreparedStatement deleteMasterKeyPreparedStatement = conn.prepareStatement(crudQueries.getDeleteSqMasterKey())) {
|
||||
deleteMasterKeyPreparedStatement.setLong(1, masterKeyId);
|
||||
deleteMasterKeyPreparedStatement.executeUpdate();
|
||||
} catch (SQLException ex) {
|
||||
logException(ex);
|
||||
throw new SqoopException(CommonRepositoryError.COMMON_0061, ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public void changeMasterKeyManager(MasterKeyManager fromMasterKeyManager, MasterKeyManager toMasterKeyManager, Connection conn) {
|
||||
try (
|
||||
PreparedStatement fetchLinkInputs = conn.prepareStatement(crudQueries.getStmtFetchLinkInput());
|
||||
PreparedStatement updateLinkInput = conn.prepareStatement(crudQueries.getUpdateLinkInput());
|
||||
PreparedStatement fetchJobInputs = conn.prepareStatement(crudQueries.getStmtFetchJobInput());
|
||||
PreparedStatement updateJobInput = conn.prepareStatement(crudQueries.getUpdateJobInput())
|
||||
) {
|
||||
transitionInputs(fromMasterKeyManager, toMasterKeyManager, fetchLinkInputs, updateLinkInput);
|
||||
transitionInputs(fromMasterKeyManager, toMasterKeyManager, fetchJobInputs, updateJobInput);
|
||||
} catch (SQLException exception) {
|
||||
logException(exception);
|
||||
throw new SqoopException(CommonRepositoryError.COMMON_0060, exception);
|
||||
}
|
||||
}
|
||||
|
||||
private void insertConnectorDirection(Long connectorId, Direction direction, Connection conn)
|
||||
throws SQLException {
|
||||
try (PreparedStatement stmt = conn.prepareStatement(crudQueries.getStmtInsertSqConnectorDirections())) {
|
||||
@ -1646,7 +1696,7 @@ private List<MLink> loadLinksForUpgrade(PreparedStatement stmt,
|
||||
|
||||
try (ResultSet rsConnection = stmt.executeQuery();
|
||||
PreparedStatement connectorConfigFetchStatement = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfigurable());
|
||||
PreparedStatement connectorConfigInputStatement = conn.prepareStatement(crudQueries.getStmtFetchLinkInput());) {
|
||||
PreparedStatement connectorConfigInputStatement = conn.prepareStatement(crudQueries.getStmtFetchLinkInputByJob());) {
|
||||
while(rsConnection.next()) {
|
||||
long id = rsConnection.getLong(1);
|
||||
String name = rsConnection.getString(2);
|
||||
@ -1691,7 +1741,7 @@ private List<MLink> loadLinks(PreparedStatement stmt,
|
||||
|
||||
try (ResultSet rsConnection = stmt.executeQuery();
|
||||
PreparedStatement configStmt = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfiguration());
|
||||
PreparedStatement inputStmt = conn.prepareStatement(crudQueries.getStmtFetchLinkInput());
|
||||
PreparedStatement inputStmt = conn.prepareStatement(crudQueries.getStmtFetchLinkInputByJob());
|
||||
) {
|
||||
while(rsConnection.next()) {
|
||||
long id = rsConnection.getLong(1);
|
||||
@ -1734,7 +1784,7 @@ private List<MJob> loadJobsForUpgrade(PreparedStatement stmt,
|
||||
PreparedStatement fromConfigFetchStmt = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfigurable());
|
||||
PreparedStatement toConfigFetchStmt = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfigurable());
|
||||
PreparedStatement driverConfigfetchStmt = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfigurable());
|
||||
PreparedStatement jobInputFetchStmt = conn.prepareStatement(crudQueries.getStmtFetchJobInput());) {
|
||||
PreparedStatement jobInputFetchStmt = conn.prepareStatement(crudQueries.getStmtFetchJobInputByJob());) {
|
||||
|
||||
// Note: Job does not hold a explicit reference to the driver since every
|
||||
// job has the same driver
|
||||
@ -1813,9 +1863,9 @@ private List<MJob> loadJobs(PreparedStatement stmt,
|
||||
(crudQueries.getStmtSelectConfigForConfigurable());
|
||||
PreparedStatement configStmt = conn.prepareStatement(crudQueries
|
||||
.getStmtSelectConfigForConfiguration());
|
||||
PreparedStatement jobInputFetchStmt = conn.prepareStatement(crudQueries.getStmtFetchJobInput());
|
||||
PreparedStatement jobInputFetchStmt = conn.prepareStatement(crudQueries.getStmtFetchJobInputByJob());
|
||||
PreparedStatement inputStmt = conn.prepareStatement(crudQueries
|
||||
.getStmtFetchLinkInput())
|
||||
.getStmtFetchLinkInputByJob())
|
||||
) {
|
||||
|
||||
// Note: Job does not hold a explicit reference to the driver since every
|
||||
@ -2125,7 +2175,7 @@ private void loadDriverConfigs(List<MConfig> driverConfig,
|
||||
// get the overrides value from the SQ_INPUT_RELATION table
|
||||
String overrides = getOverrides(inputId, conn);
|
||||
String inputEnumValues = rsetInput.getString(9);
|
||||
String value = readInputValue(rsetInput.getString(10), rsetInput.getBoolean(11), rsetInput.getString(12), rsetInput.getString(13));
|
||||
String value = readInputValue(MasterKeyManager.getInstance(), rsetInput.getString(10), rsetInput.getBoolean(11), rsetInput.getString(12), rsetInput.getString(13));
|
||||
MInputType mit = MInputType.valueOf(inputType);
|
||||
MInput input = null;
|
||||
switch (mit) {
|
||||
@ -2222,6 +2272,58 @@ private Direction findConfigDirection(long configId, Connection conn) throws SQL
|
||||
}
|
||||
}
|
||||
|
||||
private void transitionInputs(MasterKeyManager fromMasterKeyManager, MasterKeyManager toMasterKeyManager,
|
||||
PreparedStatement selectInputsStatement, PreparedStatement updateInputsStatement) throws SQLException {
|
||||
try (ResultSet inputs = selectInputsStatement.executeQuery()) {
|
||||
while (inputs.next()) {
|
||||
long inputId = inputs.getLong(1);
|
||||
boolean encrypted = inputs.getBoolean(11);
|
||||
boolean sensitive = inputs.getBoolean(6);
|
||||
|
||||
if (encrypted) {
|
||||
assert(fromMasterKeyManager != null);
|
||||
// We need to decrypt the input first
|
||||
String encryptedValue = inputs.getString(10);
|
||||
String iv = inputs.getString(12);
|
||||
String hmac = inputs.getString(13);
|
||||
|
||||
String plainTextValue = readInputValue(fromMasterKeyManager, encryptedValue, encrypted, iv, hmac);
|
||||
|
||||
if (toMasterKeyManager != null && sensitive) {
|
||||
// We need to encrypt the input
|
||||
String newIv = toMasterKeyManager.generateRandomIv();
|
||||
String encryptedInput = toMasterKeyManager.encryptWithMasterKey(plainTextValue, newIv);
|
||||
|
||||
updateInputsStatement.setString(1, encryptedInput);
|
||||
updateInputsStatement.setBoolean(2, true);
|
||||
updateInputsStatement.setString(3, newIv);
|
||||
updateInputsStatement.setString(4, toMasterKeyManager.generateHmacWithMasterHmacKey(encryptedInput));
|
||||
updateInputsStatement.setLong(5, inputId);
|
||||
} else {
|
||||
// Store the plaintext
|
||||
updateInputsStatement.setString(1, plainTextValue);
|
||||
updateInputsStatement.setBoolean(2, false);
|
||||
updateInputsStatement.setNull(3, Types.VARCHAR);
|
||||
updateInputsStatement.setNull(4, Types.VARCHAR);
|
||||
updateInputsStatement.setLong(5, inputId);
|
||||
}
|
||||
updateInputsStatement.executeUpdate();
|
||||
} else if (toMasterKeyManager != null && sensitive) {
|
||||
// We need to encrypt the input
|
||||
String plainTextValue = inputs.getString(10);
|
||||
String newIv = toMasterKeyManager.generateRandomIv();
|
||||
String encryptedInput = toMasterKeyManager.encryptWithMasterKey(plainTextValue, newIv);
|
||||
|
||||
updateInputsStatement.setString(1, encryptedInput);
|
||||
updateInputsStatement.setBoolean(2, true);
|
||||
updateInputsStatement.setString(3, newIv);
|
||||
updateInputsStatement.setString(4, toMasterKeyManager.generateHmacWithMasterHmacKey(encryptedInput));
|
||||
updateInputsStatement.setLong(5, inputId);
|
||||
updateInputsStatement.executeUpdate();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void loadInputsForConfigs(MConfigList mConfigList, PreparedStatement configStmt, PreparedStatement inputStmt) throws SQLException {
|
||||
for (MConfig mConfig : mConfigList.getConfigs()) {
|
||||
@ -2237,7 +2339,7 @@ private void loadInputsForConfigs(MConfigList mConfigList, PreparedStatement con
|
||||
while (inputResults.next()) {
|
||||
long inputId = inputResults.getLong(1);
|
||||
String inputName = inputResults.getString(2);
|
||||
String value = readInputValue(inputResults.getString(10), inputResults.getBoolean(11), inputResults.getString(12), inputResults.getString(13));
|
||||
String value = readInputValue(MasterKeyManager.getInstance(), inputResults.getString(10), inputResults.getBoolean(11), inputResults.getString(12), inputResults.getString(13));
|
||||
if (mConfig.getName().equals(configName) && mConfig.getInputNames().contains(inputName)) {
|
||||
MInput mInput = mConfig.getInput(inputName);
|
||||
mInput.setPersistenceId(inputId);
|
||||
@ -2262,9 +2364,9 @@ private void loadInputsForConfigs(MConfigList mConfigList, PreparedStatement con
|
||||
* @param hmac HMAC for tamper resistance
|
||||
* @return The input value
|
||||
*/
|
||||
private String readInputValue(String possiblyEncryptedValue, boolean encrypted, String iv, String hmac) throws SqoopException {
|
||||
private String readInputValue(MasterKeyManager masterKeyManager, String possiblyEncryptedValue, boolean encrypted, String iv, String hmac) throws SqoopException {
|
||||
if (encrypted) {
|
||||
return MasterKeyManager.getInstance().decryptWithMasterKey(possiblyEncryptedValue, iv, hmac);
|
||||
return masterKeyManager.decryptWithMasterKey(possiblyEncryptedValue, iv, hmac);
|
||||
} else {
|
||||
return possiblyEncryptedValue;
|
||||
}
|
||||
@ -2318,7 +2420,7 @@ public void loadConnectorConfigs(List<MConfig> linkConfig, List<MConfig> fromCon
|
||||
// get the overrides value from the SQ_INPUT_RELATION table
|
||||
String overrides = getOverrides(inputId, conn);
|
||||
String inputEnumValues = rsetInput.getString(9);
|
||||
String value = readInputValue(rsetInput.getString(10), rsetInput.getBoolean(11), rsetInput.getString(12), rsetInput.getString(13));
|
||||
String value = readInputValue(MasterKeyManager.getInstance(), rsetInput.getString(10), rsetInput.getBoolean(11), rsetInput.getString(12), rsetInput.getString(13));
|
||||
|
||||
MInputType mit = MInputType.valueOf(inputType);
|
||||
|
||||
|
@ -215,7 +215,7 @@ public class CommonRepositoryInsertUpdateDeleteSelectQuery {
|
||||
* *******LINK INPUT TABLE *************
|
||||
*/
|
||||
//DML: Get inputs and values for a given link
|
||||
private static final String STMT_FETCH_LINK_INPUT =
|
||||
private static final String STMT_FETCH_LINK_INPUT_BY_JOB =
|
||||
"SELECT "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_ID) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_NAME) + ", "
|
||||
@ -237,11 +237,38 @@ public class CommonRepositoryInsertUpdateDeleteSelectQuery {
|
||||
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_CONFIG) + " = ?"
|
||||
+ " ORDER BY " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_INDEX);
|
||||
|
||||
private static final String STMT_FETCH_LINK_INPUT =
|
||||
"SELECT "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_ID) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_NAME) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_CONFIG) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_INDEX) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_TYPE) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_STRMASK) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_STRLENGTH) + ","
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_EDITABLE) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_ENUMVALS) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_VALUE) + ", "
|
||||
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_ENCRYPTED) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_IV) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_HMAC)
+ " FROM " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_INPUT_NAME)
+ " RIGHT OUTER JOIN " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_LINK_INPUT_NAME)
+ " ON " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_ID) + " = " + CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_INPUT);

private static final String UPDATE_LINK_INPUT =
"UPDATE " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_LINK_INPUT_NAME) + " SET "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_VALUE) + " = ?, "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_ENCRYPTED) + " = ?, "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_IV) + " = ?, "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_HMAC) + " = ? "
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_INPUT) + " = ?";

/**
* *******JOB INPUT TABLE *************
*/
//DML: Fetch inputs and values for a given job
private static final String STMT_FETCH_JOB_INPUT =
private static final String STMT_FETCH_JOB_INPUT_BY_JOB =
"SELECT "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_ID) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_NAME) + ", "
@ -263,6 +290,35 @@ public class CommonRepositoryInsertUpdateDeleteSelectQuery {
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_CONFIG) + " = ?"
+ " ORDER BY " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_INDEX);

private static final String STMT_FETCH_JOB_INPUT =
"SELECT "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_ID) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_NAME) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_CONFIG) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_INDEX) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_TYPE) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_STRMASK) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_STRLENGTH) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_EDITABLE) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQI_ENUMVALS) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_VALUE) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_ENCRYPTED) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_IV) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_HMAC)
+ " FROM " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_INPUT_NAME)
+ " RIGHT OUTER JOIN " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_JOB_INPUT_NAME)
+ " ON " + CommonRepoUtils.escapeColumnName(COLUMN_SQBI_INPUT) + " = " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_ID)
+ " ORDER BY " + CommonRepoUtils.escapeColumnName(COLUMN_SQI_INDEX);

private static final String UPDATE_JOB_INPUT =
"UPDATE " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_JOB_INPUT_NAME) + " SET "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_VALUE) + " = ?, "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_ENCRYPTED) + " = ?, "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_IV) + " = ?, "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQBI_HMAC) + " = ? "
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQBI_INPUT) + " = ?";

/**
* *******LINK TABLE *************
*/
@ -758,7 +814,8 @@ public class CommonRepositoryInsertUpdateDeleteSelectQuery {
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQ_CFG_CONFIGURABLE) + " = ?)";

private static final String STMT_SELECT_SQ_MASTER_KEY =
"SELECT " + CommonRepoUtils.escapeColumnName(COLUMN_SQMK_SECRET) + ", "
"SELECT " + CommonRepoUtils.escapeColumnName(COLUMN_SQMK_ID) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQMK_SECRET) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQMK_HMAC) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQMK_SALT) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQMK_IV)
@ -772,6 +829,10 @@ public class CommonRepositoryInsertUpdateDeleteSelectQuery {
+ CommonRepoUtils.escapeColumnName(COLUMN_SQMK_IV)
+ ") VALUES (?, ?, ?, ?)";

private static final String DELETE_SQ_MASTER_KEY =
"DELETE FROM " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_MASTER_KEY_NAME)
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQMK_ID) + " = ?";

public String getStmtSelectSqdIdBySqdName() {
return STMT_SELECT_SQD_ID_BY_SQD_NAME;
@ -821,14 +882,30 @@ public String getStmtInsertIntoInput() {
return STMT_INSERT_INTO_INPUT;
}

public String getStmtFetchLinkInput() {
return STMT_FETCH_LINK_INPUT;
public String getStmtFetchLinkInputByJob() {
return STMT_FETCH_LINK_INPUT_BY_JOB;
}

public String getUpdateLinkInput() {
return UPDATE_LINK_INPUT;
}

public String getUpdateJobInput() {
return UPDATE_JOB_INPUT;
}

public String getStmtFetchJobInput() {
return STMT_FETCH_JOB_INPUT;
}

public String getStmtFetchJobInputByJob() {
return STMT_FETCH_JOB_INPUT_BY_JOB;
}

public String getStmtFetchLinkInput() {
return STMT_FETCH_LINK_INPUT;
}

public String getStmtInsertLink() {
return STMT_INSERT_LINK;
}
@ -1082,4 +1159,8 @@ public String getStmtSelectSqMasterKey() {
public String getStmtInsertSqMasterKey() {
return STMT_INSERT_SQ_MASTER_KEY;
}

public String getDeleteSqMasterKey() {
return DELETE_SQ_MASTER_KEY;
}
}
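
A minimal sketch (not part of this change) of how the UPDATE_LINK_INPUT / UPDATE_JOB_INPUT statements above might be bound with plain JDBC. It assumes an open java.sql.Connection; the class, method, and variable names are illustrative only, and the parameter order simply follows the SQL as written (value, encrypted flag, IV, HMAC, then the input id in the WHERE clause).

import java.sql.Connection;
import java.sql.PreparedStatement;

class EncryptedInputUpdateSketch {
  // Illustrative only: binds the five parameters of UPDATE_LINK_INPUT / UPDATE_JOB_INPUT
  // in the order the statements above declare them.
  static void updateInput(Connection conn, String updateSql, long inputId,
                          String cipherText, String iv, String hmac) throws Exception {
    try (PreparedStatement statement = conn.prepareStatement(updateSql)) {
      statement.setString(1, cipherText);  // COLUMN_SQ_LNKI_VALUE / COLUMN_SQBI_VALUE
      statement.setBoolean(2, true);       // COLUMN_SQ_LNKI_ENCRYPTED / COLUMN_SQBI_ENCRYPTED
      statement.setString(3, iv);          // COLUMN_SQ_LNKI_IV / COLUMN_SQBI_IV
      statement.setString(4, hmac);        // COLUMN_SQ_LNKI_HMAC / COLUMN_SQBI_HMAC
      statement.setLong(5, inputId);       // WHERE ... INPUT = ?
      statement.executeUpdate();
    }
  }
}
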
@ -193,7 +193,7 @@
* | SQ_LNKI_INPUT: BIGINT PK | FK SQ_INPUT(SQI_ID)
* | SQ_LNKI_VALUE: LONG VARCHAR|
* | SQ_LNKI_ENCRYPTED: BOOLEAN |
* | SQ_LNKI_IV: VARCHAR(171) |
* | SQ_LNKI_IV: VARCHAR(171) |
* | SQ_LNKI_HMAC: VARCHAR(171) |
* +----------------------------+
* </pre>
@ -185,6 +185,11 @@ limitations under the License.
<artifactId>parquet-avro</artifactId>
</dependency>

<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</dependency>

</dependencies>

<!-- Add classifier name to the JAR name -->
@ -19,6 +19,7 @@

import org.apache.commons.io.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.common.VersionInfo;
import org.apache.sqoop.json.JSONUtils;
import org.apache.sqoop.json.JobBean;
@ -28,16 +29,18 @@
import org.apache.sqoop.submission.SubmissionStatus;
import org.apache.sqoop.test.infrastructure.Infrastructure;
import org.apache.sqoop.test.infrastructure.SqoopTestCase;
import org.apache.sqoop.test.infrastructure.providers.DatabaseInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.KdcInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.SqoopInfrastructureProvider;
import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
import org.apache.sqoop.test.utils.HdfsUtils;
import org.apache.sqoop.tools.tool.JSONConstants;
import org.apache.sqoop.tools.tool.RepositoryDumpTool;
import org.apache.sqoop.tools.tool.RepositoryLoadTool;
import org.apache.sqoop.utils.UrlSafeUtils;
import org.json.simple.JSONObject;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.testng.annotations.*;

import java.io.*;
import java.util.List;
@ -46,14 +49,24 @@
import static org.testng.Assert.assertTrue;

@Test(groups = "no-real-cluster")
@Infrastructure(dependencies = {KdcInfrastructureProvider.class, SqoopInfrastructureProvider.class})
@Infrastructure(dependencies = {HadoopInfrastructureProvider.class})
public class RepositoryDumpLoadToolTest extends SqoopTestCase {

private JettySqoopMiniCluster jettySqoopMiniCluster;
private SqoopClient client;
private String jsonFilePath;

// do the load test and insert data to repo first, then do the dump test.
@Test(dependsOnMethods = { "testLoad" })
@Test
public void testDump() throws Exception {
// load data into repository
RepositoryLoadTool rlt = new RepositoryLoadTool();
rlt.setInTest(true);
rlt.runToolWithConfiguration(new String[]{"-i", jsonFilePath});
verifyLinks(client.getLinks());
verifyJobs(client.getJobs());
verifySubmissions(client.getSubmissions());

// dump the repository
RepositoryDumpTool rdt = new RepositoryDumpTool();
rdt.setInTest(true);
@ -87,16 +100,6 @@ public void testDump() throws Exception {
}
}

@Test
public void testLoad() throws Exception {
RepositoryLoadTool rlt = new RepositoryLoadTool();
rlt.setInTest(true);
rlt.runToolWithConfiguration(new String[]{"-i", jsonFilePath});
verifyLinks(getClient().getLinks());
verifyJobs(getClient().getJobs());
verifySubmissions(getClient().getSubmissions());
}

private void verifyLinks(List<MLink> links) {
for (MLink link : links) {
String linkName = link.getName();
@ -126,8 +129,15 @@ private void verifySubmissions(List<MSubmission> submissions) {
assertEquals(submission.getStatus(), SubmissionStatus.SUCCEEDED);
}

// generate the json file without the license
@BeforeMethod
private void startCluster() throws Exception {
jettySqoopMiniCluster = new JettySqoopMiniCluster(HdfsUtils.joinPathFragments(super.getTemporaryPath(), this.getClass().getName()), getHadoopConf());
jettySqoopMiniCluster.start();
client = new SqoopClient(jettySqoopMiniCluster.getServerUrl());
}

// generate the json file without the license
@BeforeMethod(dependsOnMethods = { "startCluster" })
public void prepareJsonFile() throws Exception {
String testFilePath = getClass().getResource("/repoLoadToolTest.json").getPath();
jsonFilePath = HdfsUtils.joinPathFragments(getTemporaryPath(), "repoLoadTest.json");
@ -140,7 +150,7 @@ public void prepareJsonFile() throws Exception {
// for hdfs connector, DirectoryExistsValidator is responsible for validation
// replace the link config dir by the local path.
if (line.indexOf("linkConfReplacement") > 0) {
line = line.replaceAll("linkConfReplacement", UrlSafeUtils.urlEncode(getSqoopMiniClusterTemporaryPath() + "/config/"));
line = line.replaceAll("linkConfReplacement", UrlSafeUtils.urlEncode(jettySqoopMiniCluster.getTemporaryPath() + "/config/"));
}
writer.write(line);
}
@ -148,4 +158,9 @@ public void prepareJsonFile() throws Exception {
writer.flush();
}
}

@AfterMethod
private void stopCluster() throws Exception {
jettySqoopMiniCluster.stop();
}
}
@ -0,0 +1,444 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.integration.tools;

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.common.MapContext;
import org.apache.sqoop.core.PropertiesConfigurationProvider;
import org.apache.sqoop.core.SqoopConfiguration;
import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MStringInput;
import org.apache.sqoop.repository.MasterKeyManager;
import org.apache.sqoop.repository.RepositoryManager;
import org.apache.sqoop.repository.common.CommonRepoUtils;
import org.apache.sqoop.repository.common
.CommonRepositoryInsertUpdateDeleteSelectQuery;
import org.apache.sqoop.security.SecurityConstants;
import org.apache.sqoop.test.infrastructure.Infrastructure;
import org.apache.sqoop.test.infrastructure.SqoopTestCase;
import org.apache.sqoop.test.infrastructure.providers.DatabaseInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
import org.apache.sqoop.test.utils.HdfsUtils;
import org.apache.sqoop.tools.tool.RepositoryEncryptionTool;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Map;

import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.COLUMN_SQ_LNKI_ENCRYPTED;
import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.COLUMN_SQ_LNKI_HMAC;
import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.COLUMN_SQ_LNKI_INPUT;
import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.COLUMN_SQ_LNKI_IV;
import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.COLUMN_SQ_LNKI_VALUE;
import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.SCHEMA_SQOOP;
import static org.apache.sqoop.repository.common.CommonRepositorySchemaConstants.TABLE_SQ_LINK_INPUT_NAME;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@Test(groups = "no-real-cluster")
@Infrastructure(dependencies = {DatabaseInfrastructureProvider.class, HadoopInfrastructureProvider.class})
public class RepositoryEncryptionToolTest extends SqoopTestCase {

private SqoopMiniCluster sqoopMiniCluster;
private String temporaryPath;

public static final String JDBC_URL = "jdbc:derby:memory:myDB";
public static final String INPUT_VALUE_QUERY =
"SELECT " + CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_INPUT) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_VALUE) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_ENCRYPTED) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_IV) + ", "
+ CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_HMAC)
+ " FROM " + CommonRepoUtils.getTableName(SCHEMA_SQOOP, TABLE_SQ_LINK_INPUT_NAME)
+ " WHERE " + CommonRepoUtils.escapeColumnName(COLUMN_SQ_LNKI_INPUT) + " = ?";

private String passwordGenerator;
private String hmacAlgorithm;
private String cipherAlgorithm;
private int cipherKeySize;
private String cipherSpec;
private String pbkdf2Algorithm;
private int pbkdf2Rounds;
private int ivLength;

public static class SqoopMiniCluster extends JettySqoopMiniCluster {

private boolean repositoryEncryptionEnabled;

private String passwordGenerator;
private String hmacAlgorithm;
private String cipherAlgorithm;
private int cipherKeySize;
private String cipherSpec;
private String pbkdf2Algorithm;
private int pbkdf2Rounds;
private int ivLength;

public SqoopMiniCluster(String temporaryPath, Configuration configuration) throws Exception {
super(temporaryPath, configuration);
this.repositoryEncryptionEnabled = false;
}

public SqoopMiniCluster(String temporaryPath, Configuration configuration, String passwordGenerator, String hmacAlgorithm, String cipherAlgorithm, int cipherKeySize, String cipherSpec, String pbkdf2Algorithm, int pbkdf2Rounds, int ivLength) throws Exception {
super(temporaryPath, configuration);
this.repositoryEncryptionEnabled = true;
this.passwordGenerator = passwordGenerator;
this.hmacAlgorithm = hmacAlgorithm;
this.cipherAlgorithm = cipherAlgorithm;
this.cipherKeySize = cipherKeySize;
this.cipherSpec = cipherSpec;
this.pbkdf2Algorithm = pbkdf2Algorithm;
this.pbkdf2Rounds = pbkdf2Rounds;
this.ivLength = ivLength;
}

@Override
protected Map<String, String> getSecurityConfiguration() {
Map<String, String> properties = super.getSecurityConfiguration();

// Remove all default repository encryption values
properties.remove(SecurityConstants.REPO_ENCRYPTION_ENABLED);
properties.remove(SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR);
properties.remove(SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM);
properties.remove(SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM);
properties.remove(SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE);
properties.remove(SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE);
properties.remove(SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC);
properties.remove(SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM);
properties.remove(SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS);

properties.put(SecurityConstants.REPO_ENCRYPTION_ENABLED, String.valueOf(repositoryEncryptionEnabled));
if (repositoryEncryptionEnabled) {
properties.put(SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR, passwordGenerator);
properties.put(SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM, hmacAlgorithm);
properties.put(SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM, cipherAlgorithm);
properties.put(SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE, String.valueOf(cipherKeySize));
properties.put(SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE, String.valueOf(ivLength));
properties.put(SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC, cipherSpec);
properties.put(SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM, pbkdf2Algorithm);
properties.put(SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS, String.valueOf(pbkdf2Rounds));
}

return properties;
}
}

@BeforeMethod
public void before() throws Exception {
dropRepository();

passwordGenerator = "echo test";
hmacAlgorithm = "HmacSHA256";
cipherAlgorithm = "AES";
cipherKeySize = 16;
cipherSpec = "AES/CBC/PKCS5Padding";
pbkdf2Algorithm = "PBKDF2WithHmacSHA1";
pbkdf2Rounds = 4000;
ivLength = 16;

temporaryPath = HdfsUtils.joinPathFragments(super.getTemporaryPath(), getTestName());
}

@Test
public void testNotEncryptedToEncrypted() throws Exception {
// Start nonencrypted sqoop instance
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf());
sqoopMiniCluster.start();

verifyMasterKeyDoesNotExist();

// Create a link and a job with a secure input
SqoopClient client = new SqoopClient(sqoopMiniCluster.getServerUrl());
MLink link = client.createLink("generic-jdbc-connector");
link.setName("zelda");
fillRdbmsLinkConfig(link);
client.saveLink(link);

MStringInput sensitiveInput = link.getConnectorLinkConfig().getStringInput("linkConfig.password");
verifyPlaintextInput(sensitiveInput.getPersistenceId(), sensitiveInput.getValue());

// Stop sqoop instance
sqoopMiniCluster.stop();

// Run tool
RepositoryEncryptionTool repositoryEncryptionTool = new RepositoryEncryptionTool();
repositoryEncryptionTool.runToolWithConfiguration(new String[] {
"-T" + SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR + "=" + passwordGenerator,
"-T" + SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM + "=" + hmacAlgorithm,
"-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM + "=" + cipherAlgorithm,
"-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE + "=" + cipherKeySize,
"-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC + "=" + cipherSpec,
"-T" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM + "=" + pbkdf2Algorithm,
"-T" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS + "=" + pbkdf2Rounds,
"-T" + SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE + "=" + ivLength,
});

cleanUpAfterTool();

// Verify that the data is encrypted
StringBuffer cipherText = new StringBuffer();
StringBuffer iv = new StringBuffer();
StringBuffer hmac = new StringBuffer();
readEncryptedInput(sensitiveInput.getPersistenceId(), cipherText, iv, hmac);

// Read the encrypted data by using the MasterKeyManager the server initializes
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf(), passwordGenerator,
hmacAlgorithm, cipherAlgorithm, cipherKeySize, cipherSpec, pbkdf2Algorithm, pbkdf2Rounds, ivLength);
sqoopMiniCluster.start();

String decrypted = MasterKeyManager.getInstance().decryptWithMasterKey(cipherText.toString(), iv.toString(), hmac.toString());

Assert.assertEquals(sensitiveInput.getValue(), decrypted);
}

@Test
public void testEncryptedToNotEncrypted() throws Exception {
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf(), passwordGenerator,
hmacAlgorithm, cipherAlgorithm, cipherKeySize, cipherSpec, pbkdf2Algorithm, pbkdf2Rounds, ivLength);
sqoopMiniCluster.start();

SqoopClient client = new SqoopClient(sqoopMiniCluster.getServerUrl());
MLink link = client.createLink("generic-jdbc-connector");
link.setName("zelda");
fillRdbmsLinkConfig(link);
client.saveLink(link);
MStringInput sensitiveInput = link.getConnectorLinkConfig().getStringInput("linkConfig.password");

StringBuffer cipherText = new StringBuffer();
StringBuffer iv = new StringBuffer();
StringBuffer hmac = new StringBuffer();
readEncryptedInput(sensitiveInput.getPersistenceId(), cipherText, iv, hmac);

String decrypted = MasterKeyManager.getInstance().decryptWithMasterKey(cipherText.toString(), iv.toString(), hmac.toString());

// Stop sqoop instance
sqoopMiniCluster.stop();

// Run tool
RepositoryEncryptionTool repositoryEncryptionTool = new RepositoryEncryptionTool();
repositoryEncryptionTool.runToolWithConfiguration(new String[] {
"-F" + SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR + "=" + passwordGenerator,
"-F" + SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM + "=" + hmacAlgorithm,
"-F" + SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM + "=" + cipherAlgorithm,
"-F" + SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE + "=" + cipherKeySize,
"-F" + SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC + "=" + cipherSpec,
"-F" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM + "=" + pbkdf2Algorithm,
"-F" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS + "=" + pbkdf2Rounds,
"-F" + SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE + "=" + ivLength,
});

cleanUpAfterTool();

sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf());
sqoopMiniCluster.start();

verifyPlaintextInput(sensitiveInput.getPersistenceId(), decrypted);

verifyMasterKeyDoesNotExist();
}

@Test
public void testEncryptedToEncrypted() throws Exception {
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf(), passwordGenerator,
hmacAlgorithm, cipherAlgorithm, cipherKeySize, cipherSpec, pbkdf2Algorithm, pbkdf2Rounds, ivLength);
sqoopMiniCluster.start();

SqoopClient client = new SqoopClient(sqoopMiniCluster.getServerUrl());
MLink link = client.createLink("generic-jdbc-connector");
link.setName("zelda");
fillRdbmsLinkConfig(link);
client.saveLink(link);
MStringInput sensitiveInput = link.getConnectorLinkConfig().getStringInput("linkConfig.password");

StringBuffer cipherTextFrom = new StringBuffer();
StringBuffer ivFrom = new StringBuffer();
StringBuffer hmacFrom = new StringBuffer();
readEncryptedInput(sensitiveInput.getPersistenceId(), cipherTextFrom, ivFrom, hmacFrom);

String decryptedFirst = MasterKeyManager.getInstance().decryptWithMasterKey(cipherTextFrom.toString(), ivFrom.toString(), hmacFrom.toString());

// Stop sqoop instance
sqoopMiniCluster.stop();

// Run tool
RepositoryEncryptionTool repositoryEncryptionTool = new RepositoryEncryptionTool();
repositoryEncryptionTool.runToolWithConfiguration(new String[] {
"-F" + SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR + "=" + passwordGenerator,
"-F" + SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM + "=" + hmacAlgorithm,
"-F" + SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM + "=" + cipherAlgorithm,
"-F" + SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE + "=" + cipherKeySize,
"-F" + SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC + "=" + cipherSpec,
"-F" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM + "=" + pbkdf2Algorithm,
"-F" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS + "=" + pbkdf2Rounds,
"-F" + SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE + "=" + ivLength,

"-T" + SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR + "=" + passwordGenerator,
"-T" + SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM + "=" + hmacAlgorithm,
"-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM + "=" + cipherAlgorithm,
"-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE + "=" + cipherKeySize,
"-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC + "=" + cipherSpec,
"-T" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM + "=" + pbkdf2Algorithm,
"-T" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS + "=" + pbkdf2Rounds,
"-T" + SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE + "=" + ivLength,
});

cleanUpAfterTool();

StringBuffer cipherTextTo = new StringBuffer();
StringBuffer ivTo = new StringBuffer();
StringBuffer hmacTo = new StringBuffer();

Assert.assertNotEquals(cipherTextFrom, cipherTextTo);

readEncryptedInput(sensitiveInput.getPersistenceId(), cipherTextTo, ivTo, hmacTo);

// Read the encrypted data by using the MasterKeyManager the server initializes
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf(), passwordGenerator,
hmacAlgorithm, cipherAlgorithm, cipherKeySize, cipherSpec, pbkdf2Algorithm, pbkdf2Rounds, ivLength);
sqoopMiniCluster.start();

String decryptedSecond = MasterKeyManager.getInstance().decryptWithMasterKey(cipherTextTo.toString(), ivTo.toString(), hmacTo.toString());

Assert.assertEquals(decryptedFirst, decryptedSecond);
}

@Test
public void testEncryptedToEncryptedUsingConfiguration() throws Exception {
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf(), passwordGenerator,
hmacAlgorithm, cipherAlgorithm, cipherKeySize, cipherSpec, pbkdf2Algorithm, pbkdf2Rounds, ivLength);
sqoopMiniCluster.start();

SqoopClient client = new SqoopClient(sqoopMiniCluster.getServerUrl());
MLink link = client.createLink("generic-jdbc-connector");
link.setName("zelda");
fillRdbmsLinkConfig(link);
client.saveLink(link);
MStringInput sensitiveInput = link.getConnectorLinkConfig().getStringInput("linkConfig.password");

StringBuffer cipherTextFrom = new StringBuffer();
StringBuffer ivFrom = new StringBuffer();
StringBuffer hmacFrom = new StringBuffer();
readEncryptedInput(sensitiveInput.getPersistenceId(), cipherTextFrom, ivFrom, hmacFrom);

String decryptedFirst = MasterKeyManager.getInstance().decryptWithMasterKey(cipherTextFrom.toString(), ivFrom.toString(), hmacFrom.toString());

// Read the configuration context that we will need for the tool
MapContext configurationMapContext = SqoopConfiguration.getInstance().getContext();

// Stop sqoop instance
sqoopMiniCluster.stop();

// Set the configuration
SqoopConfiguration oldSqoopConfiguration = SqoopConfiguration.getInstance();
SqoopConfiguration configurationMock = mock(SqoopConfiguration.class);
when(configurationMock.getContext()).thenReturn(configurationMapContext);
when(configurationMock.getProvider()).thenReturn(new PropertiesConfigurationProvider());
SqoopConfiguration.setInstance(configurationMock);

// Run tool
RepositoryEncryptionTool repositoryEncryptionTool = new RepositoryEncryptionTool();
repositoryEncryptionTool.runToolWithConfiguration(new String[] {
"-FuseConf",
"-TuseConf",
});

cleanUpAfterTool();

StringBuffer cipherTextTo = new StringBuffer();
StringBuffer ivTo = new StringBuffer();
StringBuffer hmacTo = new StringBuffer();

Assert.assertNotEquals(cipherTextFrom, cipherTextTo);

readEncryptedInput(sensitiveInput.getPersistenceId(), cipherTextTo, ivTo, hmacTo);

// Read the encrypted data by using the MasterKeyManager the server initializes
sqoopMiniCluster = new SqoopMiniCluster(temporaryPath, getHadoopConf(), passwordGenerator,
hmacAlgorithm, cipherAlgorithm, cipherKeySize, cipherSpec, pbkdf2Algorithm, pbkdf2Rounds, ivLength);
sqoopMiniCluster.start();

String decryptedSecond = MasterKeyManager.getInstance().decryptWithMasterKey(cipherTextTo.toString(), ivTo.toString(), hmacTo.toString());

Assert.assertEquals(decryptedFirst, decryptedSecond);

SqoopConfiguration.setInstance(oldSqoopConfiguration);
}

private void cleanUpAfterTool() {
RepositoryManager.getInstance().destroy();
MasterKeyManager.getInstance().destroy();
SqoopConfiguration.getInstance().destroy();
}

private void verifyMasterKeyDoesNotExist() throws Exception {
try (PreparedStatement inputSelection = DriverManager.getConnection(JDBC_URL).prepareStatement((new CommonRepositoryInsertUpdateDeleteSelectQuery()).getStmtSelectSqMasterKey())) {
try (ResultSet resultSet = inputSelection.executeQuery()) {
Assert.assertFalse(resultSet.next());
}
}
}

private void verifyPlaintextInput(long persistenceId, String expectedValue) throws Exception {
try (PreparedStatement inputSelection = DriverManager.getConnection(JDBC_URL).prepareStatement(INPUT_VALUE_QUERY)) {
inputSelection.setLong(1, persistenceId);
try (ResultSet resultSet = inputSelection.executeQuery()) {
while (resultSet.next()) {
Assert.assertEquals(expectedValue, resultSet.getString(2));
Assert.assertFalse(resultSet.getBoolean(3));
Assert.assertNull(resultSet.getString(4));
Assert.assertNull(resultSet.getString(5));
}
}
}
}

private void readEncryptedInput(long inputId, StringBuffer cipherText, StringBuffer iv, StringBuffer hmac) throws Exception {
try (PreparedStatement inputSelection = DriverManager.getConnection(JDBC_URL).prepareStatement(INPUT_VALUE_QUERY)) {
inputSelection.setLong(1, inputId);
try (ResultSet resultSet = inputSelection.executeQuery()) {
while (resultSet.next()) {
Assert.assertTrue(resultSet.getBoolean(3));
cipherText.append(resultSet.getString(2));
iv.append(resultSet.getString(4));
hmac.append(resultSet.getString(5));
}
}
}
}

@AfterMethod
public void stopCluster() throws Exception {
sqoopMiniCluster.stop();
dropRepository();
}

private void dropRepository() {
try {
DriverManager.getConnection(JDBC_URL + ";drop=true");
} catch (Exception exception) {
// Dropping the database always throws an exception
}
}
}
@ -38,6 +38,7 @@ public class BuiltinTools {
tools.put("verify", VerifyTool.class);
tools.put("repositorydump", RepositoryDumpTool.class);
tools.put("repositoryload", RepositoryLoadTool.class);
tools.put("repositoryencryption", RepositoryEncryptionTool.class);
}

/**
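
With the tool registered above under the name "repositoryencryption", it can be driven the same way the integration tests in this commit drive it: instantiate RepositoryEncryptionTool and pass "-T" (to) options keyed by the SecurityConstants used elsewhere in this change. The sketch below is illustrative only and mirrors testNotEncryptedToEncrypted; the algorithm and size values are placeholders taken from that test, not recommendations, and in a real deployment the tool would presumably be launched through the sqoop2-tool wrapper rather than from a main method.

import org.apache.sqoop.security.SecurityConstants;
import org.apache.sqoop.tools.tool.RepositoryEncryptionTool;

class EncryptRepositorySketch {
  public static void main(String[] args) {
    // Sketch: encrypt a previously unencrypted repository by supplying only "-T" options.
    new RepositoryEncryptionTool().runToolWithConfiguration(new String[] {
        "-T" + SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR + "=echo secret",
        "-T" + SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM + "=HmacSHA256",
        "-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM + "=AES",
        "-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE + "=16",
        "-T" + SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC + "=AES/CBC/PKCS5Padding",
        "-T" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM + "=PBKDF2WithHmacSHA1",
        "-T" + SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS + "=4000",
        "-T" + SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE + "=16",
    });
  }
}
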
@ -0,0 +1,145 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.tools.tool;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.log4j.Logger;
import org.apache.sqoop.cli.SqoopGnuParser;
import org.apache.sqoop.common.MapContext;
import org.apache.sqoop.common.SqoopException;
import org.apache.sqoop.core.SqoopConfiguration;
import org.apache.sqoop.repository.MasterKeyManager;
import org.apache.sqoop.repository.Repository;
import org.apache.sqoop.repository.RepositoryManager;
import org.apache.sqoop.repository.RepositoryTransaction;
import org.apache.sqoop.security.SecurityConstants;
import org.apache.sqoop.security.SecurityError;
import org.apache.sqoop.tools.ConfiguredTool;
import org.apache.sqoop.utils.PasswordUtils;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class RepositoryEncryptionTool extends ConfiguredTool {
public static final Logger LOG = Logger.getLogger(RepositoryDumpTool.class);

private static String FROM_OPTION = "F";
private static String TO_OPTION = "T";

private static String USE_CONFIGURATION = "useConf";

@Override
public boolean runToolWithConfiguration(String[] arguments) {
Options options = new Options();
options.addOption(OptionBuilder.hasArgs().withValueSeparator().create(FROM_OPTION));
options.addOption(OptionBuilder.hasArgs().withValueSeparator().create(TO_OPTION));

CommandLineParser parser = new SqoopGnuParser();
SqoopConfiguration.getInstance().initialize();
RepositoryManager.getInstance().initialize();
Repository repository = RepositoryManager.getInstance().getRepository();

CommandLine line;
try {
line = parser.parse(options, arguments);
} catch (ParseException e) {
LOG.error("Error parsing command line arguments:", e);
System.out.println("Error parsing command line arguments. Please check Server logs for details.");
return false;
}

Properties fromProperties = line.getOptionProperties(FROM_OPTION);
Properties toProperties = line.getOptionProperties(TO_OPTION);

RepositoryTransaction repositoryTransaction = null;
boolean successful = true;
try {
repositoryTransaction = repository.getTransaction();
repositoryTransaction.begin();

MasterKeyManager fromMasterKeyManager = null;
MasterKeyManager toMasterKeyManager = null;

if (!fromProperties.isEmpty()) {
fromMasterKeyManager = initializeMasterKeyManagerFromProperties(fromProperties, false, repositoryTransaction);
} else {
// Check to make sure there is no master key to prevent corruption
if (repository.getMasterKey(repositoryTransaction) != null) {
System.out.println("Repository is encrypted, need configuration to decrypt");
throw new SqoopException(SecurityError.ENCRYPTION_0013);
}
}

if (!toProperties.isEmpty()) {
toMasterKeyManager = initializeMasterKeyManagerFromProperties(toProperties, true, repositoryTransaction);
}

repository.changeMasterKeyManager(fromMasterKeyManager, toMasterKeyManager, repositoryTransaction);
if (fromMasterKeyManager != null) {
fromMasterKeyManager.deleteMasterKeyFromRepository();
fromMasterKeyManager.destroy();
}

repositoryTransaction.commit();
System.out.println("Changes committed");
} catch (Exception ex) {
if (repositoryTransaction != null) {
repositoryTransaction.rollback();
}
System.out.println("Error running tool. Please check Server logs for details.");
LOG.error(new SqoopException(SecurityError.ENCRYPTION_0012, ex));
successful = false;
} finally {
if (repositoryTransaction != null) {
repositoryTransaction.close();
}
}
return successful;
}

private MasterKeyManager initializeMasterKeyManagerFromProperties(Properties properties, boolean readFromRepository, RepositoryTransaction transaction) {
MasterKeyManager masterKeyManager = new MasterKeyManager();
if (properties.getProperty(USE_CONFIGURATION) != null) {
masterKeyManager.initialize(true, readFromRepository, transaction);
} else {
String hmacAlgorithm = properties.getProperty(SecurityConstants.REPO_ENCRYPTION_HMAC_ALGORITHM);
String cipherAlgorithm = properties.getProperty(SecurityConstants.REPO_ENCRYPTION_CIPHER_ALGORITHM);
String cipherSpec = properties.getProperty(SecurityConstants.REPO_ENCRYPTION_CIPHER_SPEC);
int cipherKeySize = Integer.parseInt(properties.getProperty(SecurityConstants.REPO_ENCRYPTION_CIPHER_KEY_SIZE));
int ivLength = Integer.parseInt(properties.getProperty(SecurityConstants.REPO_ENCRYPTION_INITIALIZATION_VECTOR_SIZE));
String pbkdf2Algorithm = properties.getProperty(SecurityConstants.REPO_ENCRYPTION_PBKDF2_ALGORITHM);
int pbkdf2Rounds = Integer.parseInt(properties.getProperty(SecurityConstants.REPO_ENCRYPTION_PBKDF2_ROUNDS));

// We need to create a MapContext to make reading the password simpler here
Map<String, String> passwordProperties = new HashMap<>();
passwordProperties.put(SecurityConstants.REPO_ENCRYPTION_PASSWORD, properties.getProperty(SecurityConstants.REPO_ENCRYPTION_PASSWORD));
passwordProperties.put(SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR, properties.getProperty(SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR));
String password = PasswordUtils.readPassword(new MapContext(passwordProperties), SecurityConstants.REPO_ENCRYPTION_PASSWORD, SecurityConstants.REPO_ENCRYPTION_PASSWORD_GENERATOR);

masterKeyManager.initialize(true, hmacAlgorithm, cipherAlgorithm, cipherSpec, cipherKeySize, ivLength, pbkdf2Algorithm, pbkdf2Rounds, password, readFromRepository, transaction);
}

return masterKeyManager;
}
}
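
As the runToolWithConfiguration logic above shows, the "-F" (from) properties describe the key currently protecting the repository and the "-T" (to) properties describe its replacement: supplying only "-T" encrypts a plaintext repository, supplying only "-F" decrypts it, and supplying both rotates the master key; "-FuseConf"/"-TuseConf" read the values from the server configuration instead of the command line. A minimal rotation sketch (not part of the commit) along the lines of testEncryptedToEncryptedUsingConfiguration, assuming the server configuration is available to the tool:

import org.apache.sqoop.tools.tool.RepositoryEncryptionTool;

class RotateMasterKeySketch {
  public static void main(String[] args) {
    // Sketch: rotate the master key of an already-encrypted repository, taking both the
    // current ("from") and replacement ("to") encryption settings from the configuration.
    boolean ok = new RepositoryEncryptionTool().runToolWithConfiguration(new String[] {
        "-FuseConf",
        "-TuseConf",
    });
    if (!ok) {
      System.err.println("Key rotation failed; see the server logs for details.");
    }
  }
}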