
SQOOP-2913: Make sqoop fail if user uses --direct connector for case when --direct connector is not available

(Anna Szonyi via Attila Szabo)
Attila Szabo 2016-09-23 16:30:36 +02:00
parent 7c1754270f
commit b007e4d59d
9 changed files with 676 additions and 121 deletions
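The change makes Sqoop reject a --direct request when the resolved connection manager has no direct connector, instead of silently ignoring the flag. As a minimal, hypothetical sketch (not part of the commit), the check introduced in BaseSqoopTool.validateHasDirectConnectorOption amounts to the following, assuming Sqoop's SqoopOptions and the new SupportedManagers enum are on the classpath; the class name DirectConnectorCheckSketch and the sample connect string are illustrative only:

import com.cloudera.sqoop.SqoopOptions;
import org.apache.sqoop.manager.SupportedManagers;

public class DirectConnectorCheckSketch {
  public static void main(String[] args) {
    SqoopOptions options = new SqoopOptions();
    // Illustrative connect string: HSQLDB has no direct connector.
    options.setConnectString("jdbc:hsqldb:hsql://localhost/db");
    // Equivalent of passing --direct on the command line.
    options.setDirect(true);

    SupportedManagers m = SupportedManagers.createFrom(options);
    // This is the condition validateHasDirectConnectorOption now rejects
    // with an InvalidOptionsException.
    if (m != null && options.isDirect() && !m.hasDirectConnector()) {
      System.out.println("--direct was requested, but " + m + " has no direct connector");
    }
  }
}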

BaseSqoopTool.java

@@ -17,6 +17,9 @@
*/
package com.cloudera.sqoop.tool;
import com.cloudera.sqoop.SqoopOptions;
import org.apache.sqoop.manager.SupportedManagers;
/**
* @deprecated Moving to use org.apache.sqoop namespace.
*/
@@ -205,4 +208,11 @@ public BaseSqoopTool(String toolName) {
super(toolName);
}
protected void validateHasDirectConnectorOption(SqoopOptions options) throws SqoopOptions.InvalidOptionsException {
SupportedManagers m = SupportedManagers.createFrom(options);
if (m != null && options.isDirect() && !m.hasDirectConnector()) {
throw new SqoopOptions.InvalidOptionsException(
"Was called with the --direct option, but no direct connector available.");
}
}
}

DefaultManagerFactory.java

@@ -25,6 +25,17 @@
import com.cloudera.sqoop.metastore.JobData;
import com.cloudera.sqoop.manager.ConnManager;
import static org.apache.sqoop.manager.SupportedManagers.CUBRID;
import static org.apache.sqoop.manager.SupportedManagers.DB2;
import static org.apache.sqoop.manager.SupportedManagers.HSQLDB;
import static org.apache.sqoop.manager.SupportedManagers.JTDS_SQLSERVER;
import static org.apache.sqoop.manager.SupportedManagers.MYSQL;
import static org.apache.sqoop.manager.SupportedManagers.NETEZZA;
import static org.apache.sqoop.manager.SupportedManagers.ORACLE;
import static org.apache.sqoop.manager.SupportedManagers.POSTGRES;
import static org.apache.sqoop.manager.SupportedManagers.SQLSERVER;
/**
* Contains instantiation code for all ConnManager implementations
* shipped and enabled by default in Sqoop.
@@ -34,6 +45,7 @@ public class DefaultManagerFactory
public static final Log LOG = LogFactory.getLog(
DefaultManagerFactory.class.getName());
public static final String NET_SOURCEFORGE_JTDS_JDBC_DRIVER = "net.sourceforge.jtds.jdbc.Driver";
public ConnManager accept(JobData data) {
SqoopOptions options = data.getSqoopOptions();
@@ -48,37 +60,35 @@ public ConnManager accept(JobData data) {
LOG.debug("Trying with scheme: " + scheme);
- if (scheme.equals("jdbc:mysql:")) {
+ if (MYSQL.isTheManagerTypeOf(options)) {
if (options.isDirect()) {
return new DirectMySQLManager(options);
} else {
return new MySQLManager(options);
}
- } else if (scheme.equals("jdbc:postgresql:")) {
+ } else if (POSTGRES.isTheManagerTypeOf(options)) {
if (options.isDirect()) {
return new DirectPostgresqlManager(options);
} else {
return new PostgresqlManager(options);
}
- } else if (scheme.startsWith("jdbc:hsqldb:")) {
+ } else if (HSQLDB.isTheManagerTypeOf(options)) {
return new HsqldbManager(options);
- } else if (scheme.startsWith("jdbc:oracle:")) {
+ } else if (ORACLE.isTheManagerTypeOf(options)) {
return new OracleManager(options);
- } else if (scheme.startsWith("jdbc:sqlserver:")) {
+ } else if (SQLSERVER.isTheManagerTypeOf(options)) {
return new SQLServerManager(options);
- } else if (scheme.startsWith("jdbc:jtds:sqlserver:")) {
- return new SQLServerManager(
- "net.sourceforge.jtds.jdbc.Driver",
- options);
- } else if (scheme.startsWith("jdbc:db2:")) {
+ } else if (JTDS_SQLSERVER.isTheManagerTypeOf(options)) {
+ return new SQLServerManager(NET_SOURCEFORGE_JTDS_JDBC_DRIVER, options);
+ } else if (DB2.isTheManagerTypeOf(options)) {
return new Db2Manager(options);
- } else if (scheme.startsWith("jdbc:netezza:")) {
+ } else if (NETEZZA.isTheManagerTypeOf(options)) {
if (options.isDirect()) {
return new DirectNetezzaManager(options);
} else {
return new NetezzaManager(options);
}
- } else if (scheme.startsWith("jdbc:cubrid:")) {
+ } else if (CUBRID.isTheManagerTypeOf(options)) {
return new CubridManager(options);
} else {
return null;
@@ -86,32 +96,7 @@ public ConnManager accept(JobData data) {
}
protected String extractScheme(SqoopOptions options) {
- String connectStr = options.getConnectString();
- // java.net.URL follows RFC-2396 literally, which does not allow a ':'
- // character in the scheme component (section 3.1). JDBC connect strings,
- // however, commonly have a multi-scheme addressing system. e.g.,
- // jdbc:mysql://...; so we cannot parse the scheme component via URL
- // objects. Instead, attempt to pull out the scheme as best as we can.
- // First, see if this is of the form [scheme://hostname-and-etc..]
- int schemeStopIdx = connectStr.indexOf("//");
- if (-1 == schemeStopIdx) {
- // If no hostname start marker ("//"), then look for the right-most ':'
- // character.
- schemeStopIdx = connectStr.lastIndexOf(':');
- if (-1 == schemeStopIdx) {
- // Warn that this is nonstandard. But we should be as permissive
- // as possible here and let the ConnectionManagers themselves throw
- // out the connect string if it doesn't make sense to them.
- LOG.warn("Could not determine scheme component of connect string");
- // Use the whole string.
- schemeStopIdx = connectStr.length();
- }
- }
- return connectStr.substring(0, schemeStopIdx);
+ return SupportedManagers.extractScheme(options);
}
}

SupportedManagers.java

@@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.manager;
import com.cloudera.sqoop.SqoopOptions;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public enum SupportedManagers {
MYSQL("jdbc:mysql:", true), POSTGRES("jdbc:postgresql:", true), HSQLDB("jdbc:hsqldb:", false), ORACLE("jdbc:oracle:", true), SQLSERVER("jdbc:sqlserver:", false),
JTDS_SQLSERVER("jdbc:jtds:sqlserver:", false), DB2("jdbc:db2:", false), NETEZZA("jdbc:netezza:", true), CUBRID("jdbc:cubrid:", false);
private final String schemePrefix;
private final boolean hasDirectConnector;
private static final Log LOG
= LogFactory.getLog(SupportedManagers.class);
SupportedManagers(String schemePrefix, boolean hasDirectConnector) {
this.schemePrefix = schemePrefix;
this.hasDirectConnector = hasDirectConnector;
}
public String getSchemePrefix() {
return schemePrefix;
}
public boolean hasDirectConnector() {
return hasDirectConnector;
}
public boolean isTheManagerTypeOf(SqoopOptions options) {
return (extractScheme(options)).startsWith(getSchemePrefix());
}
public static SupportedManagers createFrom(SqoopOptions options) {
String scheme = extractScheme(options);
for (SupportedManagers m : values()) {
if (scheme.startsWith(m.getSchemePrefix())) {
return m;
}
}
return null;
}
static String extractScheme(SqoopOptions options) {
String connectStr = options.getConnectString();
// java.net.URL follows RFC-2396 literally, which does not allow a ':'
// character in the scheme component (section 3.1). JDBC connect strings,
// however, commonly have a multi-scheme addressing system. e.g.,
// jdbc:mysql://...; so we cannot parse the scheme component via URL
// objects. Instead, attempt to pull out the scheme as best as we can.
// First, see if this is of the form [scheme://hostname-and-etc..]
int schemeStopIdx = connectStr.indexOf("//");
if (-1 == schemeStopIdx) {
// If no hostname start marker ("//"), then look for the right-most ':'
// character.
schemeStopIdx = connectStr.lastIndexOf(':');
if (-1 == schemeStopIdx) {
// Warn that this is nonstandard. But we should be as permissive
// as possible here and let the ConnectionManagers themselves throw
// out the connect string if it doesn't make sense to them.
LOG.warn("Could not determine scheme component of connect string");
// Use the whole string.
schemeStopIdx = connectStr.length();
}
}
return connectStr.substring(0, schemeStopIdx);
}
}
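For illustration only (not part of the commit), the sketch below shows how the scheme extracted above drives createFrom: each connect string resolves to the SupportedManagers constant whose prefix it starts with, or to null when no prefix matches. The helper class and sample URLs are made up:

import com.cloudera.sqoop.SqoopOptions;
import org.apache.sqoop.manager.SupportedManagers;

public class SupportedManagersSketch {
  private static SupportedManagers resolve(String connectString) {
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(connectString);
    return SupportedManagers.createFrom(options);
  }

  public static void main(String[] args) {
    System.out.println(resolve("jdbc:mysql://db.example.com/sales"));      // MYSQL (direct connector available)
    System.out.println(resolve("jdbc:postgresql://db.example.com/sales")); // POSTGRES (direct connector available)
    System.out.println(resolve("jdbc:db2://db.example.com/sales"));        // DB2 (no direct connector)
    System.out.println(resolve("jdbc:unknown:foo"));                       // null: scheme not recognized
  }
}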

ExportTool.java

@@ -387,8 +387,14 @@ public void validateOptions(SqoopOptions options)
validateCommonOptions(options);
validateCodeGenOptions(options);
validateHCatalogOptions(options);
vaildateDirectExportOptions(options);
}
void vaildateDirectExportOptions(SqoopOptions options) throws InvalidOptionsException {
if (options.isDirect()) {
validateHasDirectConnectorOption(options);
}
}
private void applyNewUpdateOptions(CommandLine in, SqoopOptions out)
throws InvalidOptionsException {
if (in.hasOption(UPDATE_MODE_ARG)) {

ImportTool.java

@@ -53,6 +53,9 @@
import com.cloudera.sqoop.metastore.JobStorageFactory;
import com.cloudera.sqoop.util.AppendUtils;
import com.cloudera.sqoop.util.ImportException;
import org.apache.sqoop.manager.SupportedManagers;
import static org.apache.sqoop.manager.SupportedManagers.MYSQL;
/**
* Tool that performs database imports to HDFS.
@@ -1018,90 +1021,103 @@ public void applyOptions(CommandLine in, SqoopOptions out)
* @param options the configured SqoopOptions to check
*/
protected void validateImportOptions(SqoopOptions options)
- throws InvalidOptionsException {
- if (!allTables && options.getTableName() == null
- && options.getSqlQuery() == null) {
- throw new InvalidOptionsException(
- "--table or --" + SQL_QUERY_ARG + " is required for import. "
- + "(Or use sqoop import-all-tables.)"
- + HELP_STR);
- } else if (options.getExistingJarName() != null
- && options.getClassName() == null) {
- throw new InvalidOptionsException("Jar specified with --jar-file, but no "
- + "class specified with --class-name." + HELP_STR);
- } else if (options.getTargetDir() != null
- && options.getWarehouseDir() != null) {
- throw new InvalidOptionsException(
- "--target-dir with --warehouse-dir are incompatible options."
- + HELP_STR);
- } else if (options.getTableName() != null
- && options.getSqlQuery() != null) {
- throw new InvalidOptionsException(
- "Cannot specify --" + SQL_QUERY_ARG + " and --table together."
- + HELP_STR);
- } else if (options.getSqlQuery() != null
- && options.getTargetDir() == null
- && options.getHBaseTable() == null
- && options.getHCatTableName() == null
- && options.getAccumuloTable() == null) {
- throw new InvalidOptionsException(
- "Must specify destination with --target-dir. "
- + HELP_STR);
- } else if (options.getSqlQuery() != null && options.doHiveImport()
- && options.getHiveTableName() == null) {
- throw new InvalidOptionsException(
- "When importing a query to Hive, you must specify --"
- + HIVE_TABLE_ARG + "." + HELP_STR);
- } else if (options.getSqlQuery() != null && options.getNumMappers() > 1
- && options.getSplitByCol() == null) {
- throw new InvalidOptionsException(
- "When importing query results in parallel, you must specify --"
- + SPLIT_BY_ARG + "." + HELP_STR);
- } else if (options.isDirect()
- && options.getFileLayout() != SqoopOptions.FileLayout.TextFile
- && options.getConnectString().contains("jdbc:mysql://")) {
- throw new InvalidOptionsException(
- "MySQL direct import currently supports only text output format. "
- + "Parameters --as-sequencefile --as-avrodatafile and --as-parquetfile are not "
- + "supported with --direct params in MySQL case.");
- } else if (options.isDirect()
- && options.doHiveDropDelims()) {
- throw new InvalidOptionsException(
- "Direct import currently do not support dropping hive delimiters,"
- + " please remove parameter --hive-drop-import-delims.");
- } else if (allTables && options.isValidationEnabled()) {
- throw new InvalidOptionsException("Validation is not supported for "
- + "all tables but single table only.");
- } else if (options.getSqlQuery() != null && options.isValidationEnabled()) {
- throw new InvalidOptionsException("Validation is not supported for "
- + "free from query but single table only.");
- } else if (options.getWhereClause() != null
- && options.isValidationEnabled()) {
- throw new InvalidOptionsException("Validation is not supported for "
- + "where clause but single table only.");
- } else if (options.getIncrementalMode()
- != SqoopOptions.IncrementalMode.None && options.isValidationEnabled()) {
- throw new InvalidOptionsException("Validation is not supported for "
- + "incremental imports but single table only.");
- } else if ((options.getTargetDir() != null
- || options.getWarehouseDir() != null)
- && options.getHCatTableName() != null) {
- throw new InvalidOptionsException("--hcatalog-table cannot be used "
- + " --warehouse-dir or --target-dir options");
- } else if (options.isDeleteMode() && options.isAppendMode()) {
- throw new InvalidOptionsException("--append and --delete-target-dir can"
- + " not be used together.");
- } else if (options.isDeleteMode() && options.getIncrementalMode()
- != SqoopOptions.IncrementalMode.None) {
- throw new InvalidOptionsException("--delete-target-dir can not be used"
- + " with incremental imports.");
- } else if (options.getAutoResetToOneMapper()
- && (options.getSplitByCol() != null)) {
- throw new InvalidOptionsException("--autoreset-to-one-mapper and"
- + " --split-by cannot be used together.");
- }
- }
+ throws InvalidOptionsException {
+ if (!allTables && options.getTableName() == null
+ && options.getSqlQuery() == null) {
+ throw new InvalidOptionsException(
+ "--table or --" + SQL_QUERY_ARG + " is required for import. "
+ + "(Or use sqoop import-all-tables.)"
+ + HELP_STR);
+ } else if (options.getExistingJarName() != null
+ && options.getClassName() == null) {
+ throw new InvalidOptionsException("Jar specified with --jar-file, but no "
+ + "class specified with --class-name." + HELP_STR);
+ } else if (options.getTargetDir() != null
+ && options.getWarehouseDir() != null) {
+ throw new InvalidOptionsException(
+ "--target-dir with --warehouse-dir are incompatible options."
+ + HELP_STR);
+ } else if (options.getTableName() != null
+ && options.getSqlQuery() != null) {
+ throw new InvalidOptionsException(
+ "Cannot specify --" + SQL_QUERY_ARG + " and --table together."
+ + HELP_STR);
+ } else if (options.getSqlQuery() != null
+ && options.getTargetDir() == null
+ && options.getHBaseTable() == null
+ && options.getHCatTableName() == null
+ && options.getAccumuloTable() == null) {
+ throw new InvalidOptionsException(
+ "Must specify destination with --target-dir. "
+ + HELP_STR);
+ } else if (options.getSqlQuery() != null && options.doHiveImport()
+ && options.getHiveTableName() == null) {
+ throw new InvalidOptionsException(
+ "When importing a query to Hive, you must specify --"
+ + HIVE_TABLE_ARG + "." + HELP_STR);
+ } else if (options.getSqlQuery() != null && options.getNumMappers() > 1
+ && options.getSplitByCol() == null) {
+ throw new InvalidOptionsException(
+ "When importing query results in parallel, you must specify --"
+ + SPLIT_BY_ARG + "." + HELP_STR);
+ } else if (options.isDirect()) {
+ validateDirectImportOptions(options);
+ } else if (allTables && options.isValidationEnabled()) {
+ throw new InvalidOptionsException("Validation is not supported for "
+ + "all tables but single table only.");
+ } else if (options.getSqlQuery() != null && options.isValidationEnabled()) {
+ throw new InvalidOptionsException("Validation is not supported for "
+ + "free from query but single table only.");
+ } else if (options.getWhereClause() != null
+ && options.isValidationEnabled()) {
+ throw new InvalidOptionsException("Validation is not supported for "
+ + "where clause but single table only.");
+ } else if (options.getIncrementalMode()
+ != SqoopOptions.IncrementalMode.None && options.isValidationEnabled()) {
+ throw new InvalidOptionsException("Validation is not supported for "
+ + "incremental imports but single table only.");
+ } else if ((options.getTargetDir() != null
+ || options.getWarehouseDir() != null)
+ && options.getHCatTableName() != null) {
+ throw new InvalidOptionsException("--hcatalog-table cannot be used "
+ + " --warehouse-dir or --target-dir options");
+ } else if (options.isDeleteMode() && options.isAppendMode()) {
+ throw new InvalidOptionsException("--append and --delete-target-dir can"
+ + " not be used together.");
+ } else if (options.isDeleteMode() && options.getIncrementalMode()
+ != SqoopOptions.IncrementalMode.None) {
+ throw new InvalidOptionsException("--delete-target-dir can not be used"
+ + " with incremental imports.");
+ } else if (options.getAutoResetToOneMapper()
+ && (options.getSplitByCol() != null)) {
+ throw new InvalidOptionsException("--autoreset-to-one-mapper and"
+ + " --split-by cannot be used together.");
+ }
+ }
+ void validateDirectImportOptions(SqoopOptions options) throws InvalidOptionsException {
+ validateDirectMysqlOptions(options);
+ validateDirectDropHiveDelimOption(options);
+ validateHasDirectConnectorOption(options);
+ }
+ void validateDirectDropHiveDelimOption(SqoopOptions options) throws InvalidOptionsException {
+ if (options.doHiveDropDelims()) {
+ throw new InvalidOptionsException(
+ "Direct import currently do not support dropping hive delimiters,"
+ + " please remove parameter --hive-drop-import-delims.");
+ }
+ }
+ void validateDirectMysqlOptions(SqoopOptions options) throws InvalidOptionsException {
+ if (options.getFileLayout() != SqoopOptions.FileLayout.TextFile
+ && MYSQL.isTheManagerTypeOf(options)) {
+ throw new InvalidOptionsException(
+ "MySQL direct import currently supports only text output format. "
+ + "Parameters --as-sequencefile --as-avrodatafile and --as-parquetfile are not "
+ + "supported with --direct params in MySQL case.");
+ }
+ }
/**
* Validate the incremental import options.
*/

TestDirectImport.java

@@ -0,0 +1,88 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.sqoop;
import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.testutil.HsqldbTestServer;
import com.cloudera.sqoop.testutil.ImportJobTestCase;
import junit.framework.JUnit4TestAdapter;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.ArrayList;
@RunWith(value = org.junit.runners.JUnit4.class)
public class TestDirectImport extends ImportJobTestCase {
@Rule
public ExpectedException exception = ExpectedException.none();
protected String[] getArgv(boolean includeHadoopFlags, String[] colNames, boolean isDirect) {
String columnsString = "";
for (String col : colNames) {
columnsString += col + ",";
}
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--table");
args.add(HsqldbTestServer.getTableName());
args.add("--columns");
args.add(columnsString);
if (isDirect) args.add("--direct");
args.add("--split-by");
args.add("INTFIELD1");
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--delete-target-dir");
return args.toArray(new String[0]);
}
@Test
public void testDirectFlagWithHSQL() throws IOException {
String[] columns = HsqldbTestServer.getFieldNames();
String[] argv = getArgv(true, columns, true);
exception.expect(IOException.class);
runImport(argv);
}
@Test
public void testNonDirectFlagWithHSQL() throws IOException {
String[] columns = HsqldbTestServer.getFieldNames();
String[] argv = getArgv(true, columns, false);
runImport(argv);
}
//workaround: ant kept falling back to JUnit3
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestDirectImport.class);
}
}

TestDefaultManagerFactory.java

@@ -0,0 +1,167 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.manager;
import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.metastore.JobData;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
public class TestDefaultManagerFactory {
DefaultManagerFactory mf = new DefaultManagerFactory();
@Test
public void givenMySQLSchemaDirectFactoryReturnsMySQLManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.MYSQL, MySQLManager.class);
}
@Test
public void givenMySQLSchemaNonDirectFactoryReturnsMySQLManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.MYSQL, MySQLManager.class);
}
@Test
public void givenPostgresSchemaDirectFactoryReturnsDirectPostgresqlManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.POSTGRES, DirectPostgresqlManager.class);
}
@Test
public void givenPostgresSchemaNonDirectFactoryReturnsPostgresqlManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.POSTGRES, PostgresqlManager.class);
}
@Test
public void givenHsqlSchemaDirectFactoryReturnsHsqldbManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.HSQLDB, HsqldbManager.class);
}
@Test
public void givenHsqlSchemaNonDirectFactoryReturnsHsqldbManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.HSQLDB, HsqldbManager.class);
}
@Test
public void givenSqlServerSchemaDirectFactoryReturnsSQLServerManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.SQLSERVER, SQLServerManager.class);
}
@Test
public void givenSqlServerSchemaNonDirectFactoryReturnsSQLServerManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.SQLSERVER, SQLServerManager.class);
}
@Test
public void givenJTDSqlServerSchemaDirectFactoryReturnsSQLServerManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.JTDS_SQLSERVER, SQLServerManager.class);
}
@Test
public void givenJTDSqlServerSchemaNonDirectFactoryReturnsSQLServerManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.JTDS_SQLSERVER, SQLServerManager.class);
}
@Test
public void givenDb2SchemaDirectFactoryReturnsDb2Manager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.DB2, Db2Manager.class);
}
@Test
public void givenDb2SchemaNonDirectFactoryReturnsDb2Manager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.DB2, Db2Manager.class);
}
@Test
public void givenOracleSchemaDirectFactoryReturnsOracleManager() {
//OraOop connector is created differently, but from the factory's perspective it creates an OracleManager currently
assertCreationOfCorrectManagerClassDirect(SupportedManagers.ORACLE, OracleManager.class);
}
@Test
public void givenOracleSchemaNonDirectFactoryReturnsOracleManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.ORACLE, OracleManager.class);
}
@Test
public void givenNetezzaSchemaNonDirectFactoryReturnsNetezzaManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.NETEZZA, NetezzaManager.class);
}
@Test
public void givenNetezzaSchemaDirectFactoryReturnsDirectNetezzaManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.NETEZZA, DirectNetezzaManager.class);
}
@Test
public void givenCubridSchemaNonDirectFactoryReturnsCubridManager() {
assertCreationOfCorrectManagerClassNotDirect(SupportedManagers.CUBRID, CubridManager.class);
}
@Test
public void givenCubridSchemaDirectFactoryReturnsCubridManager() {
assertCreationOfCorrectManagerClassDirect(SupportedManagers.CUBRID, CubridManager.class);
}
private void assertCreationOfCorrectManagerClassNotDirect(SupportedManagers supportedManagers, Class type) {
assertCreationOfCorrectManagerClass(supportedManagers, type, false);
}
private void assertCreationOfCorrectManagerClassDirect(SupportedManagers supportedManagers, Class type) {
assertCreationOfCorrectManagerClass(supportedManagers, type, true);
}
private void assertCreationOfCorrectManagerClass(SupportedManagers supportedManagers, Class type, boolean isDirect) {
JobData data = mock(JobData.class);
SqoopOptions mockoptions = mockOptions(supportedManagers, isDirect);
when(data.getSqoopOptions()).thenReturn(mockoptions);
ConnManager connmanager = mf.accept(data);
assertThat(connmanager, instanceOf(type));
verifyCalls(supportedManagers, data, mockoptions);
}
private void verifyCalls(SupportedManagers supportedManagers, JobData data, SqoopOptions mockoptions) {
verify(data).getSqoopOptions();
verifyNoMoreInteractions(data);
//Workaround as Oracle Direct Connector creation is not handled by the DefaultManagerFactory
if (supportedManagers.hasDirectConnector() && !supportedManagers.equals(SupportedManagers.ORACLE)) {
verify(mockoptions).isDirect();
}
else
verify(mockoptions, never()).isDirect();
}
private SqoopOptions mockOptions(SupportedManagers supportedManagers, boolean isDirect) {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getConnectString()).thenReturn(supportedManagers.getSchemePrefix() + "//" + RandomStringUtils.random(10));
when(options.isDirect()).thenReturn(isDirect);
when(options.getConf()).thenReturn(mock(Configuration.class));
return options;
}
}

TestExportToolValidateOptions.java

@@ -0,0 +1,66 @@
package org.apache.sqoop.tool;
import com.cloudera.sqoop.SqoopOptions;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.manager.SupportedManagers;
import org.junit.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class TestExportToolValidateOptions {
ExportTool exportTool = new ExportTool();
@Test
public void givenDirectImportHasDirectConnectorValidationPasses() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = stubDirectOptions(SupportedManagers.NETEZZA);
exportTool.vaildateDirectExportOptions(options);
}
@Test(expected = org.apache.sqoop.SqoopOptions.InvalidOptionsException.class)
public void givenDirectImportNoDirectConnectorValidationThrows() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = stubDirectOptions(SupportedManagers.HSQLDB);
exportTool.vaildateDirectExportOptions(options);
}
@Test
public void givenNoDirectOptionWhenNoDirectConnectorAvailableValidationPasses() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = stubNotDirectOptions(SupportedManagers.HSQLDB);
exportTool.vaildateDirectExportOptions(options);
}
private SqoopOptions stubDirectOptions(SupportedManagers supportedManagers) {
return stubOptions(supportedManagers, true);
}
private SqoopOptions stubNotDirectOptions(SupportedManagers supportedManagers) {
return stubOptions(supportedManagers, false);
}
private SqoopOptions stubOptions(SupportedManagers supportedManagers, boolean isDirect) {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getConnectString()).thenReturn(supportedManagers.getSchemePrefix() + "//localhost");
when(options.isDirect()).thenReturn(isDirect);
when(options.getConf()).thenReturn(mock(Configuration.class));
return options;
}
}

TestValidateImportOptions.java

@@ -0,0 +1,127 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.tool;
import com.cloudera.sqoop.SqoopOptions;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.manager.SupportedManagers;
import org.apache.commons.lang.RandomStringUtils;
import org.junit.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
public class TestValidateImportOptions {
ImportTool importTool = new ImportTool();
private static final String mysqlConnectionString = "jdbc:mysql://" + RandomStringUtils.random(5);
@Test
public void givenDirectImportMysqlTextFileValidationPasses() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getFileLayout()).thenReturn(SqoopOptions.FileLayout.TextFile);
when(options.getConnectString()).thenReturn(mysqlConnectionString);
importTool.validateDirectMysqlOptions(options);
verify(options, times(1)).getFileLayout();
verifyNoMoreInteractions(options);
}
@Test(expected = org.apache.sqoop.SqoopOptions.InvalidOptionsException.class)
public void givenDirectImportMysqlSequenceFileValidationThrows() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getFileLayout()).thenReturn(SqoopOptions.FileLayout.SequenceFile);
when(options.getConnectString()).thenReturn(mysqlConnectionString);
importTool.validateDirectMysqlOptions(options);
verify(options, times(1)).getFileLayout();
verify(options, times(1)).getConnectString();
verifyNoMoreInteractions(options);
}
@Test(expected = org.apache.sqoop.SqoopOptions.InvalidOptionsException.class)
public void givenDirectImportMysqlParquetFileValidationThrows() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getFileLayout()).thenReturn(SqoopOptions.FileLayout.ParquetFile);
when(options.getConnectString()).thenReturn(mysqlConnectionString);
importTool.validateDirectMysqlOptions(options);
verify(options, times(1)).getFileLayout();
verify(options, times(1)).getConnectString();
verifyNoMoreInteractions(options);
}
@Test(expected = org.apache.sqoop.SqoopOptions.InvalidOptionsException.class)
public void givenDirectImportMysqlAvroDataFileValidationThrows() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getFileLayout()).thenReturn(SqoopOptions.FileLayout.AvroDataFile);
when(options.getConnectString()).thenReturn(mysqlConnectionString);
importTool.validateDirectMysqlOptions(options);
verify(options, times(1)).getFileLayout();
verify(options, times(1)).getConnectString();
verifyNoMoreInteractions(options);
}
@Test(expected = org.apache.sqoop.SqoopOptions.InvalidOptionsException.class)
public void givenDirectImportHiveDropDelimValidationThrows() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = mock(SqoopOptions.class);
when(options.doHiveDropDelims()).thenReturn(true);
importTool.validateDirectDropHiveDelimOption(options);
verify(options, times(1)).doHiveDropDelims();
verifyNoMoreInteractions(options);
}
@Test
public void givenDirectImportHasDirectConnectorValidationPasses() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = stubDirectOptions(SupportedManagers.NETEZZA);
importTool.validateDirectImportOptions(options);
}
@Test(expected = org.apache.sqoop.SqoopOptions.InvalidOptionsException.class)
public void givenDirectImportNoDirectConnectorValidationThrows() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = stubDirectOptions(SupportedManagers.HSQLDB);
importTool.validateDirectImportOptions(options);
}
@Test
public void givenNoDirectOptionWhenNoDirectConnectorAvailableValidationPasses() throws SqoopOptions.InvalidOptionsException {
SqoopOptions options = stubNotDirectOptions(SupportedManagers.HSQLDB);
importTool.validateDirectImportOptions(options);
}
private SqoopOptions stubDirectOptions(SupportedManagers supportedManagers) {
return stubOptions(supportedManagers, true);
}
private SqoopOptions stubNotDirectOptions(SupportedManagers supportedManagers) {
return stubOptions(supportedManagers, false);
}
private SqoopOptions stubOptions(SupportedManagers supportedManagers, boolean isDirect) {
SqoopOptions options = mock(SqoopOptions.class);
when(options.getConnectString()).thenReturn(supportedManagers.getSchemePrefix() + "//localhost");
when(options.isDirect()).thenReturn(isDirect);
when(options.getConf()).thenReturn(mock(Configuration.class));
return options;
}
//TODO create tests for all old validations as well
}