Mirror of https://github.com/apache/sqoop.git

SQOOP-219. Duplicate column arguments result in a code generation compilation error

From: Jonathan Hsieh <jon@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1150041 13f79535-47bb-0310-9956-ffa450edef68
Andrew Bayer 2011-07-22 20:04:37 +00:00
parent 913034315b
commit c0ca0c100b
2 changed files with 47 additions and 15 deletions
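
For context: generate() (first hunk below) cleans each requested column name via cleanColNames() into identifiers that are safe to use in the generated record class, so a repeated --columns entry (or two entries that clean to the same identifier) used to produce duplicate fields and a compilation error in the generated code. A minimal, self-contained sketch of the guard this commit adds, with a simplified, hypothetical stand-in for the cleaning step:

    import java.util.HashSet;
    import java.util.Set;

    public class DuplicateColumnCheck {

      // Simplified, hypothetical stand-in for Sqoop's column-name
      // cleaning: any character that cannot appear in a Java
      // identifier is replaced with '_'.
      static String toIdentifier(String col) {
        StringBuilder sb = new StringBuilder();
        for (char c : col.toCharArray()) {
          sb.append(Character.isJavaIdentifierPart(c) ? c : '_');
        }
        return sb.toString();
      }

      // The guard the commit adds: reject the column list as soon as
      // two entries clean to the same identifier. Set.add() returns
      // false for a repeated element, which is equivalent to the
      // contains()-then-add() pair used in the patch.
      static void checkColumns(String[] colNames) {
        Set<String> uniqColNames = new HashSet<String>();
        for (String col : colNames) {
          String identifier = toIdentifier(col);
          if (!uniqColNames.add(identifier)) {
            throw new IllegalArgumentException("Duplicate Column identifier "
                + "specified: '" + identifier + "'");
          }
        }
      }

      public static void main(String[] args) {
        checkColumns(new String[] { "DATA_COL0", "DATA_COL1" }); // passes
        checkColumns(new String[] { "DATA_COL0", "DATA_COL0" }); // throws
      }
    }

Note that the uniqueness check runs on the cleaned names, not the raw ones, so two raw column names that differ only in characters illegal in Java identifiers would also be rejected.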


@@ -42,6 +42,7 @@
 import java.io.Writer;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -1057,11 +1058,18 @@ public void generate() throws IOException {
     // Translate all the column names into names that are safe to
     // use as identifiers.
     String [] cleanedColNames = cleanColNames(colNames);
+    Set<String> uniqColNames = new HashSet<String>();
     for (int i = 0; i < colNames.length; i++) {
+      // Guarantee uniq col identifier
+      String identifier = cleanedColNames[i];
+      if (uniqColNames.contains(identifier)) {
+        throw new IllegalArgumentException("Duplicate Column identifier "
+            + "specified: '" + identifier + "'");
+      }
+      uniqColNames.add(identifier);
       // Make sure the col->type mapping holds for the
       // new identifier name, too.
-      String identifier = cleanedColNames[i];
       String col = colNames[i];
       columnTypes.put(identifier, columnTypes.get(col));
     }


@@ -59,7 +59,7 @@ public void testFailedImportDueToIOException() throws IOException {
     assertTrue(fs.exists(outputPath));

-    String [] argv = getArgv(true, new String [] { "DATA_COL0" }, conf);
+    String[] argv = getArgv(true, new String[] { "DATA_COL0" }, conf);

     Sqoop importer = new Sqoop(new ImportTool());
     try {
@@ -72,11 +72,11 @@ public void testFailedImportDueToIOException() throws IOException {
   }

   /** A mapper that is guaranteed to cause the task to fail. */
-  public static class NullDereferenceMapper
-      extends AutoProgressMapper<Object, Object, Text, NullWritable> {
+  public static class NullDereferenceMapper extends
+      AutoProgressMapper<Object, Object, Text, NullWritable> {

-    public void map(Object key, Object val, Context c)
-        throws IOException, InterruptedException {
+    public void map(Object key, Object val, Context c) throws IOException,
+        InterruptedException {
       String s = null;
       s.length(); // This will throw a NullPointerException.
     }
@@ -122,19 +122,16 @@ public void testFailedImportDueToJobFail() throws IOException {
     // Use the dependency-injection manager.
     conf.setClass(ConnFactory.FACTORY_CLASS_NAMES_KEY,
-        InjectableManagerFactory.class,
-        ManagerFactory.class);
+        InjectableManagerFactory.class, ManagerFactory.class);

-    String [] argv = getArgv(true, new String [] { "DATA_COL0" }, conf);
+    String[] argv = getArgv(true, new String[] { "DATA_COL0" }, conf);

     // Use dependency injection to specify a mapper that we know
     // will fail.
     conf.setClass(InjectableConnManager.MAPPER_KEY,
-        NullDereferenceMapper.class,
-        Mapper.class);
+        NullDereferenceMapper.class, Mapper.class);

-    conf.setClass(InjectableConnManager.IMPORT_JOB_KEY,
-        DummyImportJob.class,
+    conf.setClass(InjectableConnManager.IMPORT_JOB_KEY, DummyImportJob.class,
         ImportJobBase.class);

     Sqoop importer = new Sqoop(new ImportTool(), conf);
@@ -147,5 +144,32 @@ public void testFailedImportDueToJobFail() throws IOException {
       }
     }
   }
+
+  public void testDuplicateColumns() throws IOException {
+    // Make sure that if a MapReduce job to do the import fails due
+    // to an IOException, we tell the user about it.
+
+    // Create a table to attempt to import.
+    createTableForColType("VARCHAR(32)", "'meep'");
+
+    Configuration conf = new Configuration();
+
+    // Make the output dir exist so we know the job will fail via IOException.
+    Path outputPath = new Path(new Path(getWarehouseDir()), getTableName());
+    FileSystem fs = FileSystem.getLocal(conf);
+    fs.mkdirs(outputPath);
+    assertTrue(fs.exists(outputPath));
+
+    String[] argv = getArgv(true, new String[] { "DATA_COL0,DATA_COL0" }, conf);
+
+    Sqoop importer = new Sqoop(new ImportTool());
+    try {
+      int ret = Sqoop.runSqoop(importer, argv);
+      assertTrue("Expected job to fail!", 1 == ret);
+    } catch (Exception e) {
+      // In debug mode, ImportException is wrapped in RuntimeException.
+      LOG.info("Got exceptional return (expected: ok). msg is: " + e);
+    }
+  }
 }
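
With the guard in place, the failure surfaces immediately at code-generation time instead of as a javac error on the generated record class. A hypothetical invocation (connect string and table name invented for illustration) now fails fast along these lines:

    $ sqoop import --connect jdbc:hsqldb:hsql://localhost/db --table FOO \
        --columns "DATA_COL0,DATA_COL0"
    ...
    IllegalArgumentException: Duplicate Column identifier specified: 'DATA_COL0'

This is the behavior the new testDuplicateColumns() test pins down by passing "DATA_COL0,DATA_COL0" and expecting exit status 1 from Sqoop.runSqoop().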