Stars: 5 · Forks: 0
Mirror of https://github.com/apache/sqoop.git (synced 2025-05-04 05:39:35 +08:00)

SQOOP-2372: Import all tables as parquet will throw NPE

(Qian Xu via Abraham Elmahrek)
Commit 9147967eee by Abraham Elmahrek, 2015-05-29 17:31:44 -07:00 (parent 974b886c4a).
2 changed files with 44 additions and 9 deletions.

View File

@ -93,8 +93,8 @@ public String generateORM(SqoopOptions options, String tableName)
if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) { if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
String className = options.getClassName() != null ? String className = options.getClassName() != null ?
options.getClassName() : options.getTableName(); options.getClassName() : tableName;
if (className.equalsIgnoreCase(options.getTableName())) { if (className.equalsIgnoreCase(tableName)) {
className = "codegen_" + className; className = "codegen_" + className;
options.setClassName(className); options.setClassName(className);
LOG.info("Will generate java class as " + options.getClassName()); LOG.info("Will generate java class as " + options.getClassName());

View File

@ -23,6 +23,7 @@
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -34,6 +35,9 @@
import com.cloudera.sqoop.testutil.CommonArgs; import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.testutil.ImportJobTestCase; import com.cloudera.sqoop.testutil.ImportJobTestCase;
import com.cloudera.sqoop.tool.ImportAllTablesTool; import com.cloudera.sqoop.tool.ImportAllTablesTool;
import org.kitesdk.data.Dataset;
import org.kitesdk.data.DatasetReader;
import org.kitesdk.data.Datasets;
/** /**
* Test the --all-tables functionality that can import multiple tables. * Test the --all-tables functionality that can import multiple tables.
@ -44,13 +48,10 @@ public class TestAllTables extends ImportJobTestCase {
* Create the argv to pass to Sqoop. * Create the argv to pass to Sqoop.
* @return the argv as an array of strings. * @return the argv as an array of strings.
*/ */
private String [] getArgv(boolean includeHadoopFlags, String[] excludeTables) { private String [] getArgv(String[] extraArgs, String[] excludeTables) {
ArrayList<String> args = new ArrayList<String>(); ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) { CommonArgs.addHadoopFlags(args);
CommonArgs.addHadoopFlags(args);
}
args.add("--warehouse-dir"); args.add("--warehouse-dir");
args.add(getWarehouseDir()); args.add(getWarehouseDir());
args.add("--connect"); args.add("--connect");
@ -63,6 +64,11 @@ public class TestAllTables extends ImportJobTestCase {
args.add("--exclude-tables"); args.add("--exclude-tables");
args.add(StringUtils.join(excludeTables, ",")); args.add(StringUtils.join(excludeTables, ","));
} }
if (extraArgs != null) {
for (String arg : extraArgs) {
args.add(arg);
}
}
return args.toArray(new String[0]); return args.toArray(new String[0]);
} }
@ -124,7 +130,7 @@ public void tearDown() {
} }
public void testMultiTableImport() throws IOException { public void testMultiTableImport() throws IOException {
String [] argv = getArgv(true, null); String [] argv = getArgv(null, null);
runImport(new ImportAllTablesTool(), argv); runImport(new ImportAllTablesTool(), argv);
Path warehousePath = new Path(this.getWarehouseDir()); Path warehousePath = new Path(this.getWarehouseDir());
@ -159,9 +165,38 @@ public void testMultiTableImport() throws IOException {
} }
} }
/**
 * Regression test for SQOOP-2372: running import-all-tables with
 * --as-parquetfile used to throw an NPE. Imports every table as Parquet
 * and verifies that each resulting Kite dataset contains exactly the one
 * expected row for that table.
 *
 * @throws IOException if the import or dataset read fails
 */
public void testMultiTableImportAsParquetFormat() throws IOException {
  String [] argv = getArgv(new String[]{"--as-parquetfile"}, null);
  runImport(new ImportAllTablesTool(), argv);

  Path warehousePath = new Path(this.getWarehouseDir());
  int i = 0;
  for (String tableName : this.tableNames) {
    Path tablePath = new Path(warehousePath, tableName);
    // Parameterize the dataset with GenericRecord instead of using the
    // raw Dataset type; Datasets.load(String) infers the element type.
    Dataset<GenericRecord> dataset = Datasets.load("dataset:file:" + tablePath);

    // Dequeue the expected value for this table. This list has the
    // same order as the tableNames list.
    String expectedVal = Integer.toString(i++) + ","
        + this.expectedStrings.get(0);
    this.expectedStrings.remove(0);

    DatasetReader<GenericRecord> reader = dataset.newReader();
    try {
      GenericRecord record = reader.next();
      String line = record.get(0) + "," + record.get(1);
      assertEquals("Table " + tableName + " expected a different string",
          expectedVal, line);
      // Each table was populated with a single row, so the reader
      // must be exhausted after one record.
      assertFalse(reader.hasNext());
    } finally {
      // Always release the reader, even if an assertion above fails.
      reader.close();
    }
  }
}
public void testMultiTableImportWithExclude() throws IOException { public void testMultiTableImportWithExclude() throws IOException {
String exclude = this.tableNames.get(0); String exclude = this.tableNames.get(0);
String [] argv = getArgv(true, new String[]{ exclude }); String [] argv = getArgv(null, new String[]{ exclude });
runImport(new ImportAllTablesTool(), argv); runImport(new ImportAllTablesTool(), argv);
Path warehousePath = new Path(this.getWarehouseDir()); Path warehousePath = new Path(this.getWarehouseDir());