diff --git a/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java b/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java
index 34a45cdf..734f007b 100644
--- a/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java
+++ b/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java
@@ -63,10 +63,13 @@ public Schema generate() throws IOException {
       field.addProp("sqlType", Integer.toString(sqlType));
       fields.add(field);
     }
-    String doc = "Sqoop import of " + tableName;
-    Schema schema = Schema.createRecord(tableName, doc, null, false);
+
+    String avroTableName = (tableName == null ? "QueryResult" : tableName);
+
+    String doc = "Sqoop import of " + avroTableName;
+    Schema schema = Schema.createRecord(avroTableName, doc, null, false);
     schema.setFields(fields);
-    schema.addProp("tableName", tableName);
+    schema.addProp("tableName", avroTableName);
 
     return schema;
   }
diff --git a/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java b/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
index e9d252ed..e9f6ae40 100644
--- a/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
+++ b/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
@@ -44,6 +44,16 @@ public class TestAvroImportExportRoundtrip extends ImportJobTestCase {
   public static final Log LOG = LogFactory
       .getLog(TestAvroImportExportRoundtrip.class.getName());
 
+  public void testRoundtripQuery() throws IOException, SQLException {
+    String[] argv = {};
+
+    runImport(getOutputArgvForQuery(true));
+    deleteTableData();
+    runExport(getExportArgvForQuery(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+
+    checkFirstColumnSum();
+  }
+
   public void testRoundtrip() throws IOException, SQLException {
     String[] argv = {};
 
@@ -79,6 +89,73 @@ protected String[] getOutputArgv(boolean includeHadoopFlags) {
     return args.toArray(new String[0]);
   }
 
+  /**
+   * Create the argv to pass to Sqoop.
+   *
+   * @return the argv as an array of strings.
+   */
+  protected String[] getOutputArgvForQuery(boolean includeHadoopFlags) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--query");
+    args.add("select * from " + HsqldbTestServer.getTableName() + " where $CONDITIONS");
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--target-dir");
+    args.add(getWarehouseDir() + "/query_result");
+    args.add("--split-by");
+    args.add("INTFIELD1");
+    args.add("--as-avrodatafile");
+
+    return args.toArray(new String[0]);
+  }
+
+  protected String [] getExportArgv(boolean includeHadoopFlags,
+      int rowsPerStmt, int statementsPerTx, String... additionalArgv) {
+    ArrayList<String> args = formatAdditionalArgs(additionalArgv);
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--export-dir");
+    args.add(getTablePath().toString());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("-m");
+    args.add("1");
+
+    LOG.debug("args:");
+    for (String a : args) {
+      LOG.debug("  " + a);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  protected String [] getExportArgvForQuery(boolean includeHadoopFlags,
+      int rowsPerStmt, int statementsPerTx, String... additionalArgv) {
+    ArrayList<String> args = formatAdditionalArgs(additionalArgv);
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--export-dir");
+    args.add(getWarehouseDir() + "/query_result");
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("-m");
+    args.add("1");
+
+    LOG.debug("args:");
+    for (String a : args) {
+      LOG.debug("  " + a);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
   /**
    * Create the argv to pass to Sqoop.
    * @param includeHadoopFlags if true, then include -D various.settings=values
@@ -86,8 +163,7 @@ protected String[] getOutputArgv(boolean includeHadoopFlags) {
    * @param statementsPerTx ## of statements to use in a transaction.
    * @return the argv as an array of strings.
    */
-  protected String [] getExportArgv(boolean includeHadoopFlags,
-      int rowsPerStmt, int statementsPerTx, String... additionalArgv) {
+  protected ArrayList<String> formatAdditionalArgs(String... additionalArgv) {
     ArrayList<String> args = new ArrayList<String>();
 
     // Any additional Hadoop flags (-D foo=bar) are prepended.
@@ -120,22 +196,7 @@ protected String[] getOutputArgv(boolean includeHadoopFlags) {
         }
       }
     }
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--export-dir");
-    args.add(getTablePath().toString());
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("-m");
-    args.add("1");
-
-    LOG.debug("args:");
-    for (String a : args) {
-      LOG.debug("  " + a);
-    }
-
-    return args.toArray(new String[0]);
+    return args;
   }
 
   // this test just uses the two int table.
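
For reference, the functional fix above is the null guard in AvroSchemaGenerator.generate(): a free-form --query import has no source table, so tableName arrives as null, and before this patch that null was passed straight into Schema.createRecord and the schema's "tableName" property, breaking query-based Avro imports. Below is a minimal standalone sketch of the fallback behavior, illustrative only and not part of the patch; the class name is hypothetical, and it assumes Avro's org.apache.avro.Schema is on the classpath:

import org.apache.avro.Schema;

public class QueryResultNameSketch {
  public static void main(String[] unused) {
    String tableName = null; // what a free-form --query import supplies

    // The patched fallback: substitute "QueryResult" when no table name exists.
    String avroTableName = (tableName == null ? "QueryResult" : tableName);
    String doc = "Sqoop import of " + avroTableName;
    Schema schema = Schema.createRecord(avroTableName, doc, null, false);
    schema.addProp("tableName", avroTableName);

    System.out.println(schema.getName()); // prints "QueryResult"
  }
}

The test changes exercise this path end to end: testRoundtripQuery() imports into --target-dir .../query_result via getOutputArgvForQuery(), then exports from that same directory via getExportArgvForQuery(). The Hadoop-flag handling shared by the two export-argv builders is hoisted into formatAdditionalArgs() so it is written only once.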