in HBase")
+ .withLongOpt(HBASE_TABLE_ARG)
+ .create());
+ hbaseOpts.addOption(OptionBuilder.withArgName("family")
+ .hasArg()
+ .withDescription("Sets the target column family for the import")
+ .withLongOpt(HBASE_COL_FAM_ARG)
+ .create());
+ hbaseOpts.addOption(OptionBuilder.withArgName("col")
+ .hasArg()
+ .withDescription("Specifies which input column to use as the row key")
+ .withLongOpt(HBASE_ROW_KEY_ARG)
+ .create());
+ hbaseOpts.addOption(OptionBuilder
+ .withDescription("If specified, create missing HBase tables")
+ .withLongOpt(HBASE_CREATE_TABLE_ARG)
+ .create());
+
+ return hbaseOpts;
+ }
+
+
/**
* Apply common command-line to the state.
@@ -582,6 +615,24 @@ protected void applyCodeGenOptions(CommandLine in, SqoopOptions out,
}
}
+ protected void applyHBaseOptions(CommandLine in, SqoopOptions out) {
+ if (in.hasOption(HBASE_TABLE_ARG)) {
+ out.setHBaseTable(in.getOptionValue(HBASE_TABLE_ARG));
+ }
+
+ if (in.hasOption(HBASE_COL_FAM_ARG)) {
+ out.setHBaseColFamily(in.getOptionValue(HBASE_COL_FAM_ARG));
+ }
+
+ if (in.hasOption(HBASE_ROW_KEY_ARG)) {
+ out.setHBaseRowKeyColumn(in.getOptionValue(HBASE_ROW_KEY_ARG));
+ }
+
+ if (in.hasOption(HBASE_CREATE_TABLE_ARG)) {
+ out.setCreateHBaseTable(true);
+ }
+ }
+
protected void validateCommonOptions(SqoopOptions options)
throws InvalidOptionsException {
if (options.getConnectString() == null) {
@@ -623,10 +674,21 @@ protected void validateOutputFormatOptions(SqoopOptions options)
}
}
- protected void validateHiveOptions(SqoopOptions options) {
+ protected void validateHiveOptions(SqoopOptions options)
+ throws InvalidOptionsException {
// Empty; this method is present to maintain API consistency, and
// is reserved for future constraints on Hive options.
}
+ protected void validateHBaseOptions(SqoopOptions options)
+ throws InvalidOptionsException {
+ if ((options.getHBaseColFamily() != null && options.getHBaseTable() == null)
+ || (options.getHBaseColFamily() == null
+ && options.getHBaseTable() != null)) {
+ throw new InvalidOptionsException(
+ "Both --hbase-table and --column-family must be set together."
+ + HELP_STR);
+ }
+ }
}
diff --git a/src/java/com/cloudera/sqoop/tool/ImportTool.java b/src/java/com/cloudera/sqoop/tool/ImportTool.java
index 460fcbc1..b69c17d8 100644
--- a/src/java/com/cloudera/sqoop/tool/ImportTool.java
+++ b/src/java/com/cloudera/sqoop/tool/ImportTool.java
@@ -108,6 +108,7 @@ protected void importTable(SqoopOptions options, String tableName,
/**
* @return the output path for the imported files;
* in append mode this will point to a temporary folder.
+ * if importing to hbase, this may return null.
*/
private Path getOutputPath(SqoopOptions options, String tableName) {
// Get output directory
@@ -124,7 +125,7 @@ private Path getOutputPath(SqoopOptions options, String tableName) {
outputPath = new Path(hdfsTargetDir);
} else if (hdfsWarehouseDir != null) {
outputPath = new Path(hdfsWarehouseDir, tableName);
- } else {
+ } else if (null != tableName) {
outputPath = new Path(tableName);
}
}
@@ -272,6 +273,7 @@ public void configureOptions(ToolOptions toolOptions) {
toolOptions.addUniqueOptions(getOutputFormatOptions());
toolOptions.addUniqueOptions(getInputFormatOptions());
toolOptions.addUniqueOptions(getHiveOptions(true));
+ toolOptions.addUniqueOptions(getHBaseOptions());
// get common codegen opts.
RelatedOptions codeGenOpts = getCodeGenOpts(allTables);
@@ -384,6 +386,7 @@ public void applyOptions(CommandLine in, SqoopOptions out)
applyOutputFormatOptions(in, out);
applyInputFormatOptions(in, out);
applyCodeGenOptions(in, out, allTables);
+ applyHBaseOptions(in, out);
} catch (NumberFormatException nfe) {
throw new InvalidOptionsException("Error: expected numeric argument.\n"
+ "Try --help for usage.");
@@ -417,7 +420,7 @@ protected void validateImportOptions(SqoopOptions options)
"Cannot specify --" + SQL_QUERY_ARG + " and --table together."
+ HELP_STR);
} else if (options.getSqlQuery() != null
- && options.getTargetDir() == null) {
+ && options.getTargetDir() == null && options.getHBaseTable() == null) {
throw new InvalidOptionsException(
"Must specify destination with --target-dir."
+ HELP_STR);
@@ -458,6 +461,7 @@ public void validateOptions(SqoopOptions options)
validateCommonOptions(options);
validateCodeGenOptions(options);
validateOutputFormatOptions(options);
+ validateHBaseOptions(options);
}
}
diff --git a/src/scripts/hudson/run-code-quality.sh b/src/scripts/hudson/run-code-quality.sh
index 708c5386..2f569275 100755
--- a/src/scripts/hudson/run-code-quality.sh
+++ b/src/scripts/hudson/run-code-quality.sh
@@ -43,7 +43,8 @@ ${ANT} clean jar-all-shims findbugs javadoc cobertura checkstyle \
-Divy.home=$IVY_HOME -Dhadoop.dist=${COMPILE_HADOOP_DIST} \
-Dcobertura.home=${COBERTURA_HOME} -Dcobertura.format=xml \
-Dfindbugs.home=${FINDBUGS_HOME} \
- -Dtest.junit.output.format=xml
+ -Dhbase.home=${HBASE_HOME} -Dzookeeper.home=${ZOOKEEPER_HOME} \
+ -Dtest.junit.output.format=xml ${ANT_ARGUMENTS}
if [ "$?" != "0" ]; then
echo "Error during compilation phase. Aborting!"
@@ -56,7 +57,8 @@ ${ANT} cobertura \
-Dhadoop.dist=${COMPILE_HADOOP_DIST} \
-Dcobertura.home=${COBERTURA_HOME} -Dcobertura.format=xml \
-Dsqoop.thirdparty.lib.dir=${THIRDPARTY_LIBS} \
- -Dtestcase=ThirdPartyTests
+ -Dhbase.home=${HBASE_HOME} -Dzookeeper.home=${ZOOKEEPER_HOME} \
+ -Dtestcase=ThirdPartyTests ${ANT_ARGUMENTS}
if [ "$?" != "0" ]; then
echo "Unit tests failed!"
diff --git a/src/scripts/hudson/run-tests.sh b/src/scripts/hudson/run-tests.sh
index 8f574196..03b687f6 100755
--- a/src/scripts/hudson/run-tests.sh
+++ b/src/scripts/hudson/run-tests.sh
@@ -29,7 +29,9 @@ source ${bin}/test-config.sh
# Run compilation step.
-${ANT} clean jar -Divy.home=$IVY_HOME -Dhadoop.dist=${COMPILE_HADOOP_DIST}
+${ANT} clean jar -Divy.home=$IVY_HOME -Dhadoop.dist=${COMPILE_HADOOP_DIST} \
+ -Dhbase.home=${HBASE_HOME} -Dzookeeper.home=${ZOOKEEPER_HOME} \
+ ${ANT_ARGUMENTS}
if [ "$?" != "0" ]; then
echo "Error during compilation phase. Aborting!"
exit 1
@@ -40,7 +42,8 @@ testfailed=0
# Run basic unit tests.
${ANT} clean-cache test -Divy.home=$IVY_HOME -Dtest.junit.output.format=xml \
- -Dhadoop.dist=${TEST_HADOOP_DIST}
+ -Dhbase.home=${HBASE_HOME} -Dzookeeper.home=${ZOOKEEPER_HOME} \
+ -Dhadoop.dist=${TEST_HADOOP_DIST} ${ANT_ARGUMENTS}
if [ "$?" != "0" ]; then
testfailed=1
fi
@@ -53,7 +56,8 @@ fi
${ANT} test -Dthirdparty=true -Dsqoop.thirdparty.lib.dir=${THIRDPARTY_LIBS} \
-Dtest.junit.output.format=xml -Divy.home=$IVY_HOME \
- -Dhadoop.dist=${TEST_HADOOP_DIST}
+ -Dhbase.home=${HBASE_HOME} -Dzookeeper.home=${ZOOKEEPER_HOME} \
+ -Dhadoop.dist=${TEST_HADOOP_DIST} ${ANT_ARGUMENTS}
if [ "$?" != "0" ]; then
testfailed=1
fi
diff --git a/src/scripts/hudson/test-config.sh b/src/scripts/hudson/test-config.sh
index 97650649..c30cd723 100755
--- a/src/scripts/hudson/test-config.sh
+++ b/src/scripts/hudson/test-config.sh
@@ -45,3 +45,10 @@ export TEST_HADOOP_DIST=${TEST_HADOOP_DIST:-apache}
export WORKSPACE=${WORKSPACE:-$projroot}
export IVY_HOME=${IVY_HOME:-$WORKSPACE/.ivy2}
+export HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}
+export ZOOKEEPER_HOME=${ZOOKEEPER_HOME:-/usr/lib/zookeeper}
+
+if [ -z "${ANT_ARGUMENTS}" ]; then
+ export ANT_ARGUMENTS=""
+fi
+
diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java b/src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java
index 9013c55e..c5353091 100644
--- a/src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java
+++ b/src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java
@@ -95,16 +95,7 @@ public void checkOutputSpecs(JobContext context)
/** {@inheritDoc} */
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
throws IOException, InterruptedException {
- return new OutputCommitter() {
- public void abortTask(TaskAttemptContext taskContext) { }
- public void cleanupJob(JobContext jobContext) { }
- public void commitTask(TaskAttemptContext taskContext) { }
- public boolean needsTaskCommit(TaskAttemptContext taskContext) {
- return false;
- }
- public void setupJob(JobContext jobContext) { }
- public void setupTask(TaskAttemptContext taskContext) { }
- };
+ return new NullOutputCommitter();
}
/**
diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java b/src/shims/common/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java
new file mode 100644
index 00000000..d9ddd861
--- /dev/null
+++ b/src/shims/common/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to Cloudera, Inc. under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.cloudera.sqoop.mapreduce;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.OutputCommitter;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import com.cloudera.sqoop.lib.FieldMappable;
+import com.cloudera.sqoop.lib.FieldMapProcessor;
+import com.cloudera.sqoop.lib.ProcessingException;
+
+/**
+ * OutputFormat that produces a RecordReader which instantiates
+ * a FieldMapProcessor which will process FieldMappable
+ * output keys.
+ *
+ * The output value is ignored.
+ *
+ * The FieldMapProcessor implementation may do any arbitrary
+ * processing on the object. For example, it may write an object
+ * to HBase, etc.
+ *
+ * If the FieldMapProcessor implementation also implements
+ * Closeable, it will be close()'d in the RecordReader's close()
+ * method.
+ *
+ * If the FMP implements Configurable, it will be configured
+ * correctly via ReflectionUtils.
+ */
+public class DelegatingOutputFormat<K extends FieldMappable, V>
+    extends OutputFormat<K, V> {
+
+ /** conf key: the FieldMapProcessor class to instantiate. */
+ public static final String DELEGATE_CLASS_KEY =
+ "sqoop.output.delegate.field.map.processor.class";
+
+ @Override
+ /** {@inheritDoc} */
+ public void checkOutputSpecs(JobContext context)
+ throws IOException, InterruptedException {
+ Configuration conf = context.getConfiguration();
+
+ if (null == conf.get(DELEGATE_CLASS_KEY)) {
+ throw new IOException("Delegate FieldMapProcessor class is not set.");
+ }
+ }
+
+ @Override
+ /** {@inheritDoc} */
+ public OutputCommitter getOutputCommitter(TaskAttemptContext context)
+ throws IOException, InterruptedException {
+ return new NullOutputCommitter();
+ }
+
+ @Override
+ /** {@inheritDoc} */
+ public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
+ throws IOException {
+ try {
+ return new DelegatingRecordWriter(context);
+ } catch (ClassNotFoundException cnfe) {
+ throw new IOException(cnfe);
+ }
+ }
+
+ /**
+ * RecordWriter to write the output to a row in a database table.
+ * The actual database updates are executed in a second thread.
+ */
+ public class DelegatingRecordWriter extends RecordWriter<K, V> {
+
+ private Configuration conf;
+
+ private FieldMapProcessor mapProcessor;
+
+ public DelegatingRecordWriter(TaskAttemptContext context)
+ throws ClassNotFoundException {
+
+ this.conf = context.getConfiguration();
+
+ @SuppressWarnings("unchecked")
+ Class<? extends FieldMapProcessor> procClass =
+ (Class<? extends FieldMapProcessor>)
+ conf.getClass(DELEGATE_CLASS_KEY, null);
+ this.mapProcessor = ReflectionUtils.newInstance(procClass, this.conf);
+ }
+
+ protected Configuration getConf() {
+ return this.conf;
+ }
+
+ @Override
+ /** {@inheritDoc} */
+ public void close(TaskAttemptContext context)
+ throws IOException, InterruptedException {
+ if (mapProcessor instanceof Closeable) {
+ ((Closeable) mapProcessor).close();
+ }
+ }
+
+ @Override
+ /** {@inheritDoc} */
+ public void write(K key, V value)
+ throws InterruptedException, IOException {
+ try {
+ mapProcessor.accept(key);
+ } catch (ProcessingException pe) {
+ throw new IOException(pe);
+ }
+ }
+ }
+}
diff --git a/src/test/com/cloudera/sqoop/AllTests.java b/src/test/com/cloudera/sqoop/AllTests.java
index e79f7451..3d82a242 100644
--- a/src/test/com/cloudera/sqoop/AllTests.java
+++ b/src/test/com/cloudera/sqoop/AllTests.java
@@ -18,6 +18,9 @@
package com.cloudera.sqoop;
+import com.cloudera.sqoop.hbase.TestHBaseImport;
+import com.cloudera.sqoop.hbase.TestHBaseQueryImport;
+
import junit.framework.Test;
import junit.framework.TestSuite;
@@ -33,6 +36,8 @@ public static Test suite() {
suite.addTest(SmokeTests.suite());
suite.addTest(ThirdPartyTests.suite());
+ suite.addTestSuite(TestHBaseImport.class);
+ suite.addTestSuite(TestHBaseQueryImport.class);
return suite;
}
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
new file mode 100644
index 00000000..52689b12
--- /dev/null
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to Cloudera, Inc. under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.cloudera.sqoop.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.testutil.HsqldbTestServer;
+import com.cloudera.sqoop.testutil.ImportJobTestCase;
+
+/**
+ * Utility methods that facilitate HBase import tests.
+ */
+public class HBaseTestCase extends ImportJobTestCase {
+
+ /**
+ * Create the argv to pass to Sqoop.
+ * @return the argv as an array of strings.
+ */
+ protected String [] getArgv(boolean includeHadoopFlags,
+ String hbaseTable, String hbaseColFam, boolean hbaseCreate,
+ String queryStr) {
+
+ ArrayList<String> args = new ArrayList<String>();
+
+ if (includeHadoopFlags) {
+ CommonArgs.addHadoopFlags(args);
+ }
+
+ if (null != queryStr) {
+ args.add("--query");
+ args.add(queryStr);
+ } else {
+ args.add("--table");
+ args.add(getTableName());
+ }
+ args.add("--split-by");
+ args.add(getColName(0));
+ args.add("--connect");
+ args.add(HsqldbTestServer.getUrl());
+ args.add("--num-mappers");
+ args.add("1");
+ args.add("--column-family");
+ args.add(hbaseColFam);
+ args.add("--hbase-table");
+ args.add(hbaseTable);
+ if (hbaseCreate) {
+ args.add("--hbase-create-table");
+ }
+
+ return args.toArray(new String[0]);
+ }
+
+ // Starts a mini hbase cluster in this process.
+ private HBaseTestingUtility hbaseTestUtil;
+
+ private void startMaster() throws Exception {
+ if (null == hbaseTestUtil) {
+ Configuration conf = new Configuration();
+ conf = HBaseConfiguration.addHbaseResources(conf);
+ hbaseTestUtil = new HBaseTestingUtility(conf);
+ hbaseTestUtil.startMiniCluster(1);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUp() {
+ try {
+ startMaster();
+ } catch (Exception e) {
+ fail(e.toString());
+ }
+ super.setUp();
+ }
+
+
+ @AfterClass
+ public void shutdown() throws Exception {
+ LOG.info("In shutdown() method");
+ if (null != hbaseTestUtil) {
+ LOG.info("Shutting down HBase cluster");
+ hbaseTestUtil.shutdownMiniCluster();
+ this.hbaseTestUtil = null;
+ }
+ }
+
+ protected void verifyHBaseCell(String tableName, String rowKey,
+ String colFamily, String colName, String val) throws IOException {
+ Get get = new Get(Bytes.toBytes(rowKey));
+ get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(colName));
+ HTable table = new HTable(Bytes.toBytes(tableName));
+ try {
+ Result r = table.get(get);
+ byte [] actualVal = r.getValue(Bytes.toBytes(colFamily),
+ Bytes.toBytes(colName));
+ if (null == val) {
+ assertNull("Got a result when expected null", actualVal);
+ } else {
+ assertNotNull("No result, but we expected one", actualVal);
+ assertEquals(val, Bytes.toString(actualVal));
+ }
+ } finally {
+ table.close();
+ }
+ }
+}
diff --git a/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java b/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
new file mode 100644
index 00000000..a4466215
--- /dev/null
+++ b/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to Cloudera, Inc. under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.cloudera.sqoop.hbase;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+/**
+ * Test imports of tables into HBase.
+ */
+public class TestHBaseImport extends HBaseTestCase {
+
+ @Test
+ public void testBasicUsage() throws IOException {
+ // Create the HBase table in Sqoop as we run the job.
+ String [] argv = getArgv(true, "BasicUsage", "BasicColFam", true, null);
+ String [] types = { "INT", "INT" };
+ String [] vals = { "0", "1" };
+ createTableWithColTypes(types, vals);
+ runImport(argv);
+ verifyHBaseCell("BasicUsage", "0", "BasicColFam", getColName(1), "1");
+ }
+
+ @Test
+ public void testMissingTableFails() throws IOException {
+ // Test that if the table doesn't exist, we fail unless we
+ // explicitly create the table.
+ String [] argv = getArgv(true, "MissingTable", "MissingFam", false, null);
+ String [] types = { "INT", "INT" };
+ String [] vals = { "0", "1" };
+ createTableWithColTypes(types, vals);
+ try {
+ runImport(argv);
+ fail("Expected IOException");
+ } catch (IOException ioe) {
+ LOG.info("Got exception -- ok; we expected that job to fail.");
+ }
+ }
+
+ @Test
+ public void testOverwriteSucceeds() throws IOException {
+ // Test that we can create a table and then import immediately
+ // back on top of it without problem.
+ String [] argv = getArgv(true, "OverwriteT", "OverwriteF", true, null);
+ String [] types = { "INT", "INT" };
+ String [] vals = { "0", "1" };
+ createTableWithColTypes(types, vals);
+ runImport(argv);
+ verifyHBaseCell("OverwriteT", "0", "OverwriteF", getColName(1), "1");
+ // Run a second time.
+ runImport(argv);
+ verifyHBaseCell("OverwriteT", "0", "OverwriteF", getColName(1), "1");
+ }
+
+ @Test
+ public void testStrings() throws IOException {
+ String [] argv = getArgv(true, "stringT", "stringF", true, null);
+ String [] types = { "INT", "VARCHAR(32)" };
+ String [] vals = { "0", "'abc'" };
+ createTableWithColTypes(types, vals);
+ runImport(argv);
+ verifyHBaseCell("stringT", "0", "stringF", getColName(1), "abc");
+ }
+
+ @Test
+ public void testNulls() throws IOException {
+ String [] argv = getArgv(true, "nullT", "nullF", true, null);
+ String [] types = { "INT", "INT", "INT" };
+ String [] vals = { "0", "42", "null" };
+ createTableWithColTypes(types, vals);
+ runImport(argv);
+
+ // This cell should import correctly.
+ verifyHBaseCell("nullT", "0", "nullF", getColName(1), "42");
+
+ // This cell should not be placed in the results..
+ verifyHBaseCell("nullT", "0", "nullF", getColName(2), null);
+ }
+}
diff --git a/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java b/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
new file mode 100644
index 00000000..b40d7aba
--- /dev/null
+++ b/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to Cloudera, Inc. under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.cloudera.sqoop.hbase;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+/**
+ * Test import of free-form query into HBase.
+ */
+public class TestHBaseQueryImport extends HBaseTestCase {
+
+ @Test
+ public void testImportFromQuery() throws IOException {
+ String [] types = { "INT", "INT", "INT" };
+ String [] vals = { "0", "42", "43" };
+ createTableWithColTypes(types, vals);
+
+ String [] argv = getArgv(true, "queryT", "queryF", true,
+ "SELECT " + getColName(0) + ", " + getColName(1) + " FROM "
+ + getTableName() + " WHERE $CONDITIONS");
+ runImport(argv);
+
+ // This cell should import correctly.
+ verifyHBaseCell("queryT", "0", "queryF", getColName(1), "42");
+
+ // This cell should not be placed in the results..
+ verifyHBaseCell("queryT", "0", "queryF", getColName(2), null);
+ }
+}
diff --git a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
index 9efab286..e675df92 100644
--- a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
+++ b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
@@ -222,6 +222,10 @@ public void tearDown() {
static final String BASE_COL_NAME = "DATA_COL";
+ protected String getColName(int i) {
+ return BASE_COL_NAME + i;
+ }
+
/**
* Drop a table if it already exists in the database.
* @param table the name of the table to drop.