Mirror of https://github.com/apache/sqoop.git, synced 2025-05-02 03:02:07 +08:00
SIP-1. Refactor operations into distinct tools.
Introduced SqoopTool interface. Added cli package for option parsing: includes RelatedOptions, ToolOptions, SqoopParser.
'Sqoop' is now a wrapper that invokes a SqoopTool.
Added setter methods for all fields of SqoopOptions.
Added commons-cli 1.2 build dependency.
Argument parsing is removed from SqoopOptions and pushed into individual tools.
Added HelpTool to display basic usage information for Sqoop and usage for subcommands.
Added ImportTool to perform imports. Added EvalSqlTool. Added ExportTool. Added ImportAllTablesTool.
Added ListDatabasesTool, ListTablesTool. Added CodeGenTool. Added CreateHiveTableTool.
Small changes to orm.ClassWriter.
Auto-generate bin scripts for all tools; include in release package.
Allow user to provide build properties in a file.
Shim use of GenericOptionsParser to allow cross-compilation.
Fix Hive testcases to pass under CDH.

From: Aaron Kimball <aaron@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149894 13f79535-47bb-0310-9956-ffa450edef68
parent c37e2d0568
commit b7a3f47222
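The refactoring described above turns 'Sqoop' into a thin dispatcher: the first command-line token names a SqoopTool, the tool is looked up in a registry, and Sqoop only wires configuration and options around tool.run(). A minimal sketch of that flow follows; it restates the runTool()/runSqoop() methods added to Sqoop.java in this commit, and the tool name "import" is only an illustrative value.

// Sketch of the new tool-dispatch flow (mirrors runTool()/runSqoop() in this diff).
import java.util.Arrays;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.tool.SqoopTool;

public class ToolDispatchSketch {
  public static void main(String[] argv) {
    String toolName = argv[0];                     // e.g. "import", "export", "help"
    SqoopTool tool = SqoopTool.getTool(toolName);  // registry lookup added by this commit
    if (null == tool) {
      System.err.println("No such sqoop tool: " + toolName + ". See 'sqoop help'.");
      System.exit(1);
    }
    // Sqoop is now just a wrapper: it parses the tool's options and calls tool.run().
    Sqoop sqoop = new Sqoop(tool);
    int ret = Sqoop.runSqoop(sqoop, Arrays.copyOfRange(argv, 1, argv.length));
    System.exit(ret);
  }
}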
.gitignore (vendored): 1 line changed
@@ -19,4 +19,5 @@
.launches
.settings
/tags
/build.properties
bin/.gitignore (vendored, new file): 16 lines added
@@ -0,0 +1,16 @@
# Licensed to Cloudera, Inc. under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
/sqoop-*
@@ -56,6 +56,6 @@ SQOOP_JAR=`ls -1 ${SQOOP_JAR_DIR}/sqoop-*.jar | head -n 1`

# Invoke Hadoop
HADOOP_CLASSPATH="${SQOOP_CLASSPATH}:${HADOOP_CLASSPATH}" \
HADOOP_OPTS="-Dsqoop.shim.jar.dir=${SQOOP_SHIM_DIR}" \
HADOOP_OPTS="-Dsqoop.shim.jar.dir=${SQOOP_SHIM_DIR} ${HADOOP_OPTS}" \
${HADOOP_HOME}/bin/hadoop jar ${SQOOP_JAR} org.apache.hadoop.sqoop.Sqoop "$@"
build.xml: 50 lines changed
@@ -21,6 +21,11 @@
    xmlns:artifact="urn:maven-artifact-ant"
    xmlns:ivy="antlib:org.apache.ivy.ant">

  <!-- Load system-wide and project-wide default properties set by
       the user, to avoid needing to override with -D. -->
  <property file="${user.home}/build.properties" />
  <property file="${basedir}/build.properties" />

  <!-- some basic properties -->
  <property environment="env"/>
  <property name="name" value="sqoop" />
@@ -30,13 +35,16 @@
  <property name="dest.jar" value="${artifact.name}.jar"/>

  <!-- locations in the source tree -->
  <property name="src.dir" location="${basedir}/src/java" />
  <property name="test.dir" location="${basedir}/src/test" />
  <property name="shim.src.dir" location="${basedir}/src/shims" />
  <property name="base.src.dir" location="${basedir}/src" />
  <property name="src.dir" location="${base.src.dir}/java" />
  <property name="test.dir" location="${base.src.dir}/test" />
  <property name="shim.src.dir" location="${base.src.dir}/shims" />
  <property name="lib.dir" location="${basedir}/lib" />
  <property name="docs.src.dir" location="${basedir}/src/docs" />
  <property name="docs.src.dir" location="${base.src.dir}/docs" />
  <property name="script.src.dir" location="${base.src.dir}/scripts" />

  <property name="build.dir" location="${basedir}/build" />
  <property name="build.bin.dir" location="${build.dir}/bin" />
  <property name="build.classes" location="${build.dir}/classes"/>
  <property name="build.shim.dir" location="${build.dir}/shims"/>
  <property name="build.shim.classes" location="${build.shim.dir}/classes"/>
@@ -211,7 +219,21 @@
  <target name="compile-all" depends="compile,compile-test,jar-all-shims"
      description="Compile all sources"/>

  <target name="package" depends="jar,compile-all,docs,javadoc,ivy-retrieve-redist"
  <target name="scripts" depends="jar"
      description="Create tool-specific wrapper scripts">
    <!-- Take the list of available tools from 'sqoop help' and generate
         the wrapper scripts to invoke each of these.
    -->
    <mkdir dir="${build.bin.dir}" />
    <exec executable="${script.src.dir}/create-tool-scripts.sh"
        dir="${basedir}" failonerror="true">
      <arg value="${build.bin.dir}" />
      <arg value="${script.src.dir}/tool-script.sh.template" />
    </exec>
  </target>

  <target name="package"
      depends="jar,compile-all,docs,javadoc,ivy-retrieve-redist,scripts"
      description="Create a redistributable package">

    <mkdir dir="${dist.dir}"/>
@@ -231,7 +253,6 @@
      </fileset>
    </copy>

    <!-- copy in various components of the initial source layout
         so that the redistributable can bootstrap itself. -->
    <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="false">
@@ -268,19 +289,36 @@
      </fileset>
    </copy>

    <!-- copy in auto-generated bin scripts -->
    <copy todir="${dist.dir}/bin" includeEmptyDirs="false" flatten="true">
      <fileset dir="${build.bin.dir}">
        <include name="*" />
      </fileset>
    </copy>

    <!-- make sure the bin scripts are executable. -->
    <chmod perm="ugo+x" type="file" parallel="false">
      <fileset dir="${dist.dir}/bin" />
    </chmod>

    <!-- make sure any scripts named *.sh are executable. -->
    <chmod perm="ugo+x" type="file" parallel="false">
      <fileset dir="${dist.dir}">
        <include name="**/*.sh" />
      </fileset>
    </chmod>
  </target>

  <target name="tar" depends="package" description="Create release tarball">
    <tar compression="gzip" longfile="gnu" destfile="${tar.file}">
      <tarfileset dir="${build.dir}" mode="664">
        <exclude name="${artifact.name}/bin/*" />
        <exclude name="${artifact.name}/**/*.sh" />
        <include name="${artifact.name}/**" />
      </tarfileset>
      <tarfileset dir="${build.dir}" mode="755">
        <include name="${artifact.name}/bin/*" />
        <include name="${artifact.name}/**/*.sh" />
      </tarfileset>
    </tar>
  </target>
ivy.xml: 2 lines changed
@@ -80,6 +80,8 @@
      rev="${hadoop-core.cloudera.version}" conf="clouderatest->default"/>

    <!-- Common dependencies for Sqoop -->
    <dependency org="commons-cli" name="commons-cli"
      rev="${commons-cli.version}" conf="common->default"/>
    <dependency org="commons-logging" name="commons-logging"
      rev="${commons-logging.version}" conf="common->default"/>
    <dependency org="log4j" name="log4j" rev="${log4j.version}"
@@ -16,6 +16,7 @@
# This properties file lists the versions of the various artifacts we use.
# It drives ivy and the generation of a maven POM

commons-cli.version=1.2
commons-io.version=1.4
commons-logging.version=1.0.4
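The commons-cli 1.2 dependency registered above is what the new org.apache.hadoop.sqoop.cli package builds on. For orientation, a generic commons-cli sketch follows; it uses only the stock library API (Options, OptionBuilder, GnuParser) and the option names are illustrative, not taken from this commit.

// Generic commons-cli 1.2 usage, shown only to illustrate what the new dependency provides.
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class CommonsCliSketch {
  @SuppressWarnings("static-access")  // OptionBuilder exposes a static fluent API
  public static void main(String[] args) throws ParseException {
    Options opts = new Options();
    opts.addOption(OptionBuilder.withLongOpt("connect")
        .hasArg().withArgName("jdbc-uri")
        .withDescription("Specify JDBC connect string").create());
    opts.addOption(OptionBuilder.withLongOpt("table")
        .hasArg().withArgName("table-name")
        .withDescription("Table to read").create());

    CommandLine cl = new GnuParser().parse(opts, args);
    System.out.println("connect = " + cl.getOptionValue("connect"));
    System.out.println("table   = " + cl.getOptionValue("table"));
  }
}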
@@ -20,9 +20,11 @@

import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.cli.ParseException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -30,6 +32,7 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.apache.hadoop.sqoop.cli.ToolOptions;
import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.ExportJobContext;
@@ -37,6 +40,8 @@
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.tool.HelpTool;
import org.apache.hadoop.sqoop.tool.SqoopTool;
import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException;
@@ -59,99 +64,62 @@ public class Sqoop extends Configured implements Tool {
    Configuration.addDefaultResource("sqoop-site.xml");
  }

  private SqoopTool tool;
  private SqoopOptions options;
  private ConnManager manager;
  private HiveImport hiveImport;
  private List<String> generatedJarFiles;
  private String [] childPrgmArgs;

  public Sqoop() {
    this((Configuration) null);
  /**
   * Creates a new instance of Sqoop set to run the supplied SqoopTool
   * with the default configuration.
   * @param tool the SqoopTool to run in the main body of Sqoop.
   */
  public Sqoop(SqoopTool tool) {
    this(tool, (Configuration) null);
  }

  public Sqoop(Configuration conf) {
    this(conf, new SqoopOptions());
  /**
   * Creates a new instance of Sqoop set to run the supplied SqoopTool
   * with the provided configuration.
   * @param tool the SqoopTool to run in the main body of Sqoop.
   * @param conf the Configuration to use (e.g., from ToolRunner).
   */
  public Sqoop(SqoopTool tool, Configuration conf) {
    this(tool, conf, new SqoopOptions());
  }

  public Sqoop(Configuration conf, SqoopOptions opts) {
    generatedJarFiles = new ArrayList<String>();
  /**
   * Creates a new instance of Sqoop set to run the supplied SqoopTool
   * with the provided configuration and SqoopOptions.
   * @param tool the SqoopTool to run in the main body of Sqoop.
   * @param conf the Configuration to use (e.g., from ToolRunner).
   * @param opts the SqoopOptions which control the tool's parameters.
   */
  public Sqoop(SqoopTool tool, Configuration conf, SqoopOptions opts) {
    if (null != conf) {
      setConf(conf);
    }

    this.options = opts;
    this.options.setConf(getConf());

    this.tool = tool;
  }

  /**
   * @return the SqoopOptions used in this Sqoop instance.
   */
  public SqoopOptions getOptions() {
    return options;
    return this.options;
  }

  /**
   * @return a list of jar files generated as part of this im/export process
   * @return the SqoopTool used in this Sqoop instance.
   */
  public List<String> getGeneratedJarFiles() {
    ArrayList<String> out = new ArrayList<String>(generatedJarFiles);
    return out;
  }

  /**
   * Generate the .class and .jar files
   * @return the filename of the emitted jar file.
   * @throws IOException
   */
  private String generateORM(String tableName) throws IOException {
    String existingJar = options.getExistingJarName();
    if (existingJar != null) {
      // The user has pre-specified a jar and class to use. Don't generate.
      LOG.info("Using existing jar: " + existingJar);
      return existingJar;
    }

    LOG.info("Beginning code generation");
    CompilationManager compileMgr = new CompilationManager(options);
    ClassWriter classWriter = new ClassWriter(options, manager, tableName, compileMgr);
    classWriter.generate();
    compileMgr.compile();
    compileMgr.jar();
    String jarFile = compileMgr.getJarFilename();
    this.generatedJarFiles.add(jarFile);
    return jarFile;
  }

  private void importTable(String tableName) throws IOException, ImportException {
    String jarFile = null;

    // Generate the ORM code for the tables.
    jarFile = generateORM(tableName);

    if (options.getAction() == SqoopOptions.ControlAction.FullImport) {
      // check if data import is to be performed
      if (!options.doCreateHiveTableOnly()) {
        // Proceed onward to do the import.
        ImportJobContext context = new ImportJobContext(tableName, jarFile, options);
        manager.importTable(context);
      }
    }

    // If the user wants this table to be in Hive, perform that post-load.
    // If the user is in gen-only mode, this code will generate a Hive DDL
    // statement and write it to a file, but will not actually perform the
    // import.
    if (options.doHiveImport()) {
      hiveImport.importTable(tableName, options.getHiveTableName());
    }
  }

  private void exportTable(String tableName) throws ExportException, IOException {
    String jarFile = null;

    // Generate the ORM code for the tables.
    jarFile = generateORM(tableName);

    ExportJobContext context = new ExportJobContext(tableName, jarFile, options);
    manager.exportTable(context);
  public SqoopTool getTool() {
    return this.tool;
  }

  @Override
  /**
   * Actual main entry-point for the program
   */
@ -164,139 +132,90 @@ public int run(String [] args) {
|
||||
}
|
||||
|
||||
try {
|
||||
options.parse(args);
|
||||
options.validate();
|
||||
options = tool.parseArguments(args, null, options, false);
|
||||
tool.appendArgs(this.childPrgmArgs);
|
||||
tool.validateOptions(options);
|
||||
} catch (ParseException pe) {
|
||||
// Couldn't parse arguments. Just print a usage message and exit.
|
||||
new HelpTool().run(new SqoopOptions(getConf()));
|
||||
return 1;
|
||||
} catch (SqoopOptions.InvalidOptionsException e) {
|
||||
// Error validating arguments. Print an error message and exit.
|
||||
System.err.println(e.getMessage());
|
||||
return 1; // Exit on exception here.
|
||||
}
|
||||
|
||||
// Make sure shim jar is classloaded early.
|
||||
ShimLoader.getHadoopShim(getConf());
|
||||
|
||||
// Get the connection to the database.
|
||||
try {
|
||||
manager = new ConnFactory(getConf()).getManager(options);
|
||||
} catch (Exception e) {
|
||||
LOG.error("Got error creating database manager: " + e.toString());
|
||||
if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(e);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if (options.doHiveImport()) {
|
||||
hiveImport = new HiveImport(options, manager, getConf());
|
||||
}
|
||||
|
||||
SqoopOptions.ControlAction action = options.getAction();
|
||||
if (action == SqoopOptions.ControlAction.ListTables) {
|
||||
String [] tables = manager.listTables();
|
||||
if (null == tables) {
|
||||
System.err.println("Could not retrieve tables list from server");
|
||||
LOG.error("manager.listTables() returned null");
|
||||
return 1;
|
||||
} else {
|
||||
for (String tbl : tables) {
|
||||
System.out.println(tbl);
|
||||
}
|
||||
}
|
||||
} else if (action == SqoopOptions.ControlAction.ListDatabases) {
|
||||
String [] databases = manager.listDatabases();
|
||||
if (null == databases) {
|
||||
System.err.println("Could not retrieve database list from server");
|
||||
LOG.error("manager.listDatabases() returned null");
|
||||
return 1;
|
||||
} else {
|
||||
for (String db : databases) {
|
||||
System.out.println(db);
|
||||
}
|
||||
}
|
||||
} else if (action == SqoopOptions.ControlAction.DebugExec) {
|
||||
// just run a SQL statement for debugging purposes.
|
||||
manager.execAndPrint(options.getDebugSqlCmd());
|
||||
return 0;
|
||||
} else if (action == SqoopOptions.ControlAction.Export) {
|
||||
// Export a table.
|
||||
try {
|
||||
exportTable(options.getTableName());
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running export job: " + ioe.toString());
|
||||
if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} catch (ExportException ee) {
|
||||
LOG.error("Error during export: " + ee.toString());
|
||||
if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ee);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// This is either FullImport or GenerateOnly.
|
||||
|
||||
try {
|
||||
if (options.isAllTables()) {
|
||||
String [] tables = manager.listTables();
|
||||
if (null == tables) {
|
||||
System.err.println("Could not retrieve tables list from server");
|
||||
LOG.error("manager.listTables() returned null");
|
||||
return 1;
|
||||
} else {
|
||||
for (String tableName : tables) {
|
||||
importTable(tableName);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// just import a single table the user specified.
|
||||
importTable(options.getTableName());
|
||||
}
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running import job: " + ioe.toString());
|
||||
if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} catch (ImportException ie) {
|
||||
LOG.error("Error during import: " + ie.toString());
|
||||
if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ie);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
manager.close();
|
||||
} catch (SQLException sqlE) {
|
||||
LOG.warn("Error while closing connection: " + sqlE);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
return tool.run(options);
|
||||
}
|
||||
|
||||
public static void main(String [] args) {
|
||||
int ret;
|
||||
/**
|
||||
* SqoopTools sometimes pass arguments to a child program (e.g., mysqldump).
|
||||
* Users can specify additional args to these programs by preceding the
|
||||
* additional arguments with a standalone '--'; but
|
||||
* ToolRunner/GenericOptionsParser will cull out this argument. We remove
|
||||
* the child-program arguments in advance, and store them to be readded
|
||||
* later.
|
||||
* @param argv the argv in to the SqoopTool
|
||||
* @return the argv with a "--" and any subsequent arguments removed.
|
||||
*/
|
||||
private String [] stashChildPrgmArgs(String [] argv) {
|
||||
for (int i = 0; i < argv.length; i++) {
|
||||
if ("--".equals(argv[i])) {
|
||||
this.childPrgmArgs = Arrays.copyOfRange(argv, i, argv.length);
|
||||
return Arrays.copyOfRange(argv, 0, i);
|
||||
}
|
||||
}
|
||||
|
||||
// Didn't find child-program arguments.
|
||||
return argv;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a Sqoop object and a set of arguments to deliver to
|
||||
* its embedded SqoopTool, run the tool, wrapping the call to
|
||||
* ToolRunner.
|
||||
* This entry-point is preferred to ToolRunner.run() because
|
||||
* it has a chance to stash child program arguments before
|
||||
* GenericOptionsParser would remove them.
|
||||
*/
|
||||
public static int runSqoop(Sqoop sqoop, String [] args) {
|
||||
try {
|
||||
Sqoop importer = new Sqoop();
|
||||
ret = ToolRunner.run(importer, args);
|
||||
String [] toolArgs = sqoop.stashChildPrgmArgs(args);
|
||||
return ToolRunner.run(sqoop, toolArgs);
|
||||
} catch (Exception e) {
|
||||
LOG.error("Got exception running Sqoop: " + e.toString());
|
||||
e.printStackTrace();
|
||||
ret = 1;
|
||||
if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Entry-point that parses the correct SqoopTool to use from the args,
|
||||
* but does not call System.exit() as main() will.
|
||||
*/
|
||||
public static int runTool(String [] args) {
|
||||
String toolName = args[0];
|
||||
SqoopTool tool = SqoopTool.getTool(toolName);
|
||||
if (null == tool) {
|
||||
System.err.println("No such sqoop tool: " + toolName + ". See 'sqoop help'.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
Sqoop sqoop = new Sqoop(tool);
|
||||
return runSqoop(sqoop, Arrays.copyOfRange(args, 1, args.length));
|
||||
}
|
||||
|
||||
public static void main(String [] args) {
|
||||
if (args.length == 0) {
|
||||
System.err.println("Try 'sqoop help' for usage.");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
int ret = runTool(args);
|
||||
System.exit(ret);
|
||||
}
|
||||
}
|
||||
|
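The constructors and the runSqoop() entry point added in this file also make Sqoop embeddable: a caller can build a SqoopOptions, pick a tool, and hand both to the wrapper. A sketch under stated assumptions: the Sqoop(SqoopTool, Configuration, SqoopOptions) constructor and runSqoop() are taken directly from this diff, while ImportTool and its no-argument constructor are assumed from the commit message (it is added alongside HelpTool in org.apache.hadoop.sqoop.tool).

// Hedged sketch of embedding Sqoop programmatically with the tool-aware constructor.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.tool.ImportTool;  // assumed location, per the commit message

public class EmbeddedSqoopSketch {
  public static int runImport(Configuration conf, SqoopOptions opts) {
    // The options object carries the tool's parameters; no extra argv is passed here.
    Sqoop sqoop = new Sqoop(new ImportTool(), conf, opts);
    return Sqoop.runSqoop(sqoop, new String[0]);
  }
}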
@@ -64,16 +64,6 @@ public String toString() {
    }
  }

  // control-flow selector based on command-line switches.
  public enum ControlAction {
    ListDatabases, // list available databases and exit.
    ListTables, // list available tables and exit.
    GenerateOnly, // generate ORM code but do not import.
    FullImport, // generate code (as needed) and import.
    DebugExec, // just execute a single sql command and print its results.
    Export // export a table from HDFS to a database.
  }

  // selects in-HDFS destination file format
  public enum FileLayout {
    TextFile,
@@ -81,18 +71,19 @@ public enum FileLayout {
  }


  // TODO(aaron): Adding something here? Add a getter, a cmdline switch, and a properties file
  // entry in loadFromProperties(). Add a default value in initDefaults() if you need one.
  // Make sure you add the stub to the testdata/sqoop.properties.template file.
  // TODO(aaron): Adding something here? Add a setter and a getter.
  // Add a default value in initDefaults() if you need one.
  // If you want to load from a properties file, add an entry in the
  // loadFromProperties() method.
  // Then add command-line arguments in the appropriate tools. The
  // names of all command-line args are stored as constants in BaseSqoopTool.
  private String connectString;
  private String tableName;
  private String [] columns;
  private boolean allTables;
  private String username;
  private String password;
  private String codeOutputDir;
  private String jarOutputDir;
  private ControlAction action;
  private String hadoopHome;
  private String splitByCol;
  private String whereClause;
@ -104,7 +95,6 @@ public enum FileLayout {
|
||||
private String tmpDir; // where temp data goes; usually /tmp
|
||||
private String hiveHome;
|
||||
private boolean hiveImport;
|
||||
private boolean createHiveTableOnly;
|
||||
private boolean overwriteHiveTable;
|
||||
private String hiveTableName;
|
||||
private String packageName; // package to prepend to auto-named classes.
|
||||
@ -212,7 +202,6 @@ private void loadFromProperties() {
|
||||
|
||||
this.direct = getBooleanProperty(props, "direct.import", this.direct);
|
||||
this.hiveImport = getBooleanProperty(props, "hive.import", this.hiveImport);
|
||||
this.createHiveTableOnly = getBooleanProperty(props, "hive.create.table.only", this.createHiveTableOnly);
|
||||
this.overwriteHiveTable = getBooleanProperty(props, "hive.overwrite.table", this.overwriteHiveTable);
|
||||
this.useCompression = getBooleanProperty(props, "compression", this.useCompression);
|
||||
this.directSplitSize = getLongProperty(props, "direct.split.size",
|
||||
@ -241,7 +230,6 @@ public String getTempDir() {
|
||||
private void initDefaults(Configuration baseConfiguration) {
|
||||
// first, set the true defaults if nothing else happens.
|
||||
// default action is to run the full pipeline.
|
||||
this.action = ControlAction.FullImport;
|
||||
this.hadoopHome = System.getenv("HADOOP_HOME");
|
||||
|
||||
// Set this with $HIVE_HOME, but -Dhive.home can override.
|
||||
@ -291,102 +279,6 @@ private void initDefaults(Configuration baseConfiguration) {
|
||||
loadFromProperties();
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow the user to enter his password on the console without printing characters.
|
||||
* @return the password as a string
|
||||
*/
|
||||
private String securePasswordEntry() {
|
||||
return new String(System.console().readPassword("Enter password: "));
|
||||
}
|
||||
|
||||
/**
|
||||
* Print usage strings for the program's arguments.
|
||||
*/
|
||||
public static void printUsage() {
|
||||
System.out.println("Usage: hadoop sqoop.jar org.apache.hadoop.sqoop.Sqoop (options)");
|
||||
System.out.println("");
|
||||
System.out.println("Database connection options:");
|
||||
System.out.println("--connect (jdbc-uri) Specify JDBC connect string");
|
||||
System.out.println("--driver (class-name) Manually specify JDBC driver class to use");
|
||||
System.out.println("--username (username) Set authentication username");
|
||||
System.out.println("--password (password) Set authentication password");
|
||||
System.out.println("-P Read password from console");
|
||||
System.out.println("--direct Use direct import fast path (mysql only)");
|
||||
System.out.println("");
|
||||
System.out.println("Import control options:");
|
||||
System.out.println("--table (tablename) Table to read");
|
||||
System.out.println("--columns (col,col,col...) Columns to export from table");
|
||||
System.out.println("--split-by (column-name) Column of the table used to split work units");
|
||||
System.out.println("--where (where clause) Where clause to use during export");
|
||||
System.out.println("--hadoop-home (dir) Override $HADOOP_HOME");
|
||||
System.out.println("--hive-home (dir) Override $HIVE_HOME");
|
||||
System.out.println("--warehouse-dir (dir) HDFS path for table destination");
|
||||
System.out.println("--as-sequencefile Imports data to SequenceFiles");
|
||||
System.out.println("--as-textfile Imports data as plain text (default)");
|
||||
System.out.println("--all-tables Import all tables in database");
|
||||
System.out.println(" (Ignores --table, --columns and --split-by)");
|
||||
System.out.println("--hive-import If set, then import the table into Hive.");
|
||||
System.out.println(" (Uses Hive's default delimiters if none are set.)");
|
||||
System.out.println("--hive-table (tablename) Sets the table name to use when importing");
|
||||
System.out.println(" to hive.");
|
||||
System.out.println("-m, --num-mappers (n) Use 'n' map tasks to import in parallel");
|
||||
System.out.println("-z, --compress Enable compression");
|
||||
System.out.println("--direct-split-size (n) Split the input stream every 'n' bytes");
|
||||
System.out.println(" when importing in direct mode.");
|
||||
System.out.println("--inline-lob-limit (n) Set the maximum size for an inline LOB");
|
||||
System.out.println("");
|
||||
System.out.println("Export options:");
|
||||
System.out.println("--export-dir (dir) Export from an HDFS path into a table");
|
||||
System.out.println(" (set with --table)");
|
||||
System.out.println("");
|
||||
System.out.println("Output line formatting options:");
|
||||
System.out.println("--fields-terminated-by (char) Sets the field separator character");
|
||||
System.out.println("--lines-terminated-by (char) Sets the end-of-line character");
|
||||
System.out.println("--optionally-enclosed-by (char) Sets a field enclosing character");
|
||||
System.out.println("--enclosed-by (char) Sets a required field enclosing char");
|
||||
System.out.println("--escaped-by (char) Sets the escape character");
|
||||
System.out.println("--mysql-delimiters Uses MySQL's default delimiter set");
|
||||
System.out.println(" fields: , lines: \\n escaped-by: \\ optionally-enclosed-by: '");
|
||||
System.out.println("");
|
||||
System.out.println("Input parsing options:");
|
||||
System.out.println("--input-fields-terminated-by (char) Sets the input field separator");
|
||||
System.out.println("--input-lines-terminated-by (char) Sets the input end-of-line char");
|
||||
System.out.println("--input-optionally-enclosed-by (char) Sets a field enclosing character");
|
||||
System.out.println("--input-enclosed-by (char) Sets a required field encloser");
|
||||
System.out.println("--input-escaped-by (char) Sets the input escape character");
|
||||
System.out.println("");
|
||||
System.out.println("Code generation options:");
|
||||
System.out.println("--outdir (dir) Output directory for generated code");
|
||||
System.out.println("--bindir (dir) Output directory for compiled objects");
|
||||
System.out.println("--generate-only Stop after code generation; do not import");
|
||||
System.out.println("--package-name (name) Put auto-generated classes in this package");
|
||||
System.out.println("--class-name (name) When generating one class, use this name.");
|
||||
System.out.println(" This overrides --package-name.");
|
||||
System.out.println("");
|
||||
System.out.println("Library loading options:");
|
||||
System.out.println("--jar-file (file) Disable code generation; use specified jar");
|
||||
System.out.println("--class-name (name) The class within the jar that represents");
|
||||
System.out.println(" the table to import/export");
|
||||
System.out.println("");
|
||||
System.out.println("Additional commands:");
|
||||
System.out.println("--list-tables List tables in database and exit");
|
||||
System.out.println("--list-databases List all databases available and exit");
|
||||
System.out.println("--debug-sql (statement) Execute 'statement' in SQL and exit");
|
||||
System.out.println("--verbose Print more information while working");
|
||||
System.out.println("");
|
||||
System.out.println("Database-specific options:");
|
||||
System.out.println("Arguments may be passed to the database manager after a lone '-':");
|
||||
System.out.println(" MySQL direct mode: arguments passed directly to mysqldump");
|
||||
System.out.println("");
|
||||
System.out.println("Generic Hadoop command-line options:");
|
||||
ToolRunner.printGenericCommandUsage(System.out);
|
||||
System.out.println("");
|
||||
System.out.println("At minimum, you must specify --connect "
|
||||
+ "and either --table or --all-tables.");
|
||||
System.out.println("Alternatively, you can specify --generate-only or one of the additional");
|
||||
System.out.println("commands.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a string containing a single character or an escape sequence representing
|
||||
* a char, return that char itself.
|
||||
@ -397,12 +289,16 @@ public static void printUsage() {
|
||||
*
|
||||
* Strings like "\0ooo" return the character specified by the octal sequence 'ooo'
|
||||
* Strings like "\0xhhh" or "\0Xhhh" return the character specified by the hex sequence 'hhh'
|
||||
*
|
||||
* If the input string contains leading or trailing spaces, these are ignored.
|
||||
*/
|
||||
static char toChar(String charish) throws InvalidOptionsException {
|
||||
if (null == charish) {
|
||||
public static char toChar(String charish) throws InvalidOptionsException {
|
||||
if (null == charish || charish.length() == 0) {
|
||||
throw new InvalidOptionsException("Character argument expected."
|
||||
+ "\nTry --help for usage instructions.");
|
||||
} else if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
|
||||
}
|
||||
|
||||
if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
|
||||
if (charish.length() == 3) {
|
||||
throw new InvalidOptionsException("Base-16 value expected for character argument."
|
||||
+ "\nTry --help for usage instructions.");
|
||||
@ -452,9 +348,6 @@ static char toChar(String charish) throws InvalidOptionsException {
|
||||
+ "\nTry --help for usage instructions.");
|
||||
}
|
||||
}
|
||||
} else if (charish.length() == 0) {
|
||||
throw new InvalidOptionsException("Character argument expected."
|
||||
+ "\nTry --help for usage instructions.");
|
||||
} else {
|
||||
// it's a normal character.
|
||||
if (charish.length() > 1) {
|
||||
@ -466,252 +359,6 @@ static char toChar(String charish) throws InvalidOptionsException {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read args from the command-line into member fields.
|
||||
* @throws Exception if there's a problem parsing arguments.
|
||||
*/
|
||||
public void parse(String [] args) throws InvalidOptionsException {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("Parsing sqoop arguments:");
|
||||
for (String arg : args) {
|
||||
LOG.debug(" " + arg);
|
||||
}
|
||||
}
|
||||
|
||||
int i = 0;
|
||||
try {
|
||||
for (i = 0; i < args.length; i++) {
|
||||
if (args[i].equals("--connect")) {
|
||||
this.connectString = args[++i];
|
||||
} else if (args[i].equals("--driver")) {
|
||||
this.driverClassName = args[++i];
|
||||
} else if (args[i].equals("--table")) {
|
||||
this.tableName = args[++i];
|
||||
} else if (args[i].equals("--columns")) {
|
||||
String columnString = args[++i];
|
||||
this.columns = columnString.split(",");
|
||||
} else if (args[i].equals("--split-by")) {
|
||||
this.splitByCol = args[++i];
|
||||
} else if (args[i].equals("--where")) {
|
||||
this.whereClause = args[++i];
|
||||
} else if (args[i].equals("--list-tables")) {
|
||||
this.action = ControlAction.ListTables;
|
||||
} else if (args[i].equals("--all-tables")) {
|
||||
this.allTables = true;
|
||||
} else if (args[i].equals("--export-dir")) {
|
||||
this.exportDir = args[++i];
|
||||
this.action = ControlAction.Export;
|
||||
} else if (args[i].equals("--local")) {
|
||||
// TODO(aaron): Remove this after suitable deprecation time period.
|
||||
LOG.warn("--local is deprecated; use --direct instead.");
|
||||
this.direct = true;
|
||||
} else if (args[i].equals("--direct")) {
|
||||
this.direct = true;
|
||||
} else if (args[i].equals("--username")) {
|
||||
this.username = args[++i];
|
||||
if (null == this.password) {
|
||||
// Set password to empty if the username is set first,
|
||||
// to ensure that they're either both null or neither.
|
||||
this.password = "";
|
||||
}
|
||||
} else if (args[i].equals("--password")) {
|
||||
LOG.warn("Setting your password on the command-line is insecure. "
|
||||
+ "Consider using -P instead.");
|
||||
this.password = args[++i];
|
||||
} else if (args[i].equals("-P")) {
|
||||
this.password = securePasswordEntry();
|
||||
} else if (args[i].equals("--hadoop-home")) {
|
||||
this.hadoopHome = args[++i];
|
||||
} else if (args[i].equals("--hive-home")) {
|
||||
this.hiveHome = args[++i];
|
||||
} else if (args[i].equals("--hive-import")) {
|
||||
this.hiveImport = true;
|
||||
} else if (args[i].equals("--hive-create-only")) {
|
||||
this.createHiveTableOnly = true;
|
||||
} else if (args[i].equals("--hive-overwrite")) {
|
||||
this.overwriteHiveTable = true;
|
||||
} else if (args[i].equals("--hive-table")) {
|
||||
this.hiveTableName = args[++i];
|
||||
} else if (args[i].equals("--num-mappers") || args[i].equals("-m")) {
|
||||
String numMappersStr = args[++i];
|
||||
this.numMappers = Integer.valueOf(numMappersStr);
|
||||
} else if (args[i].equals("--fields-terminated-by")) {
|
||||
this.outputFieldDelim = SqoopOptions.toChar(args[++i]);
|
||||
this.areDelimsManuallySet = true;
|
||||
} else if (args[i].equals("--lines-terminated-by")) {
|
||||
this.outputRecordDelim = SqoopOptions.toChar(args[++i]);
|
||||
this.areDelimsManuallySet = true;
|
||||
} else if (args[i].equals("--optionally-enclosed-by")) {
|
||||
this.outputEnclosedBy = SqoopOptions.toChar(args[++i]);
|
||||
this.outputMustBeEnclosed = false;
|
||||
this.areDelimsManuallySet = true;
|
||||
} else if (args[i].equals("--enclosed-by")) {
|
||||
this.outputEnclosedBy = SqoopOptions.toChar(args[++i]);
|
||||
this.outputMustBeEnclosed = true;
|
||||
this.areDelimsManuallySet = true;
|
||||
} else if (args[i].equals("--escaped-by")) {
|
||||
this.outputEscapedBy = SqoopOptions.toChar(args[++i]);
|
||||
this.areDelimsManuallySet = true;
|
||||
} else if (args[i].equals("--mysql-delimiters")) {
|
||||
this.outputFieldDelim = ',';
|
||||
this.outputRecordDelim = '\n';
|
||||
this.outputEnclosedBy = '\'';
|
||||
this.outputEscapedBy = '\\';
|
||||
this.outputMustBeEnclosed = false;
|
||||
this.areDelimsManuallySet = true;
|
||||
} else if (args[i].equals("--input-fields-terminated-by")) {
|
||||
this.inputFieldDelim = SqoopOptions.toChar(args[++i]);
|
||||
} else if (args[i].equals("--input-lines-terminated-by")) {
|
||||
this.inputRecordDelim = SqoopOptions.toChar(args[++i]);
|
||||
} else if (args[i].equals("--input-optionally-enclosed-by")) {
|
||||
this.inputEnclosedBy = SqoopOptions.toChar(args[++i]);
|
||||
this.inputMustBeEnclosed = false;
|
||||
} else if (args[i].equals("--input-enclosed-by")) {
|
||||
this.inputEnclosedBy = SqoopOptions.toChar(args[++i]);
|
||||
this.inputMustBeEnclosed = true;
|
||||
} else if (args[i].equals("--input-escaped-by")) {
|
||||
this.inputEscapedBy = SqoopOptions.toChar(args[++i]);
|
||||
} else if (args[i].equals("--outdir")) {
|
||||
this.codeOutputDir = args[++i];
|
||||
} else if (args[i].equals("--as-sequencefile")) {
|
||||
this.layout = FileLayout.SequenceFile;
|
||||
} else if (args[i].equals("--as-textfile")) {
|
||||
this.layout = FileLayout.TextFile;
|
||||
} else if (args[i].equals("--bindir")) {
|
||||
this.jarOutputDir = args[++i];
|
||||
} else if (args[i].equals("--warehouse-dir")) {
|
||||
this.warehouseDir = args[++i];
|
||||
} else if (args[i].equals("--package-name")) {
|
||||
this.packageName = args[++i];
|
||||
} else if (args[i].equals("--class-name")) {
|
||||
this.className = args[++i];
|
||||
} else if (args[i].equals("-z") || args[i].equals("--compress")) {
|
||||
this.useCompression = true;
|
||||
} else if (args[i].equals("--direct-split-size")) {
|
||||
this.directSplitSize = Long.parseLong(args[++i]);
|
||||
} else if (args[i].equals("--inline-lob-limit")) {
|
||||
this.maxInlineLobSize = Long.parseLong(args[++i]);
|
||||
} else if (args[i].equals("--jar-file")) {
|
||||
this.existingJarFile = args[++i];
|
||||
} else if (args[i].equals("--list-databases")) {
|
||||
this.action = ControlAction.ListDatabases;
|
||||
} else if (args[i].equals("--generate-only")) {
|
||||
this.action = ControlAction.GenerateOnly;
|
||||
} else if (args[i].equals("--debug-sql")) {
|
||||
this.action = ControlAction.DebugExec;
|
||||
// read the entire remainder of the commandline into the debug sql statement.
|
||||
if (null == this.debugSqlCmd) {
|
||||
this.debugSqlCmd = "";
|
||||
}
|
||||
for (i++; i < args.length; i++) {
|
||||
this.debugSqlCmd = this.debugSqlCmd + args[i] + " ";
|
||||
}
|
||||
} else if (args[i].equals("--verbose")) {
|
||||
// Immediately switch into DEBUG logging.
|
||||
Category sqoopLogger =
|
||||
Logger.getLogger(SqoopOptions.class.getName()).getParent();
|
||||
sqoopLogger.setLevel(Level.DEBUG);
|
||||
|
||||
} else if (args[i].equals("--help")) {
|
||||
printUsage();
|
||||
throw new InvalidOptionsException("");
|
||||
} else if (args[i].equals("-")) {
|
||||
// Everything after a '--' goes into extraArgs.
|
||||
ArrayList<String> extra = new ArrayList<String>();
|
||||
for (i++; i < args.length; i++) {
|
||||
extra.add(args[i]);
|
||||
}
|
||||
this.extraArgs = extra.toArray(new String[0]);
|
||||
} else {
|
||||
throw new InvalidOptionsException("Invalid argument: " + args[i] + ".\n"
|
||||
+ "Try --help for usage.");
|
||||
}
|
||||
}
|
||||
} catch (ArrayIndexOutOfBoundsException oob) {
|
||||
throw new InvalidOptionsException("Error: " + args[--i] + " expected argument.\n"
|
||||
+ "Try --help for usage.");
|
||||
} catch (NumberFormatException nfe) {
|
||||
throw new InvalidOptionsException("Error: " + args[--i] + " expected numeric argument.\n"
|
||||
+ "Try --help for usage.");
|
||||
}
|
||||
}
|
||||
|
||||
private static final String HELP_STR = "\nTry --help for usage instructions.";
|
||||
|
||||
/**
|
||||
* Validates options and ensures that any required options are
|
||||
* present and that any mutually-exclusive options are not selected.
|
||||
* @throws Exception if there's a problem.
|
||||
*/
|
||||
public void validate() throws InvalidOptionsException {
|
||||
if (this.allTables && this.columns != null) {
|
||||
// If we're reading all tables in a database, can't filter column names.
|
||||
throw new InvalidOptionsException("--columns and --all-tables are incompatible options."
|
||||
+ HELP_STR);
|
||||
} else if (this.allTables && this.splitByCol != null) {
|
||||
// If we're reading all tables in a database, can't set pkey
|
||||
throw new InvalidOptionsException("--split-by and --all-tables are incompatible options."
|
||||
+ HELP_STR);
|
||||
} else if (this.allTables && this.className != null) {
|
||||
// If we're reading all tables, can't set individual class name
|
||||
throw new InvalidOptionsException("--class-name and --all-tables are incompatible options."
|
||||
+ HELP_STR);
|
||||
} else if (this.allTables && this.hiveTableName != null) {
|
||||
// If we're reading all tables, can't set hive target table name
|
||||
throw new InvalidOptionsException(
|
||||
"--hive-table and --all-tables are incompatible options."
|
||||
+ HELP_STR);
|
||||
} else if (this.hiveTableName != null && !this.hiveImport) {
|
||||
throw new InvalidOptionsException(
|
||||
"--hive-table is invalid without --hive-import"
|
||||
+ HELP_STR);
|
||||
} else if (this.connectString == null) {
|
||||
throw new InvalidOptionsException("Error: Required argument --connect is missing."
|
||||
+ HELP_STR);
|
||||
} else if (this.className != null && this.packageName != null) {
|
||||
throw new InvalidOptionsException(
|
||||
"--class-name overrides --package-name. You cannot use both." + HELP_STR);
|
||||
} else if (this.action == ControlAction.FullImport && !this.allTables
|
||||
&& this.tableName == null) {
|
||||
throw new InvalidOptionsException(
|
||||
"One of --table or --all-tables is required for import." + HELP_STR);
|
||||
} else if (this.action == ControlAction.Export && this.allTables) {
|
||||
throw new InvalidOptionsException("You cannot export with --all-tables." + HELP_STR);
|
||||
} else if (this.action == ControlAction.Export && this.tableName == null) {
|
||||
throw new InvalidOptionsException("Export requires a --table argument." + HELP_STR);
|
||||
} else if (this.existingJarFile != null && this.className == null) {
|
||||
throw new InvalidOptionsException("Jar specified with --jar-file, but no "
|
||||
+ "class specified with --class-name." + HELP_STR);
|
||||
} else if (this.existingJarFile != null && this.action == ControlAction.GenerateOnly) {
|
||||
throw new InvalidOptionsException("Cannot generate code using existing jar." + HELP_STR);
|
||||
}
|
||||
|
||||
if (this.hiveImport) {
|
||||
if (!areDelimsManuallySet) {
|
||||
// user hasn't manually specified delimiters, and wants to import straight to Hive.
|
||||
// Use Hive-style delimiters.
|
||||
LOG.info("Using Hive-specific delimiters for output. You can override");
|
||||
LOG.info("delimiters with --fields-terminated-by, etc.");
|
||||
this.outputFieldDelim = (char)0x1; // ^A
|
||||
this.outputRecordDelim = '\n';
|
||||
this.outputEnclosedBy = '\000'; // no enclosing in Hive.
|
||||
this.outputEscapedBy = '\000'; // no escaping in Hive
|
||||
this.outputMustBeEnclosed = false;
|
||||
}
|
||||
|
||||
if (this.getOutputEscapedBy() != '\000') {
|
||||
LOG.warn("Hive does not support escape characters in fields;");
|
||||
LOG.warn("parse errors in Hive may result from using --escaped-by.");
|
||||
}
|
||||
|
||||
if (this.getOutputEnclosedBy() != '\000') {
|
||||
LOG.warn("Hive does not support quoted strings; parse errors");
|
||||
LOG.warn("in Hive may result from using --enclosed-by.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** get the temporary directory; guaranteed to end in File.separator
|
||||
* (e.g., '/')
|
||||
*/
|
||||
@@ -719,6 +366,10 @@ public String getTmpDir() {
    return tmpDir;
  }

  public void setTmpDir(String tmp) {
    this.tmpDir = tmp;
  }

  public String getConnectString() {
    return connectString;
  }
@ -739,10 +390,18 @@ public String getExportDir() {
|
||||
return exportDir;
|
||||
}
|
||||
|
||||
public void setExportDir(String exportDir) {
|
||||
this.exportDir = exportDir;
|
||||
}
|
||||
|
||||
public String getExistingJarName() {
|
||||
return existingJarFile;
|
||||
}
|
||||
|
||||
public void setExistingJarName(String jarFile) {
|
||||
this.existingJarFile = jarFile;
|
||||
}
|
||||
|
||||
public String[] getColumns() {
|
||||
if (null == columns) {
|
||||
return null;
|
||||
@ -751,34 +410,70 @@ public String[] getColumns() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setColumns(String [] cols) {
|
||||
if (null == cols) {
|
||||
this.columns = null;
|
||||
} else {
|
||||
this.columns = Arrays.copyOf(cols, cols.length);
|
||||
}
|
||||
}
|
||||
|
||||
public String getSplitByCol() {
|
||||
return splitByCol;
|
||||
}
|
||||
|
||||
public void setSplitByCol(String splitBy) {
|
||||
this.splitByCol = splitBy;
|
||||
}
|
||||
|
||||
public String getWhereClause() {
|
||||
return whereClause;
|
||||
}
|
||||
|
||||
public ControlAction getAction() {
|
||||
return action;
|
||||
}
|
||||
|
||||
public boolean isAllTables() {
|
||||
return allTables;
|
||||
public void setWhereClause(String where) {
|
||||
this.whereClause = where;
|
||||
}
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public void setUsername(String user) {
|
||||
this.username = user;
|
||||
}
|
||||
|
||||
public String getPassword() {
|
||||
return password;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow the user to enter his password on the console without printing characters.
|
||||
* @return the password as a string
|
||||
*/
|
||||
private String securePasswordEntry() {
|
||||
return new String(System.console().readPassword("Enter password: "));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the password in this SqoopOptions from the console without printing
|
||||
* characters.
|
||||
*/
|
||||
public void setPasswordFromConsole() {
|
||||
this.password = securePasswordEntry();
|
||||
}
|
||||
|
||||
public void setPassword(String pass) {
|
||||
this.password = pass;
|
||||
}
|
||||
|
||||
public boolean isDirect() {
|
||||
return direct;
|
||||
}
|
||||
|
||||
public void setDirectMode(boolean isDirect) {
|
||||
this.direct = isDirect;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the number of map tasks to use for import
|
||||
*/
|
||||
@ -786,6 +481,10 @@ public int getNumMappers() {
|
||||
return this.numMappers;
|
||||
}
|
||||
|
||||
public void setNumMappers(int numMappers) {
|
||||
this.numMappers = numMappers;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the user-specified absolute class name for the table
|
||||
*/
|
||||
@ -793,6 +492,10 @@ public String getClassName() {
|
||||
return className;
|
||||
}
|
||||
|
||||
public void setClassName(String className) {
|
||||
this.className = className;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the user-specified package to prepend to table names via --package-name.
|
||||
*/
|
||||
@ -800,20 +503,25 @@ public String getPackageName() {
|
||||
return packageName;
|
||||
}
|
||||
|
||||
public void setPackageName(String packageName) {
|
||||
this.packageName = packageName;
|
||||
}
|
||||
|
||||
public String getHiveHome() {
|
||||
return hiveHome;
|
||||
}
|
||||
|
||||
public void setHiveHome(String hiveHome) {
|
||||
this.hiveHome = hiveHome;
|
||||
}
|
||||
|
||||
/** @return true if we should import the table into Hive */
|
||||
public boolean doHiveImport() {
|
||||
return hiveImport;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the user-specified option to create tables in hive with no loading
|
||||
*/
|
||||
public boolean doCreateHiveTableOnly() {
|
||||
return createHiveTableOnly;
|
||||
public void setHiveImport(boolean hiveImport) {
|
||||
this.hiveImport = hiveImport;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -823,6 +531,10 @@ public boolean doOverwriteHiveTable() {
|
||||
return overwriteHiveTable;
|
||||
}
|
||||
|
||||
public void setOverwriteHiveTable(boolean overwrite) {
|
||||
this.overwriteHiveTable = overwrite;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return location where .java files go; guaranteed to end with '/'
|
||||
*/
|
||||
@ -834,6 +546,10 @@ public String getCodeOutputDir() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setCodeOutputDir(String outputDir) {
|
||||
this.codeOutputDir = outputDir;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return location where .jar and .class files go; guaranteed to end with '/'
|
||||
*/
|
||||
@ -845,6 +561,10 @@ public String getJarOutputDir() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setJarOutputDir(String outDir) {
|
||||
this.jarOutputDir = outDir;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the value of $HADOOP_HOME
|
||||
* @return $HADOOP_HOME, or null if it's not set.
|
||||
@ -853,6 +573,10 @@ public String getHadoopHome() {
|
||||
return hadoopHome;
|
||||
}
|
||||
|
||||
public void setHadoopHome(String hadoopHome) {
|
||||
this.hadoopHome = hadoopHome;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a sql command to execute and exit with.
|
||||
*/
|
||||
@ -860,6 +584,10 @@ public String getDebugSqlCmd() {
|
||||
return debugSqlCmd;
|
||||
}
|
||||
|
||||
public void setDebugSqlCmd(String sqlStatement) {
|
||||
this.debugSqlCmd = sqlStatement;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The JDBC driver class name specified with --driver
|
||||
*/
|
||||
@ -867,6 +595,10 @@ public String getDriverClassName() {
|
||||
return driverClassName;
|
||||
}
|
||||
|
||||
public void setDriverClassName(String driverClass) {
|
||||
this.driverClassName = driverClass;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the base destination path for table uploads.
|
||||
*/
|
||||
@ -874,6 +606,10 @@ public String getWarehouseDir() {
|
||||
return warehouseDir;
|
||||
}
|
||||
|
||||
public void setWarehouseDir(String warehouse) {
|
||||
this.warehouseDir = warehouse;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the destination file format
|
||||
*/
|
||||
@ -881,12 +617,8 @@ public FileLayout getFileLayout() {
|
||||
return this.layout;
|
||||
}
|
||||
|
||||
public void setUsername(String name) {
|
||||
this.username = name;
|
||||
}
|
||||
|
||||
public void setPassword(String pass) {
|
||||
this.password = pass;
|
||||
public void setFileLayout(FileLayout layout) {
|
||||
this.layout = layout;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -901,6 +633,10 @@ public char getInputFieldDelim() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setInputFieldsTerminatedBy(char c) {
|
||||
this.inputFieldDelim = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the record delimiter to use when parsing lines. Defaults to the record delim
|
||||
* to use when printing lines.
|
||||
@ -913,6 +649,10 @@ public char getInputRecordDelim() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setInputLinesTerminatedBy(char c) {
|
||||
this.inputRecordDelim = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the character that may enclose fields when parsing lines. Defaults to the
|
||||
* enclosing-char to use when printing lines.
|
||||
@ -925,6 +665,10 @@ public char getInputEnclosedBy() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setInputEnclosedBy(char c) {
|
||||
this.inputEnclosedBy = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the escape character to use when parsing lines. Defaults to the escape
|
||||
* character used when printing lines.
|
||||
@ -937,6 +681,10 @@ public char getInputEscapedBy() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setInputEscapedBy(char c) {
|
||||
this.inputEscapedBy = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if fields must be enclosed by the --enclosed-by character when parsing.
|
||||
* Defaults to false. Set true when --input-enclosed-by is used.
|
||||
@ -949,6 +697,10 @@ public boolean isInputEncloseRequired() {
|
||||
}
|
||||
}
|
||||
|
||||
public void setInputEncloseRequired(boolean required) {
|
||||
this.inputMustBeEnclosed = required;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the character to print between fields when importing them to text.
|
||||
*/
|
||||
@ -956,6 +708,10 @@ public char getOutputFieldDelim() {
|
||||
return this.outputFieldDelim;
|
||||
}
|
||||
|
||||
public void setFieldsTerminatedBy(char c) {
|
||||
this.outputFieldDelim = c;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the character to print between records when importing them to text.
|
||||
@ -964,6 +720,10 @@ public char getOutputRecordDelim() {
|
||||
return this.outputRecordDelim;
|
||||
}
|
||||
|
||||
public void setLinesTerminatedBy(char c) {
|
||||
this.outputRecordDelim = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a character which may enclose the contents of fields when imported to text.
|
||||
*/
|
||||
@ -971,6 +731,10 @@ public char getOutputEnclosedBy() {
|
||||
return this.outputEnclosedBy;
|
||||
}
|
||||
|
||||
public void setEnclosedBy(char c) {
|
||||
this.outputEnclosedBy = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a character which signifies an escape sequence when importing to text.
|
||||
*/
|
||||
@ -978,6 +742,10 @@ public char getOutputEscapedBy() {
|
||||
return this.outputEscapedBy;
|
||||
}
|
||||
|
||||
public void setEscapedBy(char c) {
|
||||
this.outputEscapedBy = c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if fields imported to text must be enclosed by the EnclosedBy char.
|
||||
* default is false; set to true if --enclosed-by is used instead of --optionally-enclosed-by.
|
||||
@ -986,6 +754,10 @@ public boolean isOutputEncloseRequired() {
|
||||
return this.outputMustBeEnclosed;
|
||||
}
|
||||
|
||||
public void setOutputEncloseRequired(boolean required) {
|
||||
this.outputMustBeEnclosed = required;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if the user wants imported results to be compressed.
|
||||
*/
|
||||
@ -993,10 +765,14 @@ public boolean shouldUseCompression() {
|
||||
return this.useCompression;
|
||||
}
|
||||
|
||||
public void setUseCompression(boolean useCompression) {
|
||||
this.useCompression = useCompression;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the name of the destination table when importing to Hive
|
||||
*/
|
||||
public String getHiveTableName( ) {
|
||||
public String getHiveTableName() {
|
||||
if (null != this.hiveTableName) {
|
||||
return this.hiveTableName;
|
||||
} else {
|
||||
@ -1004,6 +780,10 @@ public String getHiveTableName( ) {
|
||||
}
|
||||
}
|
||||
|
||||
public void setHiveTableName(String tableName) {
|
||||
this.hiveTableName = tableName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the file size to split by when using --direct mode.
|
||||
*/
|
||||
@ -1011,6 +791,10 @@ public long getDirectSplitSize() {
|
||||
return this.directSplitSize;
|
||||
}
|
||||
|
||||
public void setDirectSplitSize(long splitSize) {
|
||||
this.directSplitSize = splitSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the max size of a LOB before we spill to a separate file.
|
||||
*/
|
||||
@ -1018,6 +802,24 @@ public long getInlineLobLimit() {
|
||||
return this.maxInlineLobSize;
|
||||
}
|
||||
|
||||
public void setInlineLobLimit(long limit) {
|
||||
this.maxInlineLobSize = limit;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if the delimiters have been explicitly set by the user.
|
||||
*/
|
||||
public boolean explicitDelims() {
|
||||
return areDelimsManuallySet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Flag the delimiter settings as explicit user settings, or implicit.
|
||||
*/
|
||||
public void setExplicitDelims(boolean explicit) {
|
||||
this.areDelimsManuallySet = explicit;
|
||||
}
|
||||
|
||||
public Configuration getConf() {
|
||||
return conf;
|
||||
}
|
||||
@ -1040,4 +842,16 @@ public void setConf(Configuration config) {
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
public void setExtraArgs(String [] args) {
|
||||
if (null == args) {
|
||||
this.extraArgs = null;
|
||||
return;
|
||||
}
|
||||
|
||||
this.extraArgs = new String[args.length];
|
||||
for (int i = 0; i < args.length; i++) {
|
||||
this.extraArgs[i] = args[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
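With parse() removed from SqoopOptions, tools populate it through the setters added above. A small sketch of that style follows; every setter used here appears in this diff, but the concrete values are illustrative only.

// Hedged sketch of configuring SqoopOptions through the new setters.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.SqoopOptions;

public class OptionsSetterSketch {
  public static SqoopOptions buildOptions(Configuration conf) {
    SqoopOptions opts = new SqoopOptions();
    opts.setConf(conf);
    opts.setUsername("sqoop");
    opts.setNumMappers(4);
    opts.setWarehouseDir("/user/sqoop/warehouse");
    opts.setHiveImport(true);
    // Mark the delimiters as explicitly chosen so Hive defaults are not applied.
    opts.setFieldsTerminatedBy(',');
    opts.setLinesTerminatedBy('\n');
    opts.setExplicitDelims(true);
    return opts;
  }
}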
src/java/org/apache/hadoop/sqoop/cli/RelatedOptions.java (new file): 49 lines added
@@ -0,0 +1,49 @@
/**
 * Licensed to Cloudera, Inc. under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Cloudera, Inc. licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.sqoop.cli;

import org.apache.commons.cli.Options;

/**
 * Class that extends a set of options to provide a title for a
 * related set of options.
 */
public class RelatedOptions extends Options {

  private String relatedTitle;

  public RelatedOptions() {
    this("");
  }

  public RelatedOptions(final String title) {
    super();
    this.relatedTitle = title;
  }

  public String getTitle() {
    return relatedTitle;
  }

  @Override
  public String toString() {
    return relatedTitle + "\n" + super.toString();
  }
}
123
src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
Normal file
@@ -0,0 +1,123 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.cli;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.cli.GnuParser;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.MissingArgumentException;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Options parser that follows Sqoop's specific options-parsing needs.
|
||||
* <ul>
|
||||
* <li>Option values may contain the '"' character as the first
|
||||
* or last character in the value.</li>
|
||||
* <li>The argument '--' must be preserved in the returned (unparsed)
|
||||
* argument list.</li>
|
||||
* </ul>
|
||||
*/
|
||||
public class SqoopParser extends GnuParser {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(SqoopParser.class.getName());
|
||||
|
||||
// We need a handle to Option.addValueForProcessing(String).
|
||||
// Since Hadoop will load this in a different classloader than
|
||||
// this Sqoop class, we cannot see their package-specific methods.
|
||||
// So we just call it by reflection. As long as we're at it, this
|
||||
// allows us to also put SqoopParser in its own package.
|
||||
static java.lang.reflect.Method addValForProcessing;
|
||||
|
||||
static {
|
||||
try {
|
||||
addValForProcessing = Option.class.getDeclaredMethod(
|
||||
"addValueForProcessing", String.class);
|
||||
addValForProcessing.setAccessible(true);
|
||||
} catch (NoSuchMethodException nsme) {
|
||||
LOG.error("Could not load required method of Parser: "
|
||||
+ StringUtils.stringifyException(nsme));
|
||||
addValForProcessing = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
/**
|
||||
* Processes arguments to options but only strips matched quotes.
|
||||
*/
|
||||
public void processArgs(Option opt, ListIterator iter)
|
||||
throws ParseException {
|
||||
// Loop until an option is found.
|
||||
while (iter.hasNext()) {
|
||||
String str = (String) iter.next();
|
||||
|
||||
if (getOptions().hasOption(str) && str.startsWith("-")) {
|
||||
// found an Option, not an argument.
|
||||
iter.previous();
|
||||
break;
|
||||
}
|
||||
|
||||
// Otherwise, this is a value.
|
||||
try {
|
||||
// Note that we only strip matched quotes here.
|
||||
addValForProcessing.invoke(opt, stripMatchedQuotes(str));
|
||||
} catch (IllegalAccessException iae) {
|
||||
throw new RuntimeException(iae);
|
||||
} catch (java.lang.reflect.InvocationTargetException ite) {
|
||||
// Any runtime exception thrown within addValForProcessing()
|
||||
// will be wrapped in an InvocationTargetException.
|
||||
iter.previous();
|
||||
break;
|
||||
} catch (RuntimeException re) {
|
||||
iter.previous();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (opt.getValues() == null && !opt.hasOptionalArg()) {
|
||||
throw new MissingArgumentException(opt);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Util.stripLeadingAndTrailingQuotes() will strip a '"'
|
||||
* character from either or both sides of a string. We only
|
||||
* strip the matched pair.
|
||||
*/
|
||||
private String stripMatchedQuotes(String in) {
|
||||
if (null == in || "\"".equals(in)) {
|
||||
return in; // a lone quote character stays as-is.
|
||||
} else if (in.startsWith("\"") && in.endsWith("\"")) {
|
||||
// Strip this pair of matched quotes.
|
||||
return in.substring(1, in.length() - 1);
|
||||
} else {
|
||||
// return as-is.
|
||||
return in;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
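The matched-quote rule above is easiest to see with concrete inputs. The demo below reimplements the private stripMatchedQuotes() logic for illustration only; it is not part of the patch.

// Demonstration of the matched-quote rule used by SqoopParser; this helper
// mirrors the private stripMatchedQuotes() method and is not part of the patch.
public class QuoteStrippingDemo {
  static String stripMatchedQuotes(String in) {
    if (null == in || "\"".equals(in)) {
      return in;                                // a lone quote stays as-is
    } else if (in.startsWith("\"") && in.endsWith("\"")) {
      return in.substring(1, in.length() - 1);  // strip the matched pair
    } else {
      return in;                                // unmatched quotes are preserved
    }
  }

  public static void main(String[] args) {
    System.out.println(stripMatchedQuotes("\"\\t\""));  // prints \t   (pair stripped)
    System.out.println(stripMatchedQuotes("\"abc"));    // prints "abc (left unchanged)
    System.out.println(stripMatchedQuotes("plain"));    // prints plain
  }
}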
151
src/java/org/apache/hadoop/sqoop/cli/ToolOptions.java
Normal file
@@ -0,0 +1,151 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.cli;
|
||||
|
||||
import java.io.PrintWriter;
|
||||
import java.io.StringWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
|
||||
/**
|
||||
* Class that holds several sets of related options, providing a container
|
||||
* for all the options associated with a single tool.
|
||||
* The order in which sets of related options are added to this tool is
|
||||
* preserved in printing and iteration.
|
||||
*/
|
||||
public class ToolOptions implements Iterable<RelatedOptions> {
|
||||
|
||||
private List<RelatedOptions> optGroups;
|
||||
|
||||
public ToolOptions() {
|
||||
this.optGroups = new ArrayList<RelatedOptions>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a block of related options to the options for this tool.
|
||||
* @param opts the set of RelatedOptions to add.
|
||||
*/
|
||||
public void addOptions(RelatedOptions opts) {
|
||||
optGroups.add(opts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a block of related options to the options for this tool,
|
||||
* if a block has not already been added with the same title.
|
||||
* @param opts the set of RelatedOptions to add.
|
||||
*/
|
||||
public void addUniqueOptions(RelatedOptions opts) {
|
||||
if (!containsGroup(opts.getTitle())) {
|
||||
optGroups.add(opts);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reports whether this collection of RelatedOptions contains
|
||||
* a RelatedOptions with the specified title.
|
||||
* @param title the group title to search for
|
||||
* @return true if a RelatedOptions with this group title is
|
||||
* in the collection.
|
||||
*/
|
||||
public boolean containsGroup(String title) {
|
||||
for (RelatedOptions related : this) {
|
||||
if (related.getTitle().equals(title)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide an iterator over all sets of RelatedOptions.
|
||||
* @return an iterator returning each RelatedOptions element.
|
||||
*/
|
||||
public Iterator<RelatedOptions> iterator() {
|
||||
return optGroups.iterator();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Flatten the different sets of related options into a single collection
|
||||
* of options.
|
||||
* @return all options in the ToolOptions as a single set
|
||||
*/
|
||||
public Options merge() {
|
||||
Options mergedOpts = new Options();
|
||||
int totalOpts = 0;
|
||||
for (RelatedOptions relatedOpts : this) {
|
||||
for (Object optObj : relatedOpts.getOptions()) {
|
||||
Option opt = (Option) optObj;
|
||||
mergedOpts.addOption(opt);
|
||||
totalOpts++;
|
||||
}
|
||||
}
|
||||
|
||||
return mergedOpts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the help to the console using a default help formatter.
|
||||
*/
|
||||
public void printHelp() {
|
||||
printHelp(new HelpFormatter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the help to the console using the specified help formatter
|
||||
* @param formatter the HelpFormatter to use.
|
||||
*/
|
||||
public void printHelp(HelpFormatter formatter) {
|
||||
printHelp(formatter, new PrintWriter(System.out, true));
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the help to the specified PrintWriter, using the specified
|
||||
* help formatter.
|
||||
* @param formatter the HelpFormatter to use.
|
||||
* @param pw the PrintWriter to emit to.
|
||||
*/
|
||||
public void printHelp(HelpFormatter formatter, PrintWriter pw) {
|
||||
boolean first = true;
|
||||
for (RelatedOptions optGroup : optGroups) {
|
||||
if (!first) {
|
||||
pw.println("");
|
||||
}
|
||||
pw.println(optGroup.getTitle() + ":");
|
||||
formatter.printOptions(pw, formatter.getWidth(), optGroup, 0, 4);
|
||||
first = false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringWriter sw = new StringWriter();
|
||||
printHelp(new HelpFormatter(), new PrintWriter(sw));
|
||||
sw.flush();
|
||||
return sw.getBuffer().toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
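A short sketch of how a tool assembles its option groups with RelatedOptions and ToolOptions follows; the group title and the single --connect option are illustrative, not mandated by the patch.

// Sketch of assembling option groups; the group title and the --connect
// option shown here are illustrative examples.
import org.apache.commons.cli.OptionBuilder;
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;

public class ToolOptionsDemo {
  public static void main(String[] args) {
    RelatedOptions common = new RelatedOptions("Common arguments");
    common.addOption(OptionBuilder.withArgName("jdbc-uri")
        .hasArg().withDescription("Specify JDBC connect string")
        .withLongOpt("connect")
        .create());

    ToolOptions toolOpts = new ToolOptions();
    toolOpts.addUniqueOptions(common);
    toolOpts.addUniqueOptions(common);  // second call is a no-op: same group title

    toolOpts.printHelp();               // prints each group under its title
  }
}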
@@ -49,11 +49,14 @@ public class HiveImport {
|
||||
private SqoopOptions options;
|
||||
private ConnManager connManager;
|
||||
private Configuration configuration;
|
||||
private boolean generateOnly;
|
||||
|
||||
public HiveImport(final SqoopOptions opts, final ConnManager connMgr, final Configuration conf) {
|
||||
public HiveImport(final SqoopOptions opts, final ConnManager connMgr,
|
||||
final Configuration conf, final boolean generateOnly) {
|
||||
this.options = opts;
|
||||
this.connManager = connMgr;
|
||||
this.configuration = conf;
|
||||
this.generateOnly = generateOnly;
|
||||
}
|
||||
|
||||
|
||||
@@ -110,7 +113,7 @@ private void removeTempLogs(String tableName) throws IOException {
|
||||
* do any side-effecting actions in Hive.
|
||||
*/
|
||||
private boolean isGenerateOnly() {
|
||||
return options.getAction() == SqoopOptions.ControlAction.GenerateOnly;
|
||||
return generateOnly;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -134,9 +137,11 @@ private File getScriptFile(String outputTableName) throws IOException {
|
||||
*
|
||||
* @param inputTableName the name of the table as loaded into HDFS
|
||||
* @param outputTableName the name of the table to create in Hive.
|
||||
* @param createOnly if true, run the CREATE TABLE statement but not
|
||||
* LOAD DATA.
|
||||
*/
|
||||
public void importTable(String inputTableName, String outputTableName)
|
||||
throws IOException {
|
||||
public void importTable(String inputTableName, String outputTableName,
|
||||
boolean createOnly) throws IOException {
|
||||
|
||||
if (!isGenerateOnly()) {
|
||||
removeTempLogs(inputTableName);
|
||||
@@ -177,7 +182,7 @@ public void importTable(String inputTableName, String outputTableName)
|
||||
FileOutputStream fos = new FileOutputStream(scriptFile);
|
||||
w = new BufferedWriter(new OutputStreamWriter(fos));
|
||||
w.write(createTableStr, 0, createTableStr.length());
|
||||
if (!options.doCreateHiveTableOnly()) {
|
||||
if (!createOnly) {
|
||||
w.write(loadDataStmtStr, 0, loadDataStmtStr.length());
|
||||
}
|
||||
} catch (IOException ioe) {
|
||||
|
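The HiveImport constructor now takes an explicit generateOnly flag and importTable() a createOnly flag; CodeGenTool, later in this commit, drives them exactly this way. A hedged sketch, assuming an already-initialized SqoopOptions and ConnManager:

// Sketch: emit the Hive CREATE TABLE script for a table without invoking Hive
// or loading data, mirroring how CodeGenTool uses the new signatures. Callers
// supply an initialized options object and connection manager.
import java.io.IOException;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ConnManager;

public class GenerateDdlOnly {
  public static void generateDdl(SqoopOptions options, ConnManager manager,
      String table) throws IOException {
    HiveImport hiveImport = new HiveImport(options, manager, options.getConf(),
        true /* generateOnly: write the script, do not run Hive */);
    hiveImport.importTable(table, options.getHiveTableName(),
        true /* createOnly: CREATE TABLE only, no LOAD DATA */);
  }
}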
@@ -19,6 +19,7 @@
|
||||
package org.apache.hadoop.sqoop.manager;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.sql.PreparedStatement;
|
||||
@@ -31,6 +32,7 @@
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.util.ImportException;
|
||||
|
||||
@@ -191,7 +193,8 @@ private void checkDateTimeBehavior(ImportJobContext context) {
|
||||
* @param stmt The SQL statement to execute
|
||||
* @return A ResultSet encapsulating the results or null on error
|
||||
*/
|
||||
protected ResultSet execute(String stmt, Object... args) throws SQLException {
|
||||
protected ResultSet execute(String stmt, Object... args)
|
||||
throws SQLException {
|
||||
// Free any previous resources.
|
||||
release();
|
||||
|
||||
@@ -210,6 +213,33 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
|
||||
return statement.executeQuery();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execAndPrint(String s) {
|
||||
// Override default execAndPrint() with a special version that forces
|
||||
// use of fully-buffered ResultSets (MySQLManager uses streaming ResultSets
|
||||
// in the default execute() method; but the execAndPrint() method needs to
|
||||
// issue overlapped queries for metadata.)
|
||||
|
||||
ResultSet results = null;
|
||||
try {
|
||||
// Use default execute() statement which does not issue the
|
||||
// MySQL-specific setFetchSize() command.
|
||||
results = super.execute(s);
|
||||
} catch (SQLException sqlE) {
|
||||
LOG.error("Error executing statement: "
|
||||
+ StringUtils.stringifyException(sqlE));
|
||||
release();
|
||||
return;
|
||||
}
|
||||
|
||||
PrintWriter pw = new PrintWriter(System.out, true);
|
||||
try {
|
||||
formatAndPrintResultSet(results, pw);
|
||||
} finally {
|
||||
pw.close();
|
||||
}
|
||||
}
|
||||
|
||||
public void release() {
|
||||
if (null != this.lastStatement) {
|
||||
try {
|
||||
|
@@ -29,6 +29,7 @@
|
||||
import org.apache.hadoop.sqoop.util.ResultSetPrinter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DatabaseMetaData;
|
||||
import java.sql.DriverManager;
|
||||
@@ -46,6 +47,7 @@
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.io.BytesWritable;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* ConnManager implementation for generic SQL-compliant database.
|
||||
@@ -407,36 +409,36 @@ public void close() throws SQLException {
|
||||
}
|
||||
|
||||
/**
|
||||
* Poor man's SQL query interface; used for debugging.
|
||||
* @param s
|
||||
* Prints the contents of a ResultSet to the specified PrintWriter.
|
||||
* The ResultSet is closed at the end of this method.
|
||||
* @param results the ResultSet to print.
|
||||
* @param pw the location to print the data to.
|
||||
*/
|
||||
public void execAndPrint(String s) {
|
||||
System.out.println("Executing statement: " + s);
|
||||
ResultSet results;
|
||||
try {
|
||||
results = execute(s);
|
||||
} catch (SQLException sqlE) {
|
||||
LOG.error("Error executing statement: " + sqlE.toString());
|
||||
release();
|
||||
return;
|
||||
}
|
||||
|
||||
protected void formatAndPrintResultSet(ResultSet results, PrintWriter pw) {
|
||||
try {
|
||||
try {
|
||||
int cols = results.getMetaData().getColumnCount();
|
||||
System.out.println("Got " + cols + " columns back");
|
||||
pw.println("Got " + cols + " columns back");
|
||||
if (cols > 0) {
|
||||
System.out.println("Schema: " + results.getMetaData().getSchemaName(1));
|
||||
System.out.println("Table: " + results.getMetaData().getTableName(1));
|
||||
ResultSetMetaData rsmd = results.getMetaData();
|
||||
String schema = rsmd.getSchemaName(1);
|
||||
String table = rsmd.getTableName(1);
|
||||
if (null != schema) {
|
||||
pw.println("Schema: " + schema);
|
||||
}
|
||||
|
||||
if (null != table) {
|
||||
pw.println("Table: " + table);
|
||||
}
|
||||
}
|
||||
} catch (SQLException sqlE) {
|
||||
LOG.error("SQLException reading result metadata: " + sqlE.toString());
|
||||
}
|
||||
|
||||
try {
|
||||
new ResultSetPrinter().printResultSet(System.out, results);
|
||||
new ResultSetPrinter().printResultSet(pw, results);
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("IOException writing results to stdout: " + ioe.toString());
|
||||
LOG.error("IOException writing results: " + ioe.toString());
|
||||
return;
|
||||
}
|
||||
} finally {
|
||||
@@ -451,6 +453,29 @@ public void execAndPrint(String s) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Poor man's SQL query interface; used for debugging.
|
||||
* @param s the SQL statement to execute.
|
||||
*/
|
||||
public void execAndPrint(String s) {
|
||||
ResultSet results = null;
|
||||
try {
|
||||
results = execute(s);
|
||||
} catch (SQLException sqlE) {
|
||||
LOG.error("Error executing statement: "
|
||||
+ StringUtils.stringifyException(sqlE));
|
||||
release();
|
||||
return;
|
||||
}
|
||||
|
||||
PrintWriter pw = new PrintWriter(System.out, true);
|
||||
try {
|
||||
formatAndPrintResultSet(results, pw);
|
||||
} finally {
|
||||
pw.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a connection to the database; usually used only from within
|
||||
* getConnection(), which enforces a singleton guarantee around the
|
||||
|
@@ -906,6 +906,7 @@ public StringBuilder generateClassForColumns(Map<String, Integer> columnTypes,
|
||||
sb.append("import java.sql.Timestamp;\n");
|
||||
sb.append("import java.util.Iterator;\n");
|
||||
sb.append("import java.util.List;\n");
|
||||
sb.append("\n");
|
||||
|
||||
String className = tableNameInfo.getShortClassForTable(tableName);
|
||||
sb.append("public class " + className
|
||||
|
@@ -37,6 +37,16 @@ public abstract class HadoopShim {
protected HadoopShim() {
}

/**
* Parse arguments in 'args' via the GenericOptionsParser and
* embed the results in the supplied configuration.
* @param conf the configuration to populate with generic options.
* @param args the arguments to process.
* @return the unused args to be passed to the application itself.
*/
public abstract String [] parseGenericOptions(Configuration conf,
String [] args) throws IOException;

/**
* @return the number of mapper output records from a job using its counters.
*/
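One plausible shim implementation of the new hook simply delegates to Hadoop's GenericOptionsParser; the real per-distribution shims under src/shims may differ in detail, and the remaining abstract methods of HadoopShim are omitted here.

// A plausible delegation to GenericOptionsParser; illustrative only.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class ExampleHadoopShim /* extends HadoopShim; other methods omitted */ {
  public String[] parseGenericOptions(Configuration conf, String[] args)
      throws IOException {
    // GenericOptionsParser consumes -D, -fs, -jt, etc. and stores them in conf;
    // whatever it does not recognize is returned for the tool to parse itself.
    GenericOptionsParser parser = new GenericOptionsParser(conf, args);
    return parser.getRemainingArgs();
  }
}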
633
src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
Normal file
@@ -0,0 +1,633 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.log4j.Category;
|
||||
import org.apache.log4j.Level;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import org.apache.hadoop.sqoop.ConnFactory;
|
||||
import org.apache.hadoop.sqoop.Sqoop;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
import org.apache.hadoop.sqoop.manager.ConnManager;
|
||||
import org.apache.hadoop.sqoop.shims.ShimLoader;
|
||||
|
||||
/**
|
||||
* Layer on top of SqoopTool that provides some basic common code
|
||||
* that most SqoopTool implementations will use.
|
||||
*
|
||||
* Subclasses should call init() at the top of their run() method,
|
||||
* and call destroy() at the end in a finally block.
|
||||
*/
|
||||
public abstract class BaseSqoopTool extends SqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(BaseSqoopTool.class.getName());
|
||||
|
||||
public static final String HELP_STR = "\nTry --help for usage instructions.";
|
||||
|
||||
// Here are all the arguments that are used by the standard sqoop tools.
|
||||
// Their names are recorded here so that tools can share them and use
// them consistently. The argument parser applies the leading '--' to each
|
||||
// string.
|
||||
public static final String CONNECT_STRING_ARG = "connect";
|
||||
public static final String DRIVER_ARG = "driver";
|
||||
public static final String USERNAME_ARG = "username";
|
||||
public static final String PASSWORD_ARG = "password";
|
||||
public static final String PASSWORD_PROMPT_ARG = "P";
|
||||
public static final String DIRECT_ARG = "direct";
|
||||
public static final String TABLE_ARG = "table";
|
||||
public static final String COLUMNS_ARG = "columns";
|
||||
public static final String SPLIT_BY_ARG = "split-by";
|
||||
public static final String WHERE_ARG = "where";
|
||||
public static final String HADOOP_HOME_ARG = "hadoop-home";
|
||||
public static final String HIVE_HOME_ARG = "hive-home";
|
||||
public static final String WAREHOUSE_DIR_ARG = "warehouse-dir";
|
||||
public static final String FMT_SEQUENCEFILE_ARG = "as-sequencefile";
|
||||
public static final String FMT_TEXTFILE_ARG = "as-textfile";
|
||||
public static final String HIVE_IMPORT_ARG = "hive-import";
|
||||
public static final String HIVE_TABLE_ARG = "hive-table";
|
||||
public static final String HIVE_OVERWRITE_ARG = "hive-overwrite";
|
||||
public static final String NUM_MAPPERS_ARG = "num-mappers";
|
||||
public static final String NUM_MAPPERS_SHORT_ARG = "m";
|
||||
public static final String COMPRESS_ARG = "compress";
|
||||
public static final String COMPRESS_SHORT_ARG = "z";
|
||||
public static final String DIRECT_SPLIT_SIZE_ARG = "direct-split-size";
|
||||
public static final String INLINE_LOB_LIMIT_ARG = "inline-lob-limit";
|
||||
public static final String EXPORT_PATH_ARG = "export-dir";
|
||||
public static final String FIELDS_TERMINATED_BY_ARG = "fields-terminated-by";
|
||||
public static final String LINES_TERMINATED_BY_ARG = "lines-terminated-by";
|
||||
public static final String OPTIONALLY_ENCLOSED_BY_ARG =
|
||||
"optionally-enclosed-by";
|
||||
public static final String ENCLOSED_BY_ARG = "enclosed-by";
|
||||
public static final String ESCAPED_BY_ARG = "escaped-by";
|
||||
public static final String MYSQL_DELIMITERS_ARG = "mysql-delimiters";
|
||||
public static final String INPUT_FIELDS_TERMINATED_BY_ARG =
|
||||
"input-fields-terminated-by";
|
||||
public static final String INPUT_LINES_TERMINATED_BY_ARG =
|
||||
"input-lines-terminated-by";
|
||||
public static final String INPUT_OPTIONALLY_ENCLOSED_BY_ARG =
|
||||
"input-optionally-enclosed-by";
|
||||
public static final String INPUT_ENCLOSED_BY_ARG = "input-enclosed-by";
|
||||
public static final String INPUT_ESCAPED_BY_ARG = "input-escaped-by";
|
||||
public static final String CODE_OUT_DIR_ARG = "outdir";
|
||||
public static final String BIN_OUT_DIR_ARG = "bindir";
|
||||
public static final String PACKAGE_NAME_ARG = "package-name";
|
||||
public static final String CLASS_NAME_ARG = "class-name";
|
||||
public static final String JAR_FILE_NAME_ARG = "jar-file";
|
||||
public static final String DEBUG_SQL_ARG = "expr";
|
||||
public static final String DEBUG_SQL_SHORT_ARG = "e";
|
||||
public static final String VERBOSE_ARG = "verbose";
|
||||
public static final String HELP_ARG = "help";
|
||||
|
||||
|
||||
public BaseSqoopTool() {
|
||||
}
|
||||
|
||||
public BaseSqoopTool(String toolName) {
|
||||
super(toolName);
|
||||
}
|
||||
|
||||
protected ConnManager manager;
|
||||
|
||||
public ConnManager getManager() {
|
||||
return manager;
|
||||
}
|
||||
|
||||
protected void setManager(ConnManager mgr) {
|
||||
this.manager = mgr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should be called at the beginning of the run() method to initialize
|
||||
* the connection manager, etc. If this succeeds (returns true), it should
|
||||
* be paired with a call to destroy().
|
||||
* @return true on success, false on failure.
|
||||
*/
|
||||
protected boolean init(SqoopOptions sqoopOpts) {
|
||||
|
||||
// Make sure shim jar is classloaded early.
|
||||
ShimLoader.getHadoopShim(sqoopOpts.getConf());
|
||||
|
||||
// Get the connection to the database.
|
||||
try {
|
||||
this.manager = new ConnFactory(sqoopOpts.getConf()).getManager(sqoopOpts);
|
||||
return true;
|
||||
} catch (Exception e) {
|
||||
LOG.error("Got error creating database manager: "
|
||||
+ StringUtils.stringifyException(e));
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should be called in a 'finally' block at the end of the run() method.
|
||||
*/
|
||||
protected void destroy(SqoopOptions sqoopOpts) {
|
||||
if (null != manager) {
|
||||
try {
|
||||
manager.close();
|
||||
} catch (SQLException sqlE) {
|
||||
LOG.warn("Error while closing connection: " + sqlE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Examines a subset of the array presented, and determines if it
|
||||
* contains any non-empty arguments. If so, logs the arguments
|
||||
* and returns true.
|
||||
*
|
||||
* @param argv an array of strings to check.
|
||||
* @param offset the first element of the array to check
|
||||
* @param len the number of elements to check
|
||||
* @return true if there are any non-null, non-empty argument strings
|
||||
* present.
|
||||
*/
|
||||
protected boolean hasUnrecognizedArgs(String [] argv, int offset, int len) {
|
||||
if (argv == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean unrecognized = false;
|
||||
boolean printedBanner = false;
|
||||
for (int i = offset; i < Math.min(argv.length, offset + len); i++) {
|
||||
if (argv[i] != null && argv[i].length() > 0) {
|
||||
if (!printedBanner) {
|
||||
LOG.error("Error parsing arguments for " + getToolName() + ":");
|
||||
printedBanner = true;
|
||||
}
|
||||
LOG.error("Unrecognized argument: " + argv[i]);
|
||||
unrecognized = true;
|
||||
}
|
||||
}
|
||||
|
||||
return unrecognized;
|
||||
}
|
||||
|
||||
protected boolean hasUnrecognizedArgs(String [] argv) {
|
||||
if (null == argv) {
|
||||
return false;
|
||||
}
|
||||
return hasUnrecognizedArgs(argv, 0, argv.length);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* If argv contains an entry "--", return an array containing all elements
|
||||
* after the "--" separator. Otherwise, return null.
|
||||
* @param argv a set of arguments to scan for the subcommand arguments.
|
||||
*/
|
||||
protected String [] getSubcommandArgs(String [] argv) {
|
||||
if (null == argv) {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (int i = 0; i < argv.length; i++) {
|
||||
if (argv[i].equals("--")) {
|
||||
return Arrays.copyOfRange(argv, i + 1, argv.length);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return RelatedOptions used by most/all Sqoop tools.
|
||||
*/
|
||||
protected RelatedOptions getCommonOptions() {
|
||||
// Connection args (common)
|
||||
RelatedOptions commonOpts = new RelatedOptions("Common arguments");
|
||||
commonOpts.addOption(OptionBuilder.withArgName("jdbc-uri")
|
||||
.hasArg().withDescription("Specify JDBC connect string")
|
||||
.withLongOpt(CONNECT_STRING_ARG)
|
||||
.create());
|
||||
commonOpts.addOption(OptionBuilder.withArgName("class-name")
|
||||
.hasArg().withDescription("Manually specify JDBC driver class to use")
|
||||
.withLongOpt(DRIVER_ARG)
|
||||
.create());
|
||||
commonOpts.addOption(OptionBuilder.withArgName("username")
|
||||
.hasArg().withDescription("Set authentication username")
|
||||
.withLongOpt(USERNAME_ARG)
|
||||
.create());
|
||||
commonOpts.addOption(OptionBuilder.withArgName("password")
|
||||
.hasArg().withDescription("Set authentication password")
|
||||
.withLongOpt(PASSWORD_ARG)
|
||||
.create());
|
||||
commonOpts.addOption(OptionBuilder.withDescription("Read password from console")
|
||||
.create(PASSWORD_PROMPT_ARG));
|
||||
|
||||
commonOpts.addOption(OptionBuilder.withArgName("dir")
|
||||
.hasArg().withDescription("Override $HADOOP_HOME")
|
||||
.withLongOpt(HADOOP_HOME_ARG)
|
||||
.create());
|
||||
|
||||
// misc (common)
|
||||
commonOpts.addOption(OptionBuilder
|
||||
.withDescription("Print more information while working")
|
||||
.withLongOpt(VERBOSE_ARG)
|
||||
.create());
|
||||
commonOpts.addOption(OptionBuilder
|
||||
.withDescription("Print usage instructions")
|
||||
.withLongOpt(HELP_ARG)
|
||||
.create());
|
||||
|
||||
return commonOpts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param explicitHiveImport true if the user has an explicit --hive-import
|
||||
* available, or false if this is implied by the tool.
|
||||
* @return options governing interaction with Hive
|
||||
*/
|
||||
protected RelatedOptions getHiveOptions(boolean explicitHiveImport) {
|
||||
RelatedOptions hiveOpts = new RelatedOptions("Hive arguments");
|
||||
if (explicitHiveImport) {
|
||||
hiveOpts.addOption(OptionBuilder
|
||||
.withDescription("Import tables into Hive "
|
||||
+ "(Uses Hive's default delimiters if none are set.)")
|
||||
.withLongOpt(HIVE_IMPORT_ARG)
|
||||
.create());
|
||||
}
|
||||
|
||||
hiveOpts.addOption(OptionBuilder.withArgName("dir")
|
||||
.hasArg().withDescription("Override $HIVE_HOME")
|
||||
.withLongOpt(HIVE_HOME_ARG)
|
||||
.create());
|
||||
hiveOpts.addOption(OptionBuilder
|
||||
.withDescription("Overwrite existing data in the Hive table")
|
||||
.withLongOpt(HIVE_OVERWRITE_ARG)
|
||||
.create());
|
||||
hiveOpts.addOption(OptionBuilder.withArgName("table-name")
|
||||
.hasArg()
|
||||
.withDescription("Sets the table name to use when importing to hive")
|
||||
.withLongOpt(HIVE_TABLE_ARG)
|
||||
.create());
|
||||
|
||||
return hiveOpts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return options governing output format delimiters
|
||||
*/
|
||||
protected RelatedOptions getOutputFormatOptions() {
|
||||
RelatedOptions formatOpts = new RelatedOptions(
|
||||
"Output line formatting arguments");
|
||||
formatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets the field separator character")
|
||||
.withLongOpt(FIELDS_TERMINATED_BY_ARG)
|
||||
.create());
|
||||
formatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets the end-of-line character")
|
||||
.withLongOpt(LINES_TERMINATED_BY_ARG)
|
||||
.create());
|
||||
formatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets a field enclosing character")
|
||||
.withLongOpt(OPTIONALLY_ENCLOSED_BY_ARG)
|
||||
.create());
|
||||
formatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets a required field enclosing character")
|
||||
.withLongOpt(ENCLOSED_BY_ARG)
|
||||
.create());
|
||||
formatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets the escape character")
|
||||
.withLongOpt(ESCAPED_BY_ARG)
|
||||
.create());
|
||||
formatOpts.addOption(OptionBuilder
|
||||
.withDescription("Uses MySQL's default delimiter set: "
|
||||
+ "fields: , lines: \\n escaped-by: \\ optionally-enclosed-by: '")
|
||||
.withLongOpt(MYSQL_DELIMITERS_ARG)
|
||||
.create());
|
||||
|
||||
return formatOpts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return options governing input format delimiters
|
||||
*/
|
||||
protected RelatedOptions getInputFormatOptions() {
|
||||
RelatedOptions inputFormatOpts = new RelatedOptions("Input parsing arguments");
|
||||
inputFormatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets the input field separator")
|
||||
.withLongOpt(INPUT_FIELDS_TERMINATED_BY_ARG)
|
||||
.create());
|
||||
inputFormatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets the input end-of-line char")
|
||||
.withLongOpt(INPUT_LINES_TERMINATED_BY_ARG)
|
||||
.create());
|
||||
inputFormatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets a field enclosing character")
|
||||
.withLongOpt(INPUT_OPTIONALLY_ENCLOSED_BY_ARG)
|
||||
.create());
|
||||
inputFormatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets a required field encloser")
|
||||
.withLongOpt(INPUT_ENCLOSED_BY_ARG)
|
||||
.create());
|
||||
inputFormatOpts.addOption(OptionBuilder.withArgName("char")
|
||||
.hasArg()
|
||||
.withDescription("Sets the input escape character")
|
||||
.withLongOpt(INPUT_ESCAPED_BY_ARG)
|
||||
.create());
|
||||
|
||||
return inputFormatOpts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param multiTable true if these options will be used for bulk code-gen.
|
||||
* @return options related to code generation.
|
||||
*/
|
||||
protected RelatedOptions getCodeGenOpts(boolean multiTable) {
|
||||
RelatedOptions codeGenOpts = new RelatedOptions("Code generation arguments");
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("dir")
|
||||
.hasArg()
|
||||
.withDescription("Output directory for generated code")
|
||||
.withLongOpt(CODE_OUT_DIR_ARG)
|
||||
.create());
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("dir")
|
||||
.hasArg()
|
||||
.withDescription("Output directory for compiled objects")
|
||||
.withLongOpt(BIN_OUT_DIR_ARG)
|
||||
.create());
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("name")
|
||||
.hasArg()
|
||||
.withDescription("Put auto-generated classes in this package")
|
||||
.withLongOpt(PACKAGE_NAME_ARG)
|
||||
.create());
|
||||
if (!multiTable) {
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("name")
|
||||
.hasArg()
|
||||
.withDescription("Sets the generated class name."
|
||||
+ "This overrides --" + PACKAGE_NAME_ARG + ". When combined "
|
||||
+ "with --" + JAR_FILE_NAME_ARG + ", sets the input class.")
|
||||
.withLongOpt(CLASS_NAME_ARG)
|
||||
.create());
|
||||
}
|
||||
return codeGenOpts;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Apply common command-line options to the state.
|
||||
*/
|
||||
protected void applyCommonOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
// common options.
|
||||
if (in.hasOption(VERBOSE_ARG)) {
|
||||
// Immediately switch into DEBUG logging.
|
||||
Category sqoopLogger = Logger.getLogger(
|
||||
Sqoop.class.getName()).getParent();
|
||||
sqoopLogger.setLevel(Level.DEBUG);
|
||||
LOG.debug("Enabled debug logging.");
|
||||
}
|
||||
|
||||
if (in.hasOption(HELP_ARG)) {
|
||||
ToolOptions toolOpts = new ToolOptions();
|
||||
configureOptions(toolOpts);
|
||||
printHelp(toolOpts);
|
||||
throw new InvalidOptionsException("");
|
||||
}
|
||||
|
||||
if (in.hasOption(CONNECT_STRING_ARG)) {
|
||||
out.setConnectString(in.getOptionValue(CONNECT_STRING_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(DRIVER_ARG)) {
|
||||
out.setDriverClassName(in.getOptionValue(DRIVER_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(USERNAME_ARG)) {
|
||||
out.setUsername(in.getOptionValue(USERNAME_ARG));
|
||||
if (null == out.getPassword()) {
|
||||
// Set password to empty if the username is set first,
|
||||
// to ensure that they're either both null or neither is.
|
||||
out.setPassword("");
|
||||
}
|
||||
}
|
||||
|
||||
if (in.hasOption(PASSWORD_ARG)) {
|
||||
LOG.warn("Setting your password on the command-line is insecure. "
|
||||
+ "Consider using -" + PASSWORD_PROMPT_ARG + " instead.");
|
||||
out.setPassword(in.getOptionValue(PASSWORD_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(PASSWORD_PROMPT_ARG)) {
|
||||
out.setPasswordFromConsole();
|
||||
}
|
||||
|
||||
if (in.hasOption(HADOOP_HOME_ARG)) {
|
||||
out.setHadoopHome(in.getOptionValue(HADOOP_HOME_ARG));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected void applyHiveOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (in.hasOption(HIVE_HOME_ARG)) {
|
||||
out.setHiveHome(in.getOptionValue(HIVE_HOME_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(HIVE_IMPORT_ARG)) {
|
||||
out.setHiveImport(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(HIVE_OVERWRITE_ARG)) {
|
||||
out.setOverwriteHiveTable(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(HIVE_TABLE_ARG)) {
|
||||
out.setHiveTableName(in.getOptionValue(HIVE_TABLE_ARG));
|
||||
}
|
||||
}
|
||||
|
||||
protected void applyOutputFormatOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
if (in.hasOption(FIELDS_TERMINATED_BY_ARG)) {
|
||||
out.setFieldsTerminatedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(FIELDS_TERMINATED_BY_ARG)));
|
||||
out.setExplicitDelims(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(LINES_TERMINATED_BY_ARG)) {
|
||||
out.setLinesTerminatedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(LINES_TERMINATED_BY_ARG)));
|
||||
out.setExplicitDelims(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(OPTIONALLY_ENCLOSED_BY_ARG)) {
|
||||
out.setEnclosedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(OPTIONALLY_ENCLOSED_BY_ARG)));
|
||||
out.setOutputEncloseRequired(false);
|
||||
out.setExplicitDelims(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(ENCLOSED_BY_ARG)) {
|
||||
out.setEnclosedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(ENCLOSED_BY_ARG)));
|
||||
out.setOutputEncloseRequired(true);
|
||||
out.setExplicitDelims(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(ESCAPED_BY_ARG)) {
|
||||
out.setEscapedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(ESCAPED_BY_ARG)));
|
||||
out.setExplicitDelims(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(MYSQL_DELIMITERS_ARG)) {
|
||||
out.setOutputEncloseRequired(false);
|
||||
out.setFieldsTerminatedBy(',');
|
||||
out.setLinesTerminatedBy('\n');
|
||||
out.setEscapedBy('\\');
|
||||
out.setEnclosedBy('\'');
|
||||
out.setExplicitDelims(true);
|
||||
}
|
||||
}
|
||||
|
||||
protected void applyInputFormatOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
if (in.hasOption(INPUT_FIELDS_TERMINATED_BY_ARG)) {
|
||||
out.setInputFieldsTerminatedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(INPUT_FIELDS_TERMINATED_BY_ARG)));
|
||||
}
|
||||
|
||||
if (in.hasOption(INPUT_LINES_TERMINATED_BY_ARG)) {
|
||||
out.setInputLinesTerminatedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(INPUT_LINES_TERMINATED_BY_ARG)));
|
||||
}
|
||||
|
||||
if (in.hasOption(INPUT_OPTIONALLY_ENCLOSED_BY_ARG)) {
|
||||
out.setInputEnclosedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(INPUT_OPTIONALLY_ENCLOSED_BY_ARG)));
|
||||
out.setInputEncloseRequired(false);
|
||||
}
|
||||
|
||||
if (in.hasOption(INPUT_ENCLOSED_BY_ARG)) {
|
||||
out.setInputEnclosedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(INPUT_ENCLOSED_BY_ARG)));
|
||||
out.setInputEncloseRequired(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(INPUT_ESCAPED_BY_ARG)) {
|
||||
out.setInputEscapedBy(SqoopOptions.toChar(
|
||||
in.getOptionValue(INPUT_ESCAPED_BY_ARG)));
|
||||
}
|
||||
}
|
||||
|
||||
protected void applyCodeGenOptions(CommandLine in, SqoopOptions out,
|
||||
boolean multiTable) throws InvalidOptionsException {
|
||||
if (in.hasOption(CODE_OUT_DIR_ARG)) {
|
||||
out.setCodeOutputDir(in.getOptionValue(CODE_OUT_DIR_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(BIN_OUT_DIR_ARG)) {
|
||||
out.setJarOutputDir(in.getOptionValue(BIN_OUT_DIR_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(PACKAGE_NAME_ARG)) {
|
||||
out.setPackageName(in.getOptionValue(PACKAGE_NAME_ARG));
|
||||
}
|
||||
|
||||
if (!multiTable && in.hasOption(CLASS_NAME_ARG)) {
|
||||
out.setClassName(in.getOptionValue(CLASS_NAME_ARG));
|
||||
}
|
||||
}
|
||||
|
||||
protected void validateCommonOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
if (options.getConnectString() == null) {
|
||||
throw new InvalidOptionsException(
|
||||
"Error: Required argument --connect is missing."
|
||||
+ HELP_STR);
|
||||
}
|
||||
}
|
||||
|
||||
protected void validateCodeGenOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
if (options.getClassName() != null && options.getPackageName() != null) {
|
||||
throw new InvalidOptionsException(
|
||||
"--class-name overrides --package-name. You cannot use both."
|
||||
+ HELP_STR);
|
||||
}
|
||||
}
|
||||
|
||||
protected void validateOutputFormatOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
if (options.doHiveImport()) {
|
||||
if (!options.explicitDelims()) {
|
||||
// user hasn't manually specified delimiters, and wants to import
|
||||
// straight to Hive. Use Hive-style delimiters.
|
||||
LOG.info("Using Hive-specific delimiters for output. You can override");
|
||||
LOG.info("delimiters with --fields-terminated-by, etc.");
|
||||
options.setFieldsTerminatedBy((char) 0x1); // ^A
|
||||
options.setLinesTerminatedBy('\n');
|
||||
options.setEnclosedBy('\000'); // no enclosing in Hive.
|
||||
options.setEscapedBy('\000'); // no escaping in Hive.
|
||||
options.setOutputEncloseRequired(false);
|
||||
}
|
||||
|
||||
if (options.getOutputEscapedBy() != '\000') {
|
||||
LOG.warn("Hive does not support escape characters in fields;");
|
||||
LOG.warn("parse errors in Hive may result from using --escaped-by.");
|
||||
}
|
||||
|
||||
if (options.getOutputEnclosedBy() != '\000') {
|
||||
LOG.warn("Hive does not support quoted strings; parse errors");
|
||||
LOG.warn("in Hive may result from using --enclosed-by.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void validateHiveOptions(SqoopOptions options) {
|
||||
// Empty; this method is present to maintain API consistency, and
|
||||
// is reserved for future constraints on Hive options.
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
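A sketch of a minimal tool built on this scaffolding, following the same init()/destroy() pattern that EvalSqlTool uses below. The "ping" tool name and its SELECT 1 query are hypothetical, and wiring a tool into the Sqoop wrapper is not shown in this file.

// Hypothetical minimal tool; illustrates the init()/run()/destroy() contract.
package org.apache.hadoop.sqoop.tool;

import org.apache.commons.cli.CommandLine;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.cli.ToolOptions;

public class PingTool extends BaseSqoopTool {
  public PingTool() {
    super("ping");
  }

  @Override
  public int run(SqoopOptions options) {
    if (!init(options)) {                      // sets up the ConnManager
      return 1;
    }
    try {
      getManager().execAndPrint("SELECT 1");   // trivial connectivity check
    } finally {
      destroy(options);                        // always close the connection
    }
    return 0;
  }

  @Override
  public void configureOptions(ToolOptions toolOptions) {
    toolOptions.addUniqueOptions(getCommonOptions());
  }

  @Override
  public void applyOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {
    applyCommonOptions(in, out);
  }

  @Override
  public void validateOptions(SqoopOptions options)
      throws InvalidOptionsException {
    validateCommonOptions(options);
  }
}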
193
src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
Normal file
@@ -0,0 +1,193 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import org.apache.hadoop.sqoop.Sqoop;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
import org.apache.hadoop.sqoop.hive.HiveImport;
|
||||
import org.apache.hadoop.sqoop.manager.ImportJobContext;
|
||||
import org.apache.hadoop.sqoop.orm.ClassWriter;
|
||||
import org.apache.hadoop.sqoop.orm.CompilationManager;
|
||||
import org.apache.hadoop.sqoop.util.ImportException;
|
||||
|
||||
/**
|
||||
* Tool that generates code from a database schema.
|
||||
*/
|
||||
public class CodeGenTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(CodeGenTool.class.getName());
|
||||
|
||||
private List<String> generatedJarFiles;
|
||||
|
||||
public CodeGenTool() {
|
||||
super("codegen");
|
||||
generatedJarFiles = new ArrayList<String>();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a list of jar files generated as part of this import process
|
||||
*/
|
||||
public List<String> getGeneratedJarFiles() {
|
||||
ArrayList<String> out = new ArrayList<String>(generatedJarFiles);
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate the .class and .jar files
|
||||
* @return the filename of the emitted jar file.
|
||||
* @throws IOException
|
||||
*/
|
||||
public String generateORM(SqoopOptions options, String tableName)
|
||||
throws IOException {
|
||||
String existingJar = options.getExistingJarName();
|
||||
if (existingJar != null) {
|
||||
// This code generator is being invoked as part of an import or export
|
||||
// process, and the user has pre-specified a jar and class to use.
|
||||
// Don't generate.
|
||||
LOG.info("Using existing jar: " + existingJar);
|
||||
return existingJar;
|
||||
}
|
||||
|
||||
LOG.info("Beginning code generation");
|
||||
CompilationManager compileMgr = new CompilationManager(options);
|
||||
ClassWriter classWriter = new ClassWriter(options, manager, tableName,
|
||||
compileMgr);
|
||||
classWriter.generate();
|
||||
compileMgr.compile();
|
||||
compileMgr.jar();
|
||||
String jarFile = compileMgr.getJarFilename();
|
||||
this.generatedJarFiles.add(jarFile);
|
||||
return jarFile;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
generateORM(options, options.getTableName());
|
||||
|
||||
// If the user has also specified Hive import code generation,
|
||||
// use a HiveImport to generate the DDL statements and write
|
||||
// them to files (but don't actually perform the import -- thus
|
||||
// the generateOnly=true in the constructor).
|
||||
if (options.doHiveImport()) {
|
||||
HiveImport hiveImport = new HiveImport(options, manager,
|
||||
options.getConf(), true);
|
||||
hiveImport.importTable(options.getTableName(),
|
||||
options.getHiveTableName(), true);
|
||||
}
|
||||
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running codegen job: "
|
||||
+ StringUtils.stringifyException(ioe));
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
|
||||
RelatedOptions codeGenOpts = getCodeGenOpts(false);
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("table-name")
|
||||
.hasArg()
|
||||
.withDescription("Table to generate code for")
|
||||
.withLongOpt(TABLE_ARG)
|
||||
.create());
|
||||
toolOptions.addUniqueOptions(codeGenOpts);
|
||||
|
||||
toolOptions.addUniqueOptions(getOutputFormatOptions());
|
||||
toolOptions.addUniqueOptions(getInputFormatOptions());
|
||||
toolOptions.addUniqueOptions(getHiveOptions(true));
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void printHelp(ToolOptions toolOptions) {
|
||||
super.printHelp(toolOptions);
|
||||
System.out.println("");
|
||||
System.out.println(
|
||||
"At minimum, you must specify --connect and --table");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (in.hasOption(TABLE_ARG)) {
|
||||
out.setTableName(in.getOptionValue(TABLE_ARG));
|
||||
}
|
||||
|
||||
applyCommonOptions(in, out);
|
||||
applyOutputFormatOptions(in, out);
|
||||
applyInputFormatOptions(in, out);
|
||||
applyCodeGenOptions(in, out, false);
|
||||
applyHiveOptions(in, out);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (hasUnrecognizedArgs(extraArguments)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
|
||||
validateCommonOptions(options);
|
||||
validateCodeGenOptions(options);
|
||||
validateOutputFormatOptions(options);
|
||||
validateHiveOptions(options);
|
||||
|
||||
if (options.getTableName() == null) {
|
||||
throw new InvalidOptionsException(
|
||||
"--table is required for code generation." + HELP_STR);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
142
src/java/org/apache/hadoop/sqoop/tool/CreateHiveTableTool.java
Normal file
@@ -0,0 +1,142 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import org.apache.hadoop.sqoop.Sqoop;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
import org.apache.hadoop.sqoop.hive.HiveImport;
|
||||
import org.apache.hadoop.sqoop.manager.ImportJobContext;
|
||||
import org.apache.hadoop.sqoop.orm.ClassWriter;
|
||||
import org.apache.hadoop.sqoop.orm.CompilationManager;
|
||||
import org.apache.hadoop.sqoop.util.ImportException;
|
||||
|
||||
/**
|
||||
* Tool that creates a Hive table definition.
|
||||
*/
|
||||
public class CreateHiveTableTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(
|
||||
CreateHiveTableTool.class.getName());
|
||||
|
||||
public CreateHiveTableTool() {
|
||||
super("create-hive-table");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
HiveImport hiveImport = new HiveImport(options, manager,
|
||||
options.getConf(), false);
|
||||
hiveImport.importTable(options.getTableName(),
|
||||
options.getHiveTableName(), true);
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running create table job: "
|
||||
+ StringUtils.stringifyException(ioe));
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
|
||||
RelatedOptions hiveOpts = getHiveOptions(false);
|
||||
hiveOpts.addOption(OptionBuilder.withArgName("table-name")
|
||||
.hasArg()
|
||||
.withDescription("The db table to read the definition from")
|
||||
.withLongOpt(TABLE_ARG)
|
||||
.create());
|
||||
toolOptions.addUniqueOptions(hiveOpts);
|
||||
|
||||
toolOptions.addUniqueOptions(getOutputFormatOptions());
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void printHelp(ToolOptions toolOptions) {
|
||||
super.printHelp(toolOptions);
|
||||
System.out.println("");
|
||||
System.out.println(
|
||||
"At minimum, you must specify --connect and --table");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (in.hasOption(TABLE_ARG)) {
|
||||
out.setTableName(in.getOptionValue(TABLE_ARG));
|
||||
}
|
||||
|
||||
out.setHiveImport(true);
|
||||
|
||||
applyCommonOptions(in, out);
|
||||
applyHiveOptions(in, out);
|
||||
applyOutputFormatOptions(in, out);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (hasUnrecognizedArgs(extraArguments)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
|
||||
validateCommonOptions(options);
|
||||
validateOutputFormatOptions(options);
|
||||
validateHiveOptions(options);
|
||||
|
||||
if (options.getTableName() == null) {
|
||||
throw new InvalidOptionsException(
|
||||
"--table is required for table definition importing." + HELP_STR);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
104
src/java/org/apache/hadoop/sqoop/tool/EvalSqlTool.java
Normal file
@@ -0,0 +1,104 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
|
||||
/**
|
||||
* Tool that evaluates a SQL statement and displays the results.
|
||||
*/
|
||||
public class EvalSqlTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(EvalSqlTool.class.getName());
|
||||
|
||||
public EvalSqlTool() {
|
||||
super("eval");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
// just run a SQL statement for debugging purposes.
|
||||
manager.execAndPrint(options.getDebugSqlCmd());
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
|
||||
RelatedOptions evalOpts = new RelatedOptions("SQL evaluation arguments");
|
||||
evalOpts.addOption(OptionBuilder.withArgName("statement")
|
||||
.hasArg()
|
||||
.withDescription("Execute 'statement' in SQL and exit")
|
||||
.withLongOpt(DEBUG_SQL_ARG)
|
||||
.create(DEBUG_SQL_SHORT_ARG));
|
||||
|
||||
toolOptions.addUniqueOptions(evalOpts);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
applyCommonOptions(in, out);
|
||||
if (in.hasOption(DEBUG_SQL_ARG)) {
|
||||
out.setDebugSqlCmd(in.getOptionValue(DEBUG_SQL_ARG));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (hasUnrecognizedArgs(extraArguments)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
|
||||
String sqlCmd = options.getDebugSqlCmd();
|
||||
if (null == sqlCmd || sqlCmd.length() == 0) {
|
||||
throw new InvalidOptionsException(
|
||||
"This command requires the " + DEBUG_SQL_ARG + " argument."
|
||||
+ HELP_STR);
|
||||
}
|
||||
|
||||
validateCommonOptions(options);
|
||||
}
|
||||
}
|
||||
|
265
src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
Normal file
@ -0,0 +1,265 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import org.apache.hadoop.sqoop.Sqoop;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
import org.apache.hadoop.sqoop.hive.HiveImport;
|
||||
import org.apache.hadoop.sqoop.manager.ConnManager;
|
||||
import org.apache.hadoop.sqoop.manager.ExportJobContext;
|
||||
import org.apache.hadoop.sqoop.manager.ImportJobContext;
|
||||
import org.apache.hadoop.sqoop.orm.ClassWriter;
|
||||
import org.apache.hadoop.sqoop.orm.CompilationManager;
|
||||
import org.apache.hadoop.sqoop.shims.ShimLoader;
|
||||
import org.apache.hadoop.sqoop.tool.SqoopTool;
|
||||
import org.apache.hadoop.sqoop.util.ExportException;
|
||||
import org.apache.hadoop.sqoop.util.ImportException;
|
||||
|
||||
/**
|
||||
* Tool that performs HDFS exports to databases.
|
||||
*/
|
||||
public class ExportTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(ExportTool.class.getName());
|
||||
|
||||
private CodeGenTool codeGenerator;
|
||||
|
||||
public ExportTool() {
|
||||
super("export");
|
||||
this.codeGenerator = new CodeGenTool();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a list of jar files generated as part of this im/export process
|
||||
*/
|
||||
public List<String> getGeneratedJarFiles() {
|
||||
return codeGenerator.getGeneratedJarFiles();
|
||||
}
|
||||
|
||||
private void exportTable(SqoopOptions options, String tableName)
|
||||
throws ExportException, IOException {
|
||||
String jarFile = null;
|
||||
|
||||
// Generate the ORM code for the tables.
|
||||
jarFile = codeGenerator.generateORM(options, tableName);
|
||||
|
||||
ExportJobContext context = new ExportJobContext(tableName, jarFile, options);
|
||||
manager.exportTable(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
codeGenerator.setManager(manager);
|
||||
|
||||
try {
|
||||
exportTable(options, options.getTableName());
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running export job: " + ioe.toString());
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} catch (ExportException ee) {
|
||||
LOG.error("Error during export: " + ee.toString());
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ee);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct the set of options that control exports
|
||||
* @return the RelatedOptions that can be used to parse the export
|
||||
* arguments.
|
||||
*/
|
||||
protected RelatedOptions getExportOptions() {
|
||||
RelatedOptions exportOpts = new RelatedOptions("Export control arguments");
|
||||
|
||||
exportOpts.addOption(OptionBuilder
|
||||
.withDescription("Use direct export fast path")
|
||||
.withLongOpt(DIRECT_ARG)
|
||||
.create());
|
||||
exportOpts.addOption(OptionBuilder.withArgName("table-name")
|
||||
.hasArg().withDescription("Table to populate")
|
||||
.withLongOpt(TABLE_ARG)
|
||||
.create());
|
||||
exportOpts.addOption(OptionBuilder.withArgName("n")
|
||||
.hasArg().withDescription("Use 'n' map tasks to export in parallel")
|
||||
.withLongOpt(NUM_MAPPERS_ARG)
|
||||
.create(NUM_MAPPERS_SHORT_ARG));
|
||||
exportOpts.addOption(OptionBuilder.withArgName("dir")
|
||||
.hasArg()
|
||||
.withDescription("HDFS source path for the export")
|
||||
.withLongOpt(EXPORT_PATH_ARG)
|
||||
.create());
|
||||
|
||||
return exportOpts;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
toolOptions.addUniqueOptions(getExportOptions());
|
||||
|
||||
// Input parsing delimiters
|
||||
toolOptions.addUniqueOptions(getInputFormatOptions());
|
||||
|
||||
// Used when sending data to a direct-mode export.
|
||||
toolOptions.addUniqueOptions(getOutputFormatOptions());
|
||||
|
||||
// get common codegen opts.
|
||||
RelatedOptions codeGenOpts = getCodeGenOpts(false);
|
||||
|
||||
// add export-specific codegen opts:
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("file")
|
||||
.hasArg()
|
||||
.withDescription("Disable code generation; use specified jar")
|
||||
.withLongOpt(JAR_FILE_NAME_ARG)
|
||||
.create());
|
||||
|
||||
toolOptions.addUniqueOptions(codeGenOpts);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void printHelp(ToolOptions toolOptions) {
|
||||
super.printHelp(toolOptions);
|
||||
System.out.println("");
|
||||
System.out.println(
|
||||
"At minimum, you must specify --connect, --export-dir, and --table");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
try {
|
||||
applyCommonOptions(in, out);
|
||||
|
||||
if (in.hasOption(DIRECT_ARG)) {
|
||||
out.setDirectMode(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(TABLE_ARG)) {
|
||||
out.setTableName(in.getOptionValue(TABLE_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(NUM_MAPPERS_ARG)) {
|
||||
out.setNumMappers(Integer.parseInt(in.getOptionValue(NUM_MAPPERS_ARG)));
|
||||
}
|
||||
|
||||
if (in.hasOption(EXPORT_PATH_ARG)) {
|
||||
out.setExportDir(in.getOptionValue(EXPORT_PATH_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(JAR_FILE_NAME_ARG)) {
|
||||
out.setExistingJarName(in.getOptionValue(JAR_FILE_NAME_ARG));
|
||||
}
|
||||
|
||||
applyInputFormatOptions(in, out);
|
||||
applyOutputFormatOptions(in, out);
|
||||
applyCodeGenOptions(in, out, false);
|
||||
} catch (NumberFormatException nfe) {
|
||||
throw new InvalidOptionsException("Error: expected numeric argument.\n"
|
||||
+ "Try --help for usage.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate export-specific arguments.
|
||||
* @param options the configured SqoopOptions to check
|
||||
*/
|
||||
protected void validateExportOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
if (options.getTableName() == null) {
|
||||
throw new InvalidOptionsException("Export requires a --table argument."
|
||||
+ HELP_STR);
|
||||
} else if (options.getExportDir() == null) {
|
||||
throw new InvalidOptionsException(
|
||||
"Export requires an --export-dir argument."
|
||||
+ HELP_STR);
|
||||
} else if (options.getExistingJarName() != null
|
||||
&& options.getClassName() == null) {
|
||||
throw new InvalidOptionsException("Jar specified with --jar-file, but no "
|
||||
+ "class specified with --class-name." + HELP_STR);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
// If extraArguments is full, check for '--' followed by args for
|
||||
// mysqldump or other commands we rely on.
|
||||
options.setExtraArgs(getSubcommandArgs(extraArguments));
|
||||
int dashPos = extraArguments.length;
|
||||
for (int i = 0; i < extraArguments.length; i++) {
|
||||
if (extraArguments[i].equals("--")) {
|
||||
dashPos = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasUnrecognizedArgs(extraArguments, 0, dashPos)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
|
||||
validateExportOptions(options);
|
||||
validateOutputFormatOptions(options);
|
||||
validateCommonOptions(options);
|
||||
validateCodeGenOptions(options);
|
||||
}
|
||||
}
|
||||
|
111
src/java/org/apache/hadoop/sqoop/tool/HelpTool.java
Normal file
@ -0,0 +1,111 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
|
||||
/**
|
||||
* Tool that explains the usage of Sqoop.
|
||||
*/
|
||||
public class HelpTool extends BaseSqoopTool {
|
||||
|
||||
public HelpTool() {
|
||||
super("help");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param str the string to right-side pad
|
||||
* @param num the minimum number of characters to return
|
||||
* @return 'str' with enough right padding to make it num characters long.
|
||||
*/
|
||||
private static String padRight(String str, int num) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(str);
|
||||
for (int count = str.length(); count < num; count++) {
|
||||
sb.append(" ");
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Print out a list of all SqoopTool implementations and their
|
||||
* descriptions.
|
||||
*/
|
||||
private void printAvailableTools() {
|
||||
System.out.println("usage: sqoop COMMAND [ARGS]");
|
||||
System.out.println("");
|
||||
System.out.println("Available commands:");
|
||||
|
||||
Set<String> toolNames = getToolNames();
|
||||
|
||||
int maxWidth = 0;
|
||||
for (String tool : toolNames) {
|
||||
maxWidth = Math.max(maxWidth, tool.length());
|
||||
}
|
||||
|
||||
for (String tool : toolNames) {
|
||||
System.out.println(" " + padRight(tool, maxWidth+2)
|
||||
+ getToolDescription(tool));
|
||||
}
|
||||
|
||||
System.out.println("");
|
||||
System.out.println(
|
||||
"See 'sqoop help COMMAND' for information on a specific command.");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
|
||||
if (this.extraArguments.length > 0) {
|
||||
if (hasUnrecognizedArgs(extraArguments, 1, extraArguments.length)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
SqoopTool subTool = SqoopTool.getTool(extraArguments[0]);
|
||||
if (null == subTool) {
|
||||
System.out.println("No such tool: " + extraArguments[0]);
|
||||
System.out.println(
|
||||
"Try 'sqoop help' for a list of available commands.");
|
||||
return 1;
|
||||
} else {
|
||||
ToolOptions toolOpts = new ToolOptions();
|
||||
subTool.configureOptions(toolOpts);
|
||||
subTool.printHelp(toolOpts);
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
printAvailableTools();
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void printHelp(ToolOptions opts) {
|
||||
System.out.println("usage: sqoop " + getToolName() + " [COMMAND]");
|
||||
}
|
||||
}
|
||||
|
108
src/java/org/apache/hadoop/sqoop/tool/ImportAllTablesTool.java
Normal file
@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import org.apache.hadoop.sqoop.Sqoop;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.hive.HiveImport;
|
||||
import org.apache.hadoop.sqoop.manager.ConnManager;
|
||||
import org.apache.hadoop.sqoop.manager.ExportJobContext;
|
||||
import org.apache.hadoop.sqoop.manager.ImportJobContext;
|
||||
import org.apache.hadoop.sqoop.orm.ClassWriter;
|
||||
import org.apache.hadoop.sqoop.orm.CompilationManager;
|
||||
import org.apache.hadoop.sqoop.shims.ShimLoader;
|
||||
import org.apache.hadoop.sqoop.tool.SqoopTool;
|
||||
import org.apache.hadoop.sqoop.util.ExportException;
|
||||
import org.apache.hadoop.sqoop.util.ImportException;
|
||||
|
||||
/**
|
||||
* Tool that performs database imports of all tables in a database to HDFS.
|
||||
*/
|
||||
public class ImportAllTablesTool extends ImportTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(
|
||||
ImportAllTablesTool.class.getName());
|
||||
|
||||
public ImportAllTablesTool() {
|
||||
super("import-all-tables", true);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
HiveImport hiveImport = null;
|
||||
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
if (options.doHiveImport()) {
|
||||
hiveImport = new HiveImport(options, manager, options.getConf(), false);
|
||||
}
|
||||
|
||||
String [] tables = manager.listTables();
|
||||
if (null == tables) {
|
||||
System.err.println("Could not retrieve tables list from server");
|
||||
LOG.error("manager.listTables() returned null");
|
||||
return 1;
|
||||
} else {
|
||||
for (String tableName : tables) {
|
||||
importTable(options, tableName, hiveImport);
|
||||
}
|
||||
}
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running import job: " + ioe.toString());
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} catch (ImportException ie) {
|
||||
LOG.error("Error during import: " + ie.toString());
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ie);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
|
||||
|
371
src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
Normal file
@ -0,0 +1,371 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.sqoop.Sqoop;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
import org.apache.hadoop.sqoop.hive.HiveImport;
|
||||
import org.apache.hadoop.sqoop.manager.ImportJobContext;
|
||||
import org.apache.hadoop.sqoop.orm.ClassWriter;
|
||||
import org.apache.hadoop.sqoop.orm.CompilationManager;
|
||||
import org.apache.hadoop.sqoop.util.ImportException;
|
||||
|
||||
/**
|
||||
* Tool that performs database imports to HDFS.
|
||||
*/
|
||||
public class ImportTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(ImportTool.class.getName());
|
||||
|
||||
private CodeGenTool codeGenerator;
|
||||
|
||||
// true if this is an all-tables import. Set by a subclass which
|
||||
// overrides the run() method of this tool (which can only do
|
||||
// a single table).
|
||||
private boolean allTables;
|
||||
|
||||
public ImportTool() {
|
||||
this("import", false);
|
||||
}
|
||||
|
||||
public ImportTool(String toolName, boolean allTables) {
|
||||
super(toolName);
|
||||
this.codeGenerator = new CodeGenTool();
|
||||
this.allTables = allTables;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean init(SqoopOptions sqoopOpts) {
|
||||
boolean ret = super.init(sqoopOpts);
|
||||
codeGenerator.setManager(manager);
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a list of jar files generated as part of this import process
|
||||
*/
|
||||
public List<String> getGeneratedJarFiles() {
|
||||
return this.codeGenerator.getGeneratedJarFiles();
|
||||
}
|
||||
|
||||
protected void importTable(SqoopOptions options, String tableName,
|
||||
HiveImport hiveImport) throws IOException, ImportException {
|
||||
String jarFile = null;
|
||||
|
||||
// Generate the ORM code for the tables.
|
||||
jarFile = codeGenerator.generateORM(options, tableName);
|
||||
|
||||
// Do the actual import.
|
||||
ImportJobContext context = new ImportJobContext(tableName, jarFile, options);
|
||||
manager.importTable(context);
|
||||
|
||||
// If the user wants this table to be in Hive, perform that post-load.
|
||||
if (options.doHiveImport()) {
|
||||
hiveImport.importTable(tableName, options.getHiveTableName(), false);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
HiveImport hiveImport = null;
|
||||
|
||||
if (allTables) {
|
||||
// We got into this method, but we should be in a subclass.
|
||||
// (This method only handles a single table)
|
||||
// This should not be reached, but for sanity's sake, test here.
|
||||
LOG.error("ImportTool.run() can only handle a single table.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
codeGenerator.setManager(manager);
|
||||
|
||||
try {
|
||||
if (options.doHiveImport()) {
|
||||
hiveImport = new HiveImport(options, manager, options.getConf(), false);
|
||||
}
|
||||
|
||||
// Import a single table the user specified.
|
||||
importTable(options, options.getTableName(), hiveImport);
|
||||
} catch (IOException ioe) {
|
||||
LOG.error("Encountered IOException running import job: " + ioe.toString());
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ioe);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} catch (ImportException ie) {
|
||||
LOG.error("Error during import: " + ie.toString());
|
||||
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
|
||||
throw new RuntimeException(ie);
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct the set of options that control imports, either of one
|
||||
* table or a batch of tables.
|
||||
* @return the RelatedOptions that can be used to parse the import
|
||||
* arguments.
|
||||
*/
|
||||
protected RelatedOptions getImportOptions() {
|
||||
// Imports
|
||||
RelatedOptions importOpts = new RelatedOptions("Import control arguments");
|
||||
|
||||
importOpts.addOption(OptionBuilder
|
||||
.withDescription("Use direct import fast path")
|
||||
.withLongOpt(DIRECT_ARG)
|
||||
.create());
|
||||
|
||||
if (!allTables) {
|
||||
importOpts.addOption(OptionBuilder.withArgName("table-name")
|
||||
.hasArg().withDescription("Table to read")
|
||||
.withLongOpt(TABLE_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder.withArgName("col,col,col...")
|
||||
.hasArg().withDescription("Columns to import from table")
|
||||
.withLongOpt(COLUMNS_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder.withArgName("column-name")
|
||||
.hasArg()
|
||||
.withDescription("Column of the table used to split work units")
|
||||
.withLongOpt(SPLIT_BY_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder.withArgName("where clause")
|
||||
.hasArg().withDescription("WHERE clause to use during import")
|
||||
.withLongOpt(WHERE_ARG)
|
||||
.create());
|
||||
}
|
||||
|
||||
importOpts.addOption(OptionBuilder.withArgName("dir")
|
||||
.hasArg().withDescription("HDFS parent for table destination")
|
||||
.withLongOpt(WAREHOUSE_DIR_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder
|
||||
.withDescription("Imports data to SequenceFiles")
|
||||
.withLongOpt(FMT_SEQUENCEFILE_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder
|
||||
.withDescription("Imports data as plain text (default)")
|
||||
.withLongOpt(FMT_TEXTFILE_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder.withArgName("n")
|
||||
.hasArg().withDescription("Use 'n' map tasks to import in parallel")
|
||||
.withLongOpt(NUM_MAPPERS_ARG)
|
||||
.create(NUM_MAPPERS_SHORT_ARG));
|
||||
importOpts.addOption(OptionBuilder
|
||||
.withDescription("Enable compression")
|
||||
.withLongOpt(COMPRESS_ARG)
|
||||
.create(COMPRESS_SHORT_ARG));
|
||||
importOpts.addOption(OptionBuilder.withArgName("n")
|
||||
.hasArg()
|
||||
.withDescription("Split the input stream every 'n' bytes "
|
||||
+ "when importing in direct mode")
|
||||
.withLongOpt(DIRECT_SPLIT_SIZE_ARG)
|
||||
.create());
|
||||
importOpts.addOption(OptionBuilder.withArgName("n")
|
||||
.hasArg()
|
||||
.withDescription("Set the maximum size for an inline LOB")
|
||||
.withLongOpt(INLINE_LOB_LIMIT_ARG)
|
||||
.create());
|
||||
|
||||
return importOpts;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
toolOptions.addUniqueOptions(getImportOptions());
|
||||
toolOptions.addUniqueOptions(getOutputFormatOptions());
|
||||
toolOptions.addUniqueOptions(getInputFormatOptions());
|
||||
toolOptions.addUniqueOptions(getHiveOptions(true));
|
||||
|
||||
// get common codegen opts.
|
||||
RelatedOptions codeGenOpts = getCodeGenOpts(allTables);
|
||||
|
||||
// add import-specific codegen opts:
|
||||
codeGenOpts.addOption(OptionBuilder.withArgName("file")
|
||||
.hasArg()
|
||||
.withDescription("Disable code generation; use specified jar")
|
||||
.withLongOpt(JAR_FILE_NAME_ARG)
|
||||
.create());
|
||||
|
||||
toolOptions.addUniqueOptions(codeGenOpts);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void printHelp(ToolOptions toolOptions) {
|
||||
super.printHelp(toolOptions);
|
||||
System.out.println("");
|
||||
if (allTables) {
|
||||
System.out.println("At minimum, you must specify --connect");
|
||||
} else {
|
||||
System.out.println(
|
||||
"At minimum, you must specify --connect and --table");
|
||||
}
|
||||
|
||||
System.out.println(
|
||||
"Arguments to mysqldump and other subprograms may be supplied");
|
||||
System.out.println(
|
||||
"after a '--' on the command line.");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
try {
|
||||
applyCommonOptions(in, out);
|
||||
|
||||
if (in.hasOption(DIRECT_ARG)) {
|
||||
out.setDirectMode(true);
|
||||
}
|
||||
|
||||
if (!allTables) {
|
||||
if (in.hasOption(TABLE_ARG)) {
|
||||
out.setTableName(in.getOptionValue(TABLE_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(COLUMNS_ARG)) {
|
||||
out.setColumns(in.getOptionValue(COLUMNS_ARG).split(","));
|
||||
}
|
||||
|
||||
if (in.hasOption(SPLIT_BY_ARG)) {
|
||||
out.setSplitByCol(in.getOptionValue(SPLIT_BY_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(WHERE_ARG)) {
|
||||
out.setWhereClause(in.getOptionValue(WHERE_ARG));
|
||||
}
|
||||
}
|
||||
|
||||
if (in.hasOption(WAREHOUSE_DIR_ARG)) {
|
||||
out.setWarehouseDir(in.getOptionValue(WAREHOUSE_DIR_ARG));
|
||||
}
|
||||
|
||||
if (in.hasOption(FMT_SEQUENCEFILE_ARG)) {
|
||||
out.setFileLayout(SqoopOptions.FileLayout.SequenceFile);
|
||||
}
|
||||
|
||||
if (in.hasOption(FMT_TEXTFILE_ARG)) {
|
||||
out.setFileLayout(SqoopOptions.FileLayout.TextFile);
|
||||
}
|
||||
|
||||
if (in.hasOption(NUM_MAPPERS_ARG)) {
|
||||
out.setNumMappers(Integer.parseInt(in.getOptionValue(NUM_MAPPERS_ARG)));
|
||||
}
|
||||
|
||||
if (in.hasOption(COMPRESS_ARG)) {
|
||||
out.setUseCompression(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(DIRECT_SPLIT_SIZE_ARG)) {
|
||||
out.setDirectSplitSize(Long.parseLong(in.getOptionValue(
|
||||
DIRECT_SPLIT_SIZE_ARG)));
|
||||
}
|
||||
|
||||
if (in.hasOption(INLINE_LOB_LIMIT_ARG)) {
|
||||
out.setInlineLobLimit(Long.parseLong(in.getOptionValue(
|
||||
INLINE_LOB_LIMIT_ARG)));
|
||||
}
|
||||
|
||||
if (in.hasOption(JAR_FILE_NAME_ARG)) {
|
||||
out.setExistingJarName(in.getOptionValue(JAR_FILE_NAME_ARG));
|
||||
}
|
||||
|
||||
applyHiveOptions(in, out);
|
||||
applyOutputFormatOptions(in, out);
|
||||
applyInputFormatOptions(in, out);
|
||||
applyCodeGenOptions(in, out, allTables);
|
||||
} catch (NumberFormatException nfe) {
|
||||
throw new InvalidOptionsException("Error: expected numeric argument.\n"
|
||||
+ "Try --help for usage.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate import-specific arguments.
|
||||
* @param options the configured SqoopOptions to check
|
||||
*/
|
||||
protected void validateImportOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
if (!allTables && options.getTableName() == null) {
|
||||
throw new InvalidOptionsException(
|
||||
"--table is required for import. (Or use sqoop import-all-tables.)"
|
||||
+ HELP_STR);
|
||||
} else if (options.getExistingJarName() != null
|
||||
&& options.getClassName() == null) {
|
||||
throw new InvalidOptionsException("Jar specified with --jar-file, but no "
|
||||
+ "class specified with --class-name." + HELP_STR);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
// If extraArguments is full, check for '--' followed by args for
|
||||
// mysqldump or other commands we rely on.
|
||||
options.setExtraArgs(getSubcommandArgs(extraArguments));
|
||||
int dashPos = extraArguments.length;
|
||||
for (int i = 0; i < extraArguments.length; i++) {
|
||||
if (extraArguments[i].equals("--")) {
|
||||
dashPos = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasUnrecognizedArgs(extraArguments, 0, dashPos)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
|
||||
validateImportOptions(options);
|
||||
validateCommonOptions(options);
|
||||
validateCodeGenOptions(options);
|
||||
validateOutputFormatOptions(options);
|
||||
}
|
||||
}
|
||||
|
91
src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
Normal file
@ -0,0 +1,91 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
|
||||
/**
|
||||
 * Tool that lists available databases on a server.
|
||||
*/
|
||||
public class ListDatabasesTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(
|
||||
ListDatabasesTool.class.getName());
|
||||
|
||||
public ListDatabasesTool() {
|
||||
super("list-databases");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
String [] databases = manager.listDatabases();
|
||||
if (null == databases) {
|
||||
System.err.println("Could not retrieve database list from server");
|
||||
LOG.error("manager.listDatabases() returned null");
|
||||
return 1;
|
||||
} else {
|
||||
for (String db : databases) {
|
||||
System.out.println(db);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
applyCommonOptions(in, out);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
|
||||
if (hasUnrecognizedArgs(extraArguments)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
validateCommonOptions(options);
|
||||
}
|
||||
}
|
||||
|
91
src/java/org/apache/hadoop/sqoop/tool/ListTablesTool.java
Normal file
@ -0,0 +1,91 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
|
||||
/**
|
||||
* Tool that lists available tables in a database.
|
||||
*/
|
||||
public class ListTablesTool extends BaseSqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(
|
||||
ListTablesTool.class.getName());
|
||||
|
||||
public ListTablesTool() {
|
||||
super("list-tables");
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public int run(SqoopOptions options) {
|
||||
if (!init(options)) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
String [] tables = manager.listTables();
|
||||
if (null == tables) {
|
||||
System.err.println("Could not retrieve tables list from server");
|
||||
LOG.error("manager.listTables() returned null");
|
||||
return 1;
|
||||
} else {
|
||||
for (String tbl : tables) {
|
||||
System.out.println(tbl);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
destroy(options);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
/** Configure the command-line arguments we expect to receive */
|
||||
public void configureOptions(ToolOptions toolOptions) {
|
||||
toolOptions.addUniqueOptions(getCommonOptions());
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
applyCommonOptions(in, out);
|
||||
}
|
||||
|
||||
@Override
|
||||
/** {@inheritDoc} */
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
if (hasUnrecognizedArgs(extraArguments)) {
|
||||
throw new InvalidOptionsException(HELP_STR);
|
||||
}
|
||||
|
||||
validateCommonOptions(options);
|
||||
}
|
||||
}
|
||||
|
287
src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
Normal file
@ -0,0 +1,287 @@
|
||||
/**
|
||||
* Licensed to Cloudera, Inc. under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Cloudera, Inc. licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.sqoop.tool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.util.GenericOptionsParser;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
|
||||
import org.apache.hadoop.sqoop.SqoopOptions;
|
||||
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
|
||||
import org.apache.hadoop.sqoop.cli.RelatedOptions;
|
||||
import org.apache.hadoop.sqoop.cli.SqoopParser;
|
||||
import org.apache.hadoop.sqoop.cli.ToolOptions;
|
||||
import org.apache.hadoop.sqoop.shims.ShimLoader;
|
||||
|
||||
/**
|
||||
 * Base class for Sqoop subprograms (e.g., ImportTool, ExportTool).
|
||||
* Allows subprograms to configure the arguments they accept and
|
||||
* provides an entry-point to the subprogram.
|
||||
*/
|
||||
public abstract class SqoopTool {
|
||||
|
||||
public static final Log LOG = LogFactory.getLog(SqoopTool.class.getName());
|
||||
|
||||
private static final Map<String, Class<? extends SqoopTool>> TOOLS;
|
||||
private static final Map<String, String> DESCRIPTIONS;
|
||||
|
||||
static {
|
||||
// All SqoopTool instances should be registered here so that
|
||||
// they can be found internally.
|
||||
TOOLS = new TreeMap<String, Class<? extends SqoopTool>>();
|
||||
DESCRIPTIONS = new TreeMap<String, String>();
|
||||
|
||||
registerTool("codegen", CodeGenTool.class,
|
||||
"Generate code to interact with database records");
|
||||
registerTool("create-hive-table", CreateHiveTableTool.class,
|
||||
"Import a table definition into Hive");
|
||||
registerTool("eval", EvalSqlTool.class,
|
||||
"Evaluate a SQL statement and display the results");
|
||||
registerTool("export", ExportTool.class,
|
||||
"Export an HDFS directory to a database table");
|
||||
registerTool("import", ImportTool.class,
|
||||
"Import a table from a database to HDFS");
|
||||
registerTool("import-all-tables", ImportAllTablesTool.class,
|
||||
"Import tables from a database to HDFS");
|
||||
registerTool("help", HelpTool.class, "List available commands");
|
||||
registerTool("list-databases", ListDatabasesTool.class,
|
||||
"List available databases on a server");
|
||||
registerTool("list-tables", ListTablesTool.class,
|
||||
"List available tables in a database");
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a tool to the available set of SqoopTool instances.
|
||||
 * @param toolName the name the user accesses the tool through.
|
||||
* @param cls the class providing the tool.
|
||||
* @param description a user-friendly description of the tool's function.
|
||||
*/
|
||||
private static final void registerTool(String toolName,
|
||||
Class<? extends SqoopTool> cls, String description) {
|
||||
TOOLS.put(toolName, cls);
|
||||
DESCRIPTIONS.put(toolName, description);
|
||||
}
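For illustration only, an additional subcommand would be wired in with one more registerTool call placed alongside the existing entries in the static block above; the "version" name and VersionTool class below are hypothetical and not part of this patch:

// Hypothetical registration (inside SqoopTool's static initializer, since
// registerTool is private); VersionTool is an assumed, not-yet-existing class.
registerTool("version", VersionTool.class,
    "Display version information");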
|
||||
|
||||
/**
|
||||
* @return the list of available tools.
|
||||
*/
|
||||
public static final Set<String> getToolNames() {
|
||||
return TOOLS.keySet();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the SqoopTool instance with the provided name, or null
|
||||
* if no such tool exists.
|
||||
*/
|
||||
public static final SqoopTool getTool(String toolName) {
|
||||
Class<? extends SqoopTool> cls = TOOLS.get(toolName);
|
||||
try {
|
||||
if (null != cls) {
|
||||
SqoopTool tool = cls.newInstance();
|
||||
tool.setToolName(toolName);
|
||||
return tool;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
LOG.error(StringUtils.stringifyException(e));
|
||||
return null;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the user-friendly description for a tool, or null if the tool
|
||||
* cannot be found.
|
||||
*/
|
||||
public static final String getToolDescription(String toolName) {
|
||||
return DESCRIPTIONS.get(toolName);
|
||||
}
|
||||
|
||||
/** The name of the current tool. */
|
||||
private String toolName;
|
||||
|
||||
/** Arguments that remained unparsed after parseArguments. */
|
||||
protected String [] extraArguments;
|
||||
|
||||
public SqoopTool() {
|
||||
this.toolName = "<" + this.getClass().getName() + ">";
|
||||
}
|
||||
|
||||
public SqoopTool(String name) {
|
||||
this.toolName = name;
|
||||
}
|
||||
|
||||
public String getToolName() {
|
||||
return this.toolName;
|
||||
}
|
||||
|
||||
protected void setToolName(String name) {
|
||||
this.toolName = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Main body of code to run the tool.
|
||||
* @param options the SqoopOptions configured via
|
||||
* configureOptions()/applyOptions().
|
||||
* @return an integer return code for external programs to consume. 0
|
||||
* represents success; nonzero means failure.
|
||||
*/
|
||||
public abstract int run(SqoopOptions options);
|
||||
|
||||
/**
|
||||
* Configure the command-line arguments we expect to receive.
|
||||
* @param opts a ToolOptions that should be populated with sets of
|
||||
* RelatedOptions for the tool.
|
||||
*/
|
||||
public void configureOptions(ToolOptions opts) {
|
||||
// Default implementation does nothing.
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the help message for this tool.
|
||||
* @param opts the configured tool options
|
||||
*/
|
||||
public void printHelp(ToolOptions opts) {
|
||||
System.out.println("usage: sqoop " + getToolName() + " [GENERIC-ARGS] [TOOL-ARGS]");
|
||||
System.out.println("");
|
||||
|
||||
opts.printHelp();
|
||||
|
||||
System.out.println("");
|
||||
System.out.println("Generic Hadoop command-line arguments:");
|
||||
System.out.println("(must preceed any tool-specific arguments)");
|
||||
ToolRunner.printGenericCommandUsage(System.out);
|
||||
}
|
||||
|
||||
/** Generate the SqoopOptions containing actual argument values from
|
||||
* the extracted CommandLine arguments.
|
||||
* @param in the CLI CommandLine that contain the user's set Options.
|
||||
* @param out the SqoopOptions with all fields applied.
|
||||
* @throws InvalidOptionsException if there's a problem.
|
||||
*/
|
||||
public void applyOptions(CommandLine in, SqoopOptions out)
|
||||
throws InvalidOptionsException {
|
||||
// Default implementation does nothing.
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates options and ensures that any required options are
|
||||
* present and that any mutually-exclusive options are not selected.
|
||||
* @throws InvalidOptionsException if there's a problem.
|
||||
*/
|
||||
public void validateOptions(SqoopOptions options)
|
||||
throws InvalidOptionsException {
|
||||
// Default implementation does nothing.
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures a SqoopOptions according to the specified arguments.
|
||||
* Reads a set of arguments and uses them to configure a SqoopOptions
|
||||
* and its embedded configuration (i.e., through GenericOptionsParser.)
|
||||
* Stores any unparsed arguments in the extraArguments field.
|
||||
*
|
||||
* @param args the arguments to parse.
|
||||
* @param conf if non-null, set as the configuration for the returned
|
||||
* SqoopOptions.
|
||||
* @param in a (perhaps partially-configured) SqoopOptions. If null,
|
||||
* then a new SqoopOptions will be used. If this has a null configuration
|
||||
* and conf is null, then a new Configuration will be inserted in this.
|
||||
* @param useGenericOptions if true, will also parse generic Hadoop
|
||||
* options into the Configuration.
|
||||
* @return a SqoopOptions that is fully configured by a given tool.
|
||||
*/
|
||||
public SqoopOptions parseArguments(String [] args,
|
||||
Configuration conf, SqoopOptions in, boolean useGenericOptions)
|
||||
throws ParseException, SqoopOptions.InvalidOptionsException {
|
||||
SqoopOptions out = in;
|
||||
|
||||
if (null == out) {
|
||||
out = new SqoopOptions();
|
||||
}
|
||||
|
||||
if (null != conf) {
|
||||
// User specified a configuration; use it and override any conf
|
||||
// that may have been in the SqoopOptions.
|
||||
out.setConf(conf);
|
||||
} else if (null == out.getConf()) {
|
||||
// User did not specify a configuration, but neither did the
|
||||
// SqoopOptions. Fabricate a new one.
|
||||
out.setConf(new Configuration());
|
||||
}
|
||||
|
||||
String [] toolArgs = args; // args after generic parser is done.
|
||||
if (useGenericOptions) {
|
||||
try {
|
||||
toolArgs = ShimLoader.getHadoopShim().parseGenericOptions(
|
||||
out.getConf(), args);
|
||||
} catch (IOException ioe) {
|
||||
ParseException pe = new ParseException(
|
||||
"Could not parse generic arguments");
|
||||
pe.initCause(ioe);
|
||||
throw pe;
|
||||
}
|
||||
}
|
||||
|
||||
// Parse tool-specific arguments.
|
||||
ToolOptions toolOptions = new ToolOptions();
|
||||
configureOptions(toolOptions);
|
||||
CommandLineParser parser = new SqoopParser();
|
||||
CommandLine cmdLine = parser.parse(toolOptions.merge(), toolArgs, true);
|
||||
applyOptions(cmdLine, out);
|
||||
this.extraArguments = cmdLine.getArgs();
|
||||
return out;
|
||||
}
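Taken together, getTool(), parseArguments(), validateOptions(), and run() form the dispatch path that the Sqoop wrapper drives. The following is a minimal sketch of such a driver, based only on the signatures declared above; the real wrapper adds error handling and exit-code plumbing that is omitted here, and the class name is an assumption:

import org.apache.commons.cli.ParseException;

import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.tool.SqoopTool;

public class ToolDispatchSketch {

  /** Look up a registered tool by name, parse its arguments, and run it. */
  public static int dispatch(String toolName, String [] args)
      throws ParseException, InvalidOptionsException {
    SqoopTool tool = SqoopTool.getTool(toolName);
    if (null == tool) {
      System.err.println("No such tool: " + toolName);
      return 1;
    }

    // Parses generic Hadoop arguments into the Configuration, then the
    // tool-specific arguments into a fresh SqoopOptions.
    SqoopOptions opts = tool.parseArguments(args, null, null, true);
    tool.validateOptions(opts); // throws InvalidOptionsException on bad input
    return tool.run(opts);      // 0 indicates success
  }
}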
|
||||
|
||||
/**
|
||||
* Append 'extra' to extraArguments.
|
||||
*/
|
||||
public void appendArgs(String [] extra) {
|
||||
int existingLen =
|
||||
(this.extraArguments == null) ? 0 : this.extraArguments.length;
|
||||
int newLen = (extra == null) ? 0 : extra.length;
|
||||
String [] newExtra = new String[existingLen + newLen];
|
||||
|
||||
if (null != this.extraArguments) {
|
||||
System.arraycopy(this.extraArguments, 0, newExtra, 0, existingLen);
|
||||
}
|
||||
|
||||
if (null != extra) {
|
||||
System.arraycopy(extra, 0, newExtra, existingLen, newLen);
|
||||
}
|
||||
|
||||
this.extraArguments = newExtra;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return getToolName();
|
||||
}
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ private ClassLoaderStack() {
|
||||
* Sets the classloader for the current thread
|
||||
*/
|
||||
public static void setCurrentClassLoader(ClassLoader cl) {
|
||||
LOG.info("Restoring classloader: " + cl.toString());
|
||||
LOG.debug("Restoring classloader: " + cl.toString());
|
||||
Thread.currentThread().setContextClassLoader(cl);
|
||||
}
|
||||
|
||||
@ -72,13 +72,13 @@ public static ClassLoader addJarFile(String jarFile, String testClassName)
|
||||
LOG.debug("Testing class in jar: " + testClassName);
|
||||
Class.forName(testClassName, true, cl);
|
||||
}
|
||||
LOG.info("Loaded jar into current JVM: " + urlPath);
|
||||
LOG.debug("Loaded jar into current JVM: " + urlPath);
|
||||
} catch (ClassNotFoundException cnfe) {
|
||||
throw new IOException("Could not load jar " + jarFile + " into JVM. (Could not find class "
|
||||
+ testClassName + ".)", cnfe);
|
||||
}
|
||||
|
||||
LOG.info("Added classloader for jar " + jarFile + ": " + cl);
|
||||
LOG.debug("Added classloader for jar " + jarFile + ": " + cl);
|
||||
Thread.currentThread().setContextClassLoader(cl);
|
||||
return prevClassLoader;
|
||||
}
|
||||
|
@ -19,20 +19,17 @@
|
||||
package org.apache.hadoop.sqoop.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.CharBuffer;
|
||||
import java.io.PrintWriter;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.ResultSetMetaData;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Utility methods to format and print ResultSet objects
|
||||
*
|
||||
*
|
||||
*/
|
||||
public class ResultSetPrinter {
|
||||
|
||||
@ -41,21 +38,6 @@ public class ResultSetPrinter {
|
||||
// max output width to allocate to any column of the printed results.
|
||||
private static final int MAX_COL_WIDTH = 20;
|
||||
|
||||
// length of the byte buffer, in bytes, to allocate.
|
||||
private static final int BUFFER_LEN = 4096;
|
||||
|
||||
// maximum number of characters to deserialize from the stringbuilder
|
||||
// into the byte buffer at a time. Factor of 2 off b/c of Unicode.
|
||||
private static final int MAX_CHARS = 2048;
|
||||
|
||||
private ByteBuffer bytebuf;
|
||||
private char [] charArray;
|
||||
|
||||
public ResultSetPrinter() {
|
||||
bytebuf = ByteBuffer.allocate(BUFFER_LEN);
|
||||
charArray = new char[MAX_CHARS];
|
||||
}
|
||||
|
||||
/**
|
||||
* Print 'str' to the string builder, padded to 'width' chars
|
||||
*/
|
||||
@ -74,35 +56,14 @@ private static void printPadded(StringBuilder sb, String str, int width) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Takes the contents of the StringBuilder and prints it on the OutputStream
|
||||
*/
|
||||
private void sendToStream(StringBuilder sb, OutputStream os) throws IOException {
|
||||
|
||||
int pos = 0; // current pos in the string builder
|
||||
int len = sb.length(); // total length (in characters) to send to os.
|
||||
CharBuffer charbuf = bytebuf.asCharBuffer();
|
||||
|
||||
while (pos < len) {
|
||||
int copyLen = Math.min(sb.length(), MAX_CHARS);
|
||||
sb.getChars(pos, copyLen, charArray, 0);
|
||||
|
||||
charbuf.put(charArray, 0, copyLen);
|
||||
os.write(bytebuf.array());
|
||||
|
||||
pos += copyLen;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final String COL_SEPARATOR = " | ";
|
||||
|
||||
/**
|
||||
* Format the contents of the ResultSet into something that could be printed
|
||||
* neatly; the results are appended to the supplied StringBuilder.
|
||||
*/
|
||||
public final void printResultSet(OutputStream os, ResultSet results) throws IOException {
|
||||
public final void printResultSet(PrintWriter pw, ResultSet results)
|
||||
throws IOException {
|
||||
try {
|
||||
StringBuilder sbNames = new StringBuilder();
|
||||
int cols = results.getMetaData().getColumnCount();
|
||||
@ -128,9 +89,9 @@ public final void printResultSet(OutputStream os, ResultSet results) throws IOEx
|
||||
}
|
||||
sbPad.append('\n');
|
||||
|
||||
sendToStream(sbPad, os);
|
||||
sendToStream(sbNames, os);
|
||||
sendToStream(sbPad, os);
|
||||
pw.print(sbPad.toString());
|
||||
pw.print(sbNames.toString());
|
||||
pw.print(sbPad.toString());
|
||||
|
||||
while (results.next()) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
@ -139,12 +100,13 @@ public final void printResultSet(OutputStream os, ResultSet results) throws IOEx
|
||||
sb.append(COL_SEPARATOR);
|
||||
}
|
||||
sb.append('\n');
|
||||
sendToStream(sb, os);
|
||||
pw.print(sb.toString());
|
||||
}
|
||||
|
||||
sendToStream(sbPad, os);
|
||||
pw.print(sbPad.toString());
|
||||
} catch (SQLException sqlException) {
|
||||
LOG.error("Error reading from database: " + sqlException.toString());
|
||||
LOG.error("Error reading from database: "
|
||||
+ StringUtils.stringifyException(sqlException));
|
||||
}
|
||||
}
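With the new signature, callers pass a PrintWriter instead of a raw OutputStream, which is what makes the ByteBuffer-based sendToStream machinery above removable. A minimal caller sketch, assuming an already-open java.sql.ResultSet named results (obtained elsewhere, e.g. by the connection manager) and propagating the declared IOException:

// Hedged sketch of driving the refactored printer; 'results' is assumed.
PrintWriter pw = new PrintWriter(System.out);
new ResultSetPrinter().printResultSet(pw, results);
pw.flush(); // printResultSet uses print(), so flush explicitly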
|
||||
|
||||
|
63
src/scripts/create-tool-scripts.sh
Executable file
@ -0,0 +1,63 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Licensed to Cloudera, Inc. under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Arguments to this script:
|
||||
# The output directory for the result scripts
|
||||
# The path to the script template file.
|
||||
# This script should be run in the base directory of the project.
|
||||
# It will run 'bin/sqoop help' and determine the names of all subprograms,
|
||||
# and then generate wrapper scripts for each of these.
|
||||
|
||||
bin=`dirname $0`
|
||||
bin=`cd ${bin} && pwd`
|
||||
|
||||
outdir=$1
|
||||
template=$2
|
||||
|
||||
if [ -z "$outdir" ]; then
|
||||
echo "Output directory is required as the first argument."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$template" ]; then
|
||||
echo "The script template filename is required as the second argument."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$outdir" ]; then
|
||||
echo "Output directory does not exist!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f "$template" ]; then
|
||||
echo "Could not find template file: " $template
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Query 'sqoop help' for the list of available commands.
|
||||
subprograms=`bin/sqoop help | awk ' BEGIN {use=0} /^$/ {use=0} \
|
||||
{ if (use) { print $1 } else { } } /^Available/ {use=1} '`
|
||||
|
||||
# For each of these, copy the template into place and sed the
|
||||
# subcommand's name in.
|
||||
for p in $subprograms; do
|
||||
target="${outdir}/sqoop-${p}"
|
||||
cp "${template}" "${target}"
|
||||
chmod +x "${target}"
|
||||
sed -i -e "s/SUBCOMMANDMARKER/$p/" "${target}"
|
||||
done
|
||||
|
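As the comment block at the top says, the script takes the output directory and the template path and must run from the project base directory. A hypothetical invocation (directory names are illustrative; the build normally drives this step):

$ cd /path/to/sqoop                      # project base directory (assumed)
$ mkdir -p build/bin                     # output directory must already exist
$ sh src/scripts/create-tool-scripts.sh build/bin src/scripts/tool-script.sh.template

Each subcommand reported by 'bin/sqoop help' then gets an executable wrapper named build/bin/sqoop-<subcommand>.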
src/scripts/tool-script.sh.template (new file, 22 lines)
@ -0,0 +1,22 @@
#!/bin/sh
#
# Licensed to Cloudera, Inc. under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

prgm=`readlink -f $0`
bin=`dirname ${prgm}`
bin=`cd ${bin} && pwd`

exec ${bin}/sqoop SUBCOMMANDMARKER "$@"
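A generated wrapper is simply this template with SUBCOMMANDMARKER substituted by create-tool-scripts.sh. For a hypothetical subcommand named 'import', the result (license header elided) would look like:

#!/bin/sh
prgm=`readlink -f $0`
bin=`dirname ${prgm}`
bin=`cd ${bin} && pwd`

exec ${bin}/sqoop import "$@"

so each sqoop-<subcommand> script resolves its own location and delegates to the main sqoop launcher in the same directory.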
@ -18,7 +18,11 @@

package org.apache.hadoop.sqoop.shims;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Contains code which belongs in all Hadoop shims which is syntactically
@ -27,6 +31,17 @@
 */
public abstract class CommonHadoopShim extends HadoopShim {

  @Override
  public String [] parseGenericOptions(Configuration conf, String [] args)
      throws IOException {
    // This needs to be shimmed because in Apache Hadoop this can throw
    // an IOException, but it does not do so in CDH. We just mandate in
    // this method that an IOException is possible.
    GenericOptionsParser genericParser = new GenericOptionsParser(
        conf, args);
    return genericParser.getRemainingArgs();
  }

  @Override
  public String getDbInputClassProperty() {
    return DBConfiguration.INPUT_CLASS_PROPERTY;
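The comment in parseGenericOptions explains the reason for the shim: the Apache and CDH signatures of GenericOptionsParser differ on the checked IOException, so the shim declares it uniformly. A minimal sketch of how calling code could use the shim to apply generic Hadoop options (-D name=value and friends) to a Configuration before tool-specific parsing; how the concrete HadoopShim instance is obtained, and its import path, are assumptions inferred from the package declaration above:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.shims.HadoopShim;

public class GenericArgsSketch {
  // 'shim' is whichever concrete HadoopShim the runtime has loaded; this
  // sketch does not show the shim-loading step.
  public static String[] stripGenericArgs(HadoopShim shim, Configuration conf,
      String[] args) throws IOException {
    // Generic options are applied to conf; the remaining arguments are
    // returned for the SqoopTool's own option parser.
    return shim.parseGenericOptions(conf, args);
  }
}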
@ -34,6 +34,7 @@
import org.apache.hadoop.sqoop.testutil.CommonArgs;
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.tool.ImportAllTablesTool;

/**
 * Test the --all-tables functionality that can import multiple tables.
@ -51,7 +52,6 @@ public class TestAllTables extends ImportJobTestCase {
      CommonArgs.addHadoopFlags(args);
    }

    args.add("--all-tables");
    args.add("--warehouse-dir");
    args.add(getWarehouseDir());
    args.add("--connect");
@ -108,7 +108,7 @@ public void setUp() {

  public void testMultiTableImport() throws IOException {
    String [] argv = getArgv(true);
    runImport(argv);
    runImport(new ImportAllTablesTool(), argv);

    Path warehousePath = new Path(this.getWarehouseDir());
    int i = 0;
@ -46,6 +46,7 @@
import org.apache.hadoop.sqoop.lib.RecordParser;
import org.apache.hadoop.sqoop.lib.SqoopRecord;
import org.apache.hadoop.sqoop.testutil.ExportJobTestCase;
import org.apache.hadoop.sqoop.tool.CodeGenTool;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;

import org.junit.Before;
@ -70,6 +71,31 @@ public void setUp() {
    }
  }

  /**
   * @return an argv for the CodeGenTool to use when creating tables to export.
   */
  protected String [] getCodeGenArgv(String... extraArgs) {
    List<String> codeGenArgv = new ArrayList<String>();

    if (null != extraArgs) {
      for (String arg : extraArgs) {
        codeGenArgv.add(arg);
      }
    }

    codeGenArgv.add("--table");
    codeGenArgv.add(getTableName());
    codeGenArgv.add("--connect");
    codeGenArgv.add(getConnectString());
    codeGenArgv.add("--fields-terminated-by");
    codeGenArgv.add("\\t");
    codeGenArgv.add("--lines-terminated-by");
    codeGenArgv.add("\\n");

    return codeGenArgv.toArray(new String[0]);
  }


  private String getRecordLine(int recordNum, ColumnGenerator... extraCols) {
    String idStr = Integer.toString(recordNum);
    StringBuilder sb = new StringBuilder();
@ -429,9 +455,8 @@ public void testMultiMapTextExport() throws IOException, SQLException {
    verifyExport(RECORDS_PER_MAP * NUM_FILES);
  }


  /** Export some rows from a SequenceFile, make sure they import correctly */
  public void testSequenceFileExport() throws IOException, SQLException {
  public void testSequenceFileExport() throws Exception {

    final int TOTAL_RECORDS = 10;

@ -439,7 +464,14 @@ public void testSequenceFileExport() throws IOException, SQLException {
    LOG.info("Creating initial schema for SeqFile test");
    createTable();
    LOG.info("Generating code...");
    List<String> generatedJars = runExport(getArgv(true, "--generate-only"));
    CodeGenTool codeGen = new CodeGenTool();
    String [] codeGenArgs = getCodeGenArgv();
    SqoopOptions options = codeGen.parseArguments(
        codeGenArgs, null, null, true);
    codeGen.validateOptions(options);
    int ret = codeGen.run(options);
    assertEquals(0, ret);
    List<String> generatedJars = codeGen.getGeneratedJarFiles();

    // Now, wipe the created table so we can export on top of it again.
    LOG.info("Resetting schema and data...");
@ -22,6 +22,7 @@
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@ -37,6 +38,7 @@
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.testutil.SeqFileReader;
import org.apache.hadoop.sqoop.tool.ImportTool;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;

/**
@ -120,8 +122,10 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
    String [] argv = getArgv(true, columns, splitByCol);
    runImport(argv);
    try {
      SqoopOptions opts = new SqoopOptions();
      opts.parse(getArgv(false, columns, splitByCol));
      ImportTool importTool = new ImportTool();
      SqoopOptions opts = importTool.parseArguments(
          getArgv(false, columns, splitByCol),
          null, null, true);

      CompilationManager compileMgr = new CompilationManager(opts);
      String jarFileName = compileMgr.getJarFilename();
@ -162,6 +166,8 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
      assertEquals("Total sum of first db column mismatch", expectedSum, curSum);
    } catch (InvalidOptionsException ioe) {
      fail(ioe.toString());
    } catch (ParseException pe) {
      fail(pe.toString());
    } finally {
      IOUtils.closeStream(reader);
@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;

import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
@ -32,6 +33,7 @@
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.testutil.SeqFileReader;
import org.apache.hadoop.sqoop.tool.ImportTool;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;

/**
@ -99,11 +101,13 @@ public void runSplitByTest(String splitByCol, int expectedSum)
    String [] argv = getArgv(true, columns, splitByCol);
    runImport(argv);
    try {
      SqoopOptions opts = new SqoopOptions();
      opts.parse(getArgv(false, columns, splitByCol));
      SqoopOptions opts = new ImportTool().parseArguments(
          getArgv(false, columns, splitByCol),
          null, null, true);

      CompilationManager compileMgr = new CompilationManager(opts);
      String jarFileName = compileMgr.getJarFilename();
      LOG.debug("Got jar from import job: " + jarFileName);

      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName());

@ -130,6 +134,8 @@ public void runSplitByTest(String splitByCol, int expectedSum)
      assertEquals("Total sum of first db column mismatch", expectedSum, curSum);
    } catch (InvalidOptionsException ioe) {
      fail(ioe.toString());
    } catch (ParseException pe) {
      fail(pe.toString());
    } finally {
      IOUtils.closeStream(reader);
@ -20,6 +20,8 @@

import junit.framework.TestCase;

import org.apache.hadoop.sqoop.tool.ImportTool;


/**
 * Test aspects of the SqoopOptions class
@ -27,11 +29,11 @@
public class TestSqoopOptions extends TestCase {

  // tests for the toChar() parser
  public void testNormalChar() throws SqoopOptions.InvalidOptionsException {
  public void testNormalChar() throws Exception {
    assertEquals('a', SqoopOptions.toChar("a"));
  }

  public void testEmptyString() throws SqoopOptions.InvalidOptionsException {
  public void testEmptyString() throws Exception {
    try {
      SqoopOptions.toChar("");
      fail("Expected exception");
@ -40,7 +42,7 @@ public void testEmptyString() throws SqoopOptions.InvalidOptionsException {
    }
  }

  public void testNullString() throws SqoopOptions.InvalidOptionsException {
  public void testNullString() throws Exception {
    try {
      SqoopOptions.toChar(null);
      fail("Expected exception");
@ -49,40 +51,46 @@ public void testNullString() throws SqoopOptions.InvalidOptionsException {
    }
  }

  public void testTooLong() throws SqoopOptions.InvalidOptionsException {
  public void testTooLong() throws Exception {
    // Should just use the first character and log a warning.
    assertEquals('x', SqoopOptions.toChar("xyz"));
  }

  public void testHexChar1() throws SqoopOptions.InvalidOptionsException {
  public void testHexChar1() throws Exception {
    assertEquals(0xF, SqoopOptions.toChar("\\0xf"));
  }

  public void testHexChar2() throws SqoopOptions.InvalidOptionsException {
  public void testHexChar2() throws Exception {
    assertEquals(0xF, SqoopOptions.toChar("\\0xF"));
  }

  public void testHexChar3() throws SqoopOptions.InvalidOptionsException {
  public void testHexChar3() throws Exception {
    assertEquals(0xF0, SqoopOptions.toChar("\\0xf0"));
  }

  public void testHexChar4() throws SqoopOptions.InvalidOptionsException {
  public void testHexChar4() throws Exception {
    assertEquals(0xF0, SqoopOptions.toChar("\\0Xf0"));
  }

  public void testEscapeChar1() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeChar1() throws Exception {
    assertEquals('\n', SqoopOptions.toChar("\\n"));
  }

  public void testEscapeChar2() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeChar2() throws Exception {
    assertEquals('\\', SqoopOptions.toChar("\\\\"));
  }

  public void testEscapeChar3() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeChar3() throws Exception {
    assertEquals('\\', SqoopOptions.toChar("\\"));
  }

  public void testUnknownEscape1() throws SqoopOptions.InvalidOptionsException {
  public void testWhitespaceToChar() throws Exception {
    assertEquals(' ', SqoopOptions.toChar(" "));
    assertEquals(' ', SqoopOptions.toChar(" "));
    assertEquals('\t', SqoopOptions.toChar("\t"));
  }

  public void testUnknownEscape1() throws Exception {
    try {
      SqoopOptions.toChar("\\Q");
      fail("Expected exception");
@ -91,7 +99,7 @@ public void testUnknownEscape1() throws SqoopOptions.InvalidOptionsException {
    }
  }

  public void testUnknownEscape2() throws SqoopOptions.InvalidOptionsException {
  public void testUnknownEscape2() throws Exception {
    try {
      SqoopOptions.toChar("\\nn");
      fail("Expected exception");
@ -100,31 +108,31 @@ public void testUnknownEscape2() throws SqoopOptions.InvalidOptionsException {
    }
  }

  public void testEscapeNul1() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeNul1() throws Exception {
    assertEquals('\000', SqoopOptions.toChar("\\0"));
  }

  public void testEscapeNul2() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeNul2() throws Exception {
    assertEquals('\000', SqoopOptions.toChar("\\00"));
  }

  public void testEscapeNul3() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeNul3() throws Exception {
    assertEquals('\000', SqoopOptions.toChar("\\0000"));
  }

  public void testEscapeNul4() throws SqoopOptions.InvalidOptionsException {
  public void testEscapeNul4() throws Exception {
    assertEquals('\000', SqoopOptions.toChar("\\0x0"));
  }

  public void testOctalChar1() throws SqoopOptions.InvalidOptionsException {
  public void testOctalChar1() throws Exception {
    assertEquals(04, SqoopOptions.toChar("\\04"));
  }

  public void testOctalChar2() throws SqoopOptions.InvalidOptionsException {
  public void testOctalChar2() throws Exception {
    assertEquals(045, SqoopOptions.toChar("\\045"));
  }

  public void testErrOctalChar() throws SqoopOptions.InvalidOptionsException {
  public void testErrOctalChar() throws Exception {
    try {
      SqoopOptions.toChar("\\095");
      fail("Expected exception");
@ -133,7 +141,7 @@ public void testErrOctalChar() throws SqoopOptions.InvalidOptionsException {
    }
  }

  public void testErrHexChar() throws SqoopOptions.InvalidOptionsException {
  public void testErrHexChar() throws Exception {
    try {
      SqoopOptions.toChar("\\0x9K5");
      fail("Expected exception");
@ -142,21 +150,25 @@ public void testErrHexChar() throws SqoopOptions.InvalidOptionsException {
    }
  }

  private SqoopOptions parse(String [] argv) throws Exception {
    ImportTool importTool = new ImportTool();
    return importTool.parseArguments(argv, null, null, false);
  }

  // test that setting output delimiters also sets input delimiters
  public void testDelimitersInherit() throws SqoopOptions.InvalidOptionsException {
  public void testDelimitersInherit() throws Exception {
    String [] args = {
      "--fields-terminated-by",
      "|"
    };

    SqoopOptions opts = new SqoopOptions();
    opts.parse(args);
    SqoopOptions opts = parse(args);
    assertEquals('|', opts.getInputFieldDelim());
    assertEquals('|', opts.getOutputFieldDelim());
  }

  // test that setting output delimiters and setting input delims separately works
  public void testDelimOverride1() throws SqoopOptions.InvalidOptionsException {
  public void testDelimOverride1() throws Exception {
    String [] args = {
      "--fields-terminated-by",
      "|",
@ -164,14 +176,13 @@ public void testDelimOverride1() throws SqoopOptions.InvalidOptionsException {
      "*"
    };

    SqoopOptions opts = new SqoopOptions();
    opts.parse(args);
    SqoopOptions opts = parse(args);
    assertEquals('*', opts.getInputFieldDelim());
    assertEquals('|', opts.getOutputFieldDelim());
  }

  // test that the order in which delims are specified doesn't matter
  public void testDelimOverride2() throws SqoopOptions.InvalidOptionsException {
  public void testDelimOverride2() throws Exception {
    String [] args = {
      "--input-fields-terminated-by",
      "*",
@ -179,50 +190,46 @@ public void testDelimOverride2() throws SqoopOptions.InvalidOptionsException {
      "|"
    };

    SqoopOptions opts = new SqoopOptions();
    opts.parse(args);
    SqoopOptions opts = parse(args);
    assertEquals('*', opts.getInputFieldDelim());
    assertEquals('|', opts.getOutputFieldDelim());
  }

  public void testBadNumMappers1() {
  public void testBadNumMappers1() throws Exception {
    String [] args = {
      "--num-mappers",
      "x"
    };

    try {
      SqoopOptions opts = new SqoopOptions();
      opts.parse(args);
      SqoopOptions opts = parse(args);
      fail("Expected InvalidOptionsException");
    } catch (SqoopOptions.InvalidOptionsException ioe) {
      // expected.
    }
  }

  public void testBadNumMappers2() {
  public void testBadNumMappers2() throws Exception {
    String [] args = {
      "-m",
      "x"
    };

    try {
      SqoopOptions opts = new SqoopOptions();
      opts.parse(args);
      SqoopOptions opts = parse(args);
      fail("Expected InvalidOptionsException");
    } catch (SqoopOptions.InvalidOptionsException ioe) {
      // expected.
    }
  }

  public void testGoodNumMappers() throws SqoopOptions.InvalidOptionsException {
  public void testGoodNumMappers() throws Exception {
    String [] args = {
      "-m",
      "4"
    };

    SqoopOptions opts = new SqoopOptions();
    opts.parse(args);
    SqoopOptions opts = parse(args);
    assertEquals(4, opts.getNumMappers());
  }
}
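With parsing moved out of SqoopOptions, option objects in these tests are built by a tool's parseArguments call (see the parse() helper above). A compressed sketch of that flow with illustrative argument values; the meaning of the trailing boolean is taken exactly as used in the tests, not documented here:

import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.tool.ImportTool;

public class ParseArgsSketch {
  public static void main(String[] unused) throws Exception {
    String[] args = { "--fields-terminated-by", "|", "-m", "4" };
    // Same call shape as the parse() helper in TestSqoopOptions above.
    SqoopOptions opts = new ImportTool().parseArguments(args, null, null, false);
    System.out.println("field delim: " + opts.getOutputFieldDelim());
    System.out.println("mappers:     " + opts.getNumMappers());
  }
}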
@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;

import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
@ -32,6 +33,7 @@
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.testutil.SeqFileReader;
import org.apache.hadoop.sqoop.tool.ImportTool;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;

/**
@ -104,8 +106,9 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
    String [] argv = getArgv(true, columns, whereClause);
    runImport(argv);
    try {
      SqoopOptions opts = new SqoopOptions();
      opts.parse(getArgv(false, columns, whereClause));
      SqoopOptions opts = new ImportTool().parseArguments(
          getArgv(false, columns, whereClause),
          null, null, true);

      CompilationManager compileMgr = new CompilationManager(opts);
      String jarFileName = compileMgr.getJarFilename();
@ -146,6 +149,8 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
      assertEquals("Incorrect number of results for query", numExpectedResults, totalResults);
    } catch (InvalidOptionsException ioe) {
      fail(ioe.toString());
    } catch (ParseException pe) {
      fail(pe.toString());
    } finally {
      IOUtils.closeStream(reader);
@ -32,6 +32,10 @@
import org.apache.hadoop.sqoop.testutil.CommonArgs;
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.tool.CodeGenTool;
import org.apache.hadoop.sqoop.tool.CreateHiveTableTool;
import org.apache.hadoop.sqoop.tool.ImportTool;
import org.apache.hadoop.sqoop.tool.SqoopTool;

/**
 * Test HiveImport capability after an import to HDFS.
@ -40,6 +44,21 @@ public class TestHiveImport extends ImportJobTestCase {

  public static final Log LOG = LogFactory.getLog(TestHiveImport.class.getName());

  /**
   * Sets the expected number of columns in the table being manipulated
   * by the test. Under the hood, this sets the expected column names
   * to DATA_COLi for 0 <= i < numCols.
   * @param numCols the number of columns to be created.
   */
  private void setNumCols(int numCols) {
    String [] cols = new String[numCols];
    for (int i = 0; i < numCols; i++) {
      cols[i] = "DATA_COL" + i;
    }

    setColNames(cols);
  }

  /**
   * Create the argv to pass to Sqoop
   * @return the argv as an array of strings.
@ -51,6 +70,12 @@ public class TestHiveImport extends ImportJobTestCase {
      CommonArgs.addHadoopFlags(args);
    }

    if (null != moreArgs) {
      for (String arg: moreArgs) {
        args.add(arg);
      }
    }

    args.add("--table");
    args.add(getTableName());
    args.add("--warehouse-dir");
@ -62,12 +87,48 @@ public class TestHiveImport extends ImportJobTestCase {
    if (null != colNames) {
      args.add("--split-by");
      args.add(colNames[0]);
    } else {
      fail("Could not determine column names.");
    }

    args.add("--num-mappers");
    args.add("1");

    if (null != moreArgs) {
      for (String arg: moreArgs) {
    for (String a : args) {
      LOG.debug("ARG : "+ a);
    }

    return args.toArray(new String[0]);
  }

  /**
   * @return the argv to supply to a code-gen only job for Hive imports.
   */
  protected String [] getCodeGenArgs() {
    ArrayList<String> args = new ArrayList<String>();

    args.add("--table");
    args.add(getTableName());
    args.add("--connect");
    args.add(HsqldbTestServer.getUrl());
    args.add("--hive-import");

    return args.toArray(new String[0]);
  }

  /**
   * @return the argv to supply to a ddl-executing-only job for Hive imports.
   */
  protected String [] getCreateHiveTableArgs(String [] extraArgs) {
    ArrayList<String> args = new ArrayList<String>();

    args.add("--table");
    args.add(getTableName());
    args.add("--connect");
    args.add(HsqldbTestServer.getUrl());

    if (null != extraArgs) {
      for (String arg : extraArgs) {
        args.add(arg);
      }
    }
@ -75,44 +136,46 @@ public class TestHiveImport extends ImportJobTestCase {
    return args.toArray(new String[0]);
  }

  private SqoopOptions getSqoopOptions(String [] extraArgs) {
    SqoopOptions opts = new SqoopOptions();
  private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
    SqoopOptions opts = null;
    try {
      opts.parse(getArgv(false, extraArgs));
    } catch (SqoopOptions.InvalidOptionsException ioe) {
      fail("Invalid options: " + ioe.toString());
      opts = tool.parseArguments(args, null, null, true);
    } catch (Exception e) {
      fail("Invalid options: " + e.toString());
    }

    return opts;
  }

  private void runImportTest(String tableName, String [] types, String [] values,
      String verificationScript, String [] extraArgs) throws IOException {
      String verificationScript, String [] args, SqoopTool tool) throws IOException {

    // create a table and populate it with a row...
    setCurTableName(tableName);
    createTableWithColTypes(types, values);

    // set up our mock hive shell to compare our generated script
    // against the correct expected one.
    SqoopOptions options = getSqoopOptions(extraArgs);
    SqoopOptions options = getSqoopOptions(args, tool);
    String hiveHome = options.getHiveHome();
    assertNotNull("hive.home was not set", hiveHome);
    Path testDataPath = new Path(new Path(hiveHome), "scripts/" + verificationScript);
    System.setProperty("expected.script", testDataPath.toString());

    // verify that we can import it correctly into hive.
    runImport(getArgv(true, extraArgs));
    runImport(tool, args);
  }

  /** Test that we can generate a file containing the DDL and not import. */
  @Test
  public void testGenerateOnly() throws IOException {
    final String TABLE_NAME = "GenerateOnly";
    String [] extraArgs = { "--generate-only" };
    setCurTableName(TABLE_NAME);
    setNumCols(1);

    // Figure out where our target generated .q file is going to be.
    SqoopOptions options = getSqoopOptions(extraArgs);
    String [] emptyArgs = new String[0];
    SqoopOptions options = getSqoopOptions(getArgv(false, null),
        new ImportTool());
    Path ddlFile = new Path(new Path(options.getCodeOutputDir()),
        TABLE_NAME + ".q");
    FileSystem fs = FileSystem.getLocal(new Configuration());
@ -128,7 +191,8 @@ public void testGenerateOnly() throws IOException {
    // Run a basic import, but specify that we're just generating definitions.
    String [] types = { "INTEGER" };
    String [] vals = { "42" };
    runImportTest(TABLE_NAME, types, vals, null, extraArgs);
    runImportTest(TABLE_NAME, types, vals, null, getCodeGenArgs(),
        new CodeGenTool());

    // Test that the generated definition file exists.
    assertTrue("Couldn't find expected ddl file", fs.exists(ddlFile));
@ -142,43 +206,64 @@ public void testGenerateOnly() throws IOException {
  /** Test that strings and ints are handled in the normal fashion */
  @Test
  public void testNormalHiveImport() throws IOException {
    final String TABLE_NAME = "NORMAL_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    runImportTest("NORMAL_HIVE_IMPORT", types, vals, "normalImport.q", null);
    runImportTest(TABLE_NAME, types, vals, "normalImport.q",
        getArgv(false, null), new ImportTool());
  }

  /** Test that table is created in hive with no data import */
  @Test
  public void testCreateOnlyHiveImport() throws IOException {
    final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {"--hive-create-only"};
    runImportTest("CREATE_ONLY_HIVE_IMPORT", types, vals, "createOnlyImport.q", extraArgs);
    runImportTest(TABLE_NAME, types, vals,
        "createOnlyImport.q", getCreateHiveTableArgs(null),
        new CreateHiveTableTool());
  }

  /** Test that table is created in hive and replaces the existing table if any */
  @Test
  public void testCreateOverwriteHiveImport() throws IOException {
    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {"--hive-create-only", "--hive-overwrite"};
    runImportTest("CREATE_OVERWRITE_HIVE_IMPORT", types, vals, "createOverwriteImport.q", extraArgs);
    String [] extraArgs = {"--hive-overwrite"};
    runImportTest(TABLE_NAME, types, vals,
        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
        new CreateHiveTableTool());
  }

  /** Test that dates are coerced properly to strings */
  @Test
  public void testDate() throws IOException {
    final String TABLE_NAME = "DATE_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(2);
    String [] types = { "VARCHAR(32)", "DATE" };
    String [] vals = { "'test'", "'2009-05-12'" };
    runImportTest("DATE_HIVE_IMPORT", types, vals, "dateImport.q", null);
    runImportTest(TABLE_NAME, types, vals, "dateImport.q",
        getArgv(false, null), new ImportTool());
  }

  /** Test that NUMERICs are coerced to doubles */
  @Test
  public void testNumeric() throws IOException {
    final String TABLE_NAME = "NUMERIC_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(2);
    String [] types = { "NUMERIC", "CHAR(64)" };
    String [] vals = { "3.14159", "'foo'" };
    runImportTest("NUMERIC_HIVE_IMPORT", types, vals, "numericImport.q", null);
    runImportTest(TABLE_NAME, types, vals, "numericImport.q",
        getArgv(false, null), new ImportTool());
  }

  /** If bin/hive returns an error exit status, we should get an IOException */
@ -186,10 +271,14 @@ public void testNumeric() throws IOException {
  public void testHiveExitFails() {
    // The expected script is different than the one which would be generated
    // by this, so we expect an IOException out.
    final String TABLE_NAME = "FAILING_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(2);
    String [] types = { "NUMERIC", "CHAR(64)" };
    String [] vals = { "3.14159", "'foo'" };
    try {
      runImportTest("FAILING_HIVE_IMPORT", types, vals, "failingImport.q", null);
      runImportTest(TABLE_NAME, types, vals, "failingImport.q",
          getArgv(false, null), new ImportTool());
      // If we get here, then the run succeeded -- which is incorrect.
      fail("FAILING_HIVE_IMPORT test should have thrown IOException");
    } catch (IOException ioe) {
@ -200,11 +289,15 @@ public void testHiveExitFails() {
  /** Test that we can set delimiters how we want them */
  @Test
  public void testCustomDelimiters() throws IOException {
    final String TABLE_NAME = "CUSTOM_DELIM_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = { "--fields-terminated-by", ",", "--lines-terminated-by", "|" };
    runImportTest("CUSTOM_DELIM_IMPORT", types, vals, "customDelimImport.q", extraArgs);
    String [] extraArgs = { "--fields-terminated-by", ",",
        "--lines-terminated-by", "|" };
    runImportTest(TABLE_NAME, types, vals, "customDelimImport.q",
        getArgv(false, extraArgs), new ImportTool());
  }

}
@ -135,6 +135,22 @@ public void tearDown() {
    }
  }

  @Override
  protected String [] getCodeGenArgv(String... extraArgs) {

    String [] moreArgs = new String[extraArgs.length + 2];
    int i = 0;
    for (i = 0; i < extraArgs.length; i++) {
      moreArgs[i] = extraArgs[i];
    }

    // Add username argument for mysql.
    moreArgs[i++] = "--username";
    moreArgs[i++] = MySQLTestUtils.getCurrentUser();

    return super.getCodeGenArgv(moreArgs);
  }

  @Override
  protected String [] getArgv(boolean includeHadoopFlags,
      String... additionalArgv) {
@ -240,7 +240,7 @@ public void testWithExtraParams() throws IOException {
      "3,Fred,2009-01-23,15,marketing"
    };

    String [] extraArgs = { "-", "--lock-tables" };
    String [] extraArgs = { "--", "--lock-tables" };

    doImport(false, true, getTableName(), expectedResults, extraArgs);
  }
@ -52,7 +52,7 @@
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.testutil.InjectableManagerFactory;
import org.apache.hadoop.sqoop.testutil.InjectableConnManager;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.sqoop.tool.ImportTool;

/**
 * Test aspects of the DataDrivenImportJob class
@ -77,9 +77,9 @@ public void testFailedImportDueToIOException() throws IOException {

    String [] argv = getArgv(true, new String [] { "DATA_COL0" }, conf);

    Sqoop importer = new Sqoop();
    Sqoop importer = new Sqoop(new ImportTool());
    try {
      ToolRunner.run(importer, argv);
      Sqoop.runSqoop(importer, argv);
      fail("Expected IOException running this job.");
    } catch (Exception e) {
      // In debug mode, IOException is wrapped in RuntimeException.
@ -152,9 +152,9 @@ public void testFailedImportDueToJobFail() throws IOException {
        DummyImportJob.class,
        ImportJobBase.class);

    Sqoop importer = new Sqoop(conf);
    Sqoop importer = new Sqoop(new ImportTool(), conf);
    try {
      ToolRunner.run(importer, argv);
      Sqoop.runSqoop(importer, argv);
      fail("Expected ImportException running this job.");
    } catch (Exception e) {
      // In debug mode, ImportException is wrapped in RuntimeException.
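These hunks replace Hadoop's ToolRunner entry point with the new Sqoop wrapper, which is constructed around the specific SqoopTool to run. A minimal sketch of the invocation pattern the tests now use; passing a fresh Configuration here is an illustrative choice:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.tool.ImportTool;

public class RunSqoopSketch {
  public static void main(String[] argv) {
    // Wrap the tool (optionally with a Configuration, as in the tests above).
    Sqoop sqoop = new Sqoop(new ImportTool(), new Configuration());
    // runSqoop hands argv to the wrapped tool and returns its exit status.
    int ret = Sqoop.runSqoop(sqoop, argv);
    System.exit(ret);
  }
}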
@ -42,6 +42,7 @@
import org.apache.hadoop.sqoop.testutil.DirUtil;
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.tool.ImportTool;

/**
 * Test that the ClassWriter generates Java classes based on the given table,
@ -123,9 +124,10 @@ private void runGenerationTest(String [] argv, String classNameToCheck) {
    File classGenDirFile = new File(JAR_GEN_DIR);

    try {
      options.parse(argv);
    } catch (InvalidOptionsException ioe) {
      LOG.error("Could not parse options: " + ioe.toString());
      options = new ImportTool().parseArguments(argv,
          null, options, true);
    } catch (Exception e) {
      LOG.error("Could not parse options: " + e.toString());
    }

    CompilationManager compileMgr = new CompilationManager(options);
@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;

import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
@ -41,6 +42,7 @@
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.testutil.ReparseMapper;
import org.apache.hadoop.sqoop.tool.ImportTool;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;

/**
@ -98,12 +100,12 @@ public void runParseTest(String fieldTerminator, String lineTerminator, String e
        encloseRequired);
    runImport(argv);
    try {
      SqoopOptions opts = new SqoopOptions();

      String tableClassName = getTableName();

      opts.parse(getArgv(false, fieldTerminator, lineTerminator, encloser, escape,
          encloseRequired));
      argv = getArgv(false, fieldTerminator, lineTerminator, encloser, escape,
          encloseRequired);
      SqoopOptions opts = new ImportTool().parseArguments(argv, null, null,
          true);

      CompilationManager compileMgr = new CompilationManager(opts);
      String jarFileName = compileMgr.getJarFilename();
@ -137,6 +139,8 @@ public void runParseTest(String fieldTerminator, String lineTerminator, String e
      JobClient.runJob(job);
    } catch (InvalidOptionsException ioe) {
      fail(ioe.toString());
    } catch (ParseException pe) {
      fail(pe.toString());
    } finally {
      if (null != prevClassLoader) {
        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
@ -24,6 +24,7 @@
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -105,6 +106,14 @@ protected String getWarehouseDir() {
    return colNames;
  }

  protected void setColNames(String [] cols) {
    if (null == cols) {
      this.colNames = null;
    } else {
      this.colNames = Arrays.copyOf(cols, cols.length);
    }
  }

  protected HsqldbTestServer getTestServer() {
    return testServer;
  }
@ -28,9 +28,10 @@

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.tool.ExportTool;

/**
 * Class that implements common methods required for tests which export data
@ -82,20 +83,7 @@ protected Connection getConnection() {
      }
    }

    args.add("--table");
    args.add(getTableName());
    args.add("--export-dir");
    args.add(getTablePath().toString());
    args.add("--connect");
    args.add(getConnectString());
    args.add("--fields-terminated-by");
    args.add("\\t");
    args.add("--lines-terminated-by");
    args.add("\\n");
    args.add("-m");
    args.add("1");

    // The rest of the additional args are appended.
    // The sqoop-specific additional args are then added.
    if (null != additionalArgv) {
      boolean prevIsFlag = false;
      for (String arg : additionalArgv) {
@ -112,6 +100,24 @@ protected Connection getConnection() {
      }
    }

    args.add("--table");
    args.add(getTableName());
    args.add("--export-dir");
    args.add(getTablePath().toString());
    args.add("--connect");
    args.add(getConnectString());
    args.add("--fields-terminated-by");
    args.add("\\t");
    args.add("--lines-terminated-by");
    args.add("\\n");
    args.add("-m");
    args.add("1");

    LOG.debug("args:");
    for (String a : args) {
      LOG.debug(" " + a);
    }

    return args.toArray(new String[0]);
  }

@ -250,12 +256,13 @@ protected List<String> runExport(String [] argv) throws IOException {
    int ret;
    List<String> generatedJars = null;
    try {
      Sqoop exporter = new Sqoop();
      ret = ToolRunner.run(exporter, argv);
      ExportTool exporter = new ExportTool();
      Sqoop sqoop = new Sqoop(exporter);
      ret = Sqoop.runSqoop(sqoop, argv);
      generatedJars = exporter.getGeneratedJarFiles();
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop: " + e.toString());
      e.printStackTrace();
      LOG.error("Got exception running Sqoop: "
          + StringUtils.stringifyException(e));
      ret = 1;
    }

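The reworked runExport() keeps a reference to the ExportTool so the test can ask it afterwards for the jars produced during code generation. A hedged sketch of that same pattern outside the test harness:

import java.util.List;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.tool.ExportTool;

public class ExportAndCollectJars {
  public static void main(String[] argv) {
    ExportTool exporter = new ExportTool();
    int ret = Sqoop.runSqoop(new Sqoop(exporter), argv);
    // The tool instance carries state from the run, e.g. generated jar paths.
    List<String> jars = exporter.getGeneratedJarFiles();
    System.out.println("exit status " + ret + "; generated jars: " + jars);
  }
}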
@ -27,11 +27,12 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.tool.SqoopTool;
import org.apache.hadoop.sqoop.tool.ImportTool;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;

import org.junit.Test;
@ -119,8 +120,8 @@ protected void verifyImport(String expectedVal, String [] importCols) {
    // run the tool through the normal entry-point.
    int ret;
    try {
      Sqoop importer = new Sqoop(conf, opts);
      ret = ToolRunner.run(importer, getArgv(true, importCols, conf));
      Sqoop importer = new Sqoop(new ImportTool(), conf, opts);
      ret = Sqoop.runSqoop(importer, getArgv(true, importCols, conf));
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop: " + e.toString());
      throw new RuntimeException(e);
@ -131,10 +132,13 @@ protected void verifyImport(String expectedVal, String [] importCols) {

    opts = getSqoopOptions(conf);
    try {
      opts.parse(getArgv(false, importCols, conf));
    } catch (InvalidOptionsException ioe) {
      fail(ioe.toString());
      ImportTool importTool = new ImportTool();
      opts = importTool.parseArguments(getArgv(false, importCols, conf), conf,
          opts, true);
    } catch (Exception e) {
      fail(e.toString());
    }

    CompilationManager compileMgr = new CompilationManager(opts);
    String jarFileName = compileMgr.getJarFilename();
    ClassLoader prevClassLoader = null;
@ -166,7 +170,7 @@ protected void verifyImport(String expectedVal, String [] importCols) {
  /**
   * Run a MapReduce-based import (using the argv provided to control execution).
   */
  protected void runImport(String [] argv) throws IOException {
  protected void runImport(SqoopTool tool, String [] argv) throws IOException {
    removeTableDir();

    // run the tool through the normal entry-point.
@ -174,8 +178,8 @@ protected void runImport(String [] argv) throws IOException {
    try {
      Configuration conf = getConf();
      SqoopOptions opts = getSqoopOptions(conf);
      Sqoop importer = new Sqoop(conf, opts);
      ret = ToolRunner.run(importer, argv);
      Sqoop sqoop = new Sqoop(tool, conf, opts);
      ret = Sqoop.runSqoop(sqoop, argv);
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop: " + e.toString());
      e.printStackTrace();
@ -188,4 +192,9 @@ protected void runImport(String [] argv) throws IOException {
    }
  }

  /** run an import using the default ImportTool */
  protected void runImport(String [] argv) throws IOException {
    runImport(new ImportTool(), argv);
  }

}
@ -30,7 +30,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.util.ToolRunner;

import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.Sqoop;