SQOOP-796: Unable to use new Hadoop environment variables
(Sean Mackrory via Jarek Jarcec Cecho)
parent fd0143b5ae
commit 79f941b7e1
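In short, the patch teaches Sqoop to honor the split Hadoop layout of newer packaging: $HADOOP_COMMON_HOME locates bin/hadoop and $HADOOP_MAPRED_HOME locates the MapReduce jars, with $HADOOP_HOME and then the Apache Bigtop defaults as fallbacks. A minimal usage sketch, assuming the Bigtop default paths named in the diff below:

----
$ export HADOOP_COMMON_HOME=/usr/lib/hadoop
$ export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
$ sqoop import --arguments...
----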
@@ -40,17 +40,33 @@ fi
 
 # Find paths to our dependency systems. If they are unset, use CDH defaults.
 
-if [ -z "${HADOOP_HOME}" ]; then
-  HADOOP_HOME=/usr/lib/hadoop
+if [ -z "${HADOOP_COMMON_HOME}" ]; then
+  if [ -n "${HADOOP_HOME}" ]; then
+    HADOOP_COMMON_HOME=${HADOOP_HOME}
+  else
+    HADOOP_COMMON_HOME=/usr/lib/hadoop
+  fi
+fi
+if [ -z "${HADOOP_MAPRED_HOME}" ]; then
+  if [ -n "${HADOOP_HOME}" ]; then
+    HADOOP_MAPRED_HOME=${HADOOP_HOME}
+  else
+    HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
+  fi
 fi
 if [ -z "${HBASE_HOME}" ]; then
   HBASE_HOME=/usr/lib/hbase
 fi
 
 # Check: If we can't find our dependencies, give up here.
-if [ ! -d "${HADOOP_HOME}" ]; then
-  echo "Error: $HADOOP_HOME does not exist!"
-  echo 'Please set $HADOOP_HOME to the root of your Hadoop installation.'
+if [ ! -d "${HADOOP_COMMON_HOME}" ]; then
+  echo "Error: $HADOOP_COMMON_HOME does not exist!"
+  echo 'Please set $HADOOP_COMMON_HOME to the root of your Hadoop installation.'
+  exit 1
+fi
+if [ ! -d "${HADOOP_MAPRED_HOME}" ]; then
+  echo "Error: $HADOOP_MAPRED_HOME does not exist!"
+  echo 'Please set $HADOOP_MAPRED_HOME to the root of your Hadoop MapReduce installation.'
   exit 1
 fi
 
@@ -117,6 +133,7 @@ export SQOOP_CLASSPATH
 export SQOOP_CONF_DIR
 export SQOOP_JAR_DIR
 export HADOOP_CLASSPATH
-export HADOOP_HOME
+export HADOOP_COMMON_HOME
+export HADOOP_MAPRED_HOME
 export HBASE_HOME
 
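The two hunks above define the fallback order: an explicitly set HADOOP_COMMON_HOME (or HADOOP_MAPRED_HOME) wins, otherwise HADOOP_HOME is inherited, otherwise the Bigtop default is used. A small standalone sketch of that logic, with an illustrative /opt/hadoop path that is not part of the patch:

----
# Same fallback as the script hunks above, run in isolation.
export HADOOP_HOME=/opt/hadoop      # illustrative old-style setting
unset HADOOP_COMMON_HOME

if [ -z "${HADOOP_COMMON_HOME}" ]; then
  if [ -n "${HADOOP_HOME}" ]; then
    HADOOP_COMMON_HOME=${HADOOP_HOME}       # old-style setups keep working
  else
    HADOOP_COMMON_HOME=/usr/lib/hadoop      # Bigtop default
  fi
fi

echo "${HADOOP_COMMON_HOME}"   # prints /opt/hadoop
----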
@@ -98,4 +98,4 @@ bin=`dirname ${prgm}`
 bin=`cd ${bin} && pwd`
 
 source ${bin}/configure-sqoop "${bin}"
-exec ${HADOOP_HOME}/bin/hadoop org.apache.sqoop.Sqoop "$@"
+exec ${HADOOP_COMMON_HOME}/bin/hadoop org.apache.sqoop.Sqoop "$@"
@@ -20,7 +20,10 @@
 # Set Hadoop-specific environment variables here.
 
 #Set path to where bin/hadoop is available
-#export HADOOP_HOME=
+#export HADOOP_COMMON_HOME=
+
+#Set path to where hadoop-*-core.jar is available
+#export HADOOP_MAPRED_HOME=
 
 #set the path to where bin/hbase is available
 #export HBASE_HOME=
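For reference, a filled-in copy of the environment template above might look like the following sketch; the paths are the Bigtop defaults used elsewhere in this patch, not required locations:

----
export HADOOP_COMMON_HOME=/usr/lib/hadoop
export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
export HBASE_HOME=/usr/lib/hbase
----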
@@ -33,8 +33,8 @@ Database connection and common options
 --connection-param-file (filename)::
   Optional properties file that provides connection parameters
 
---hadoop-home (dir)::
-  Override $HADOOP_HOME
+--hadoop-mapred-home (dir)::
+  Override $HADOOP_MAPRED_HOME
 
 --help::
   Print usage instructions
@@ -51,3 +51,6 @@ Database connection and common options
 
 --verbose::
   Print more information while working
+
+--hadoop-home (dir)::
+  Deprecated. Override $HADOOP_HOME
@@ -43,9 +43,13 @@ JAVA_HOME::
   e.g., +/usr/java/default+. Hadoop (and Sqoop) requires Sun Java 1.6 which
   can be downloaded from http://java.sun.com.
 
-HADOOP_HOME::
-  The location of the Hadoop jar files. If you installed Hadoop via RPM
-  or DEB, these are in +/usr/lib/hadoop-20+.
+HADOOP_COMMON_HOME::
+  The location of the Hadoop Common files (specifically the bin/hadoop executable).
+  If you installed Hadoop via RPM or DEB, this is in +/usr/lib/hadoop+.
+
+HADOOP_MAPRED_HOME::
+  The location of the Hadoop MapReduce files (specifically the JAR files).
+  If you installed Hadoop via RPM or DEB, this is in +/usr/lib/hadoop-mapreduce+.
 
 HIVE_HOME::
   If you are performing a Hive import, you must identify the location of
@@ -27,7 +27,7 @@ Argument                        Description
                                 use
 +\--driver <class-name>+        Manually specify JDBC driver class\
                                 to use
-+\--hadoop-home <dir>+          Override $HADOOP_HOME
++\--hadoop-mapred-home <dir>+   Override $HADOOP_MAPRED_HOME
 +\--help+                       Print usage instructions
 +-P+                            Read password from console
 +\--password <password>+        Set authentication password
@@ -68,12 +68,13 @@ Common arguments:
    --connect <jdbc-uri>                Specify JDBC connect string
    --connection-manager <class-name>   Specify connection manager class to use
    --driver <class-name>               Manually specify JDBC driver class to use
-   --hadoop-home <dir>                 Override $HADOOP_HOME
+   --hadoop-mapred-home <dir>          Override $HADOOP_MAPRED_HOME
    --help                              Print usage instructions
    -P                                  Read password from console
    --password <password>               Set authentication password
    --username <username>               Set authentication username
    --verbose                           Print more information while working
+   --hadoop-home <dir>                 Deprecated. Override $HADOOP_HOME
 
 Import control arguments:
    --as-avrodatafile                   Imports data to Avro Data Files
@@ -25,10 +25,9 @@ tool you want to use and the arguments that control the tool.
 
 If Sqoop is compiled from its own source, you can run Sqoop without a formal
 installation process by running the +bin/sqoop+ program. Users
-of a packaged deployment of Sqoop (such as an RPM shipped with Cloudera's
-Distribution for Hadoop) will see this program installed as +/usr/bin/sqoop+.
-The remainder of this documentation will refer to this program as
-+sqoop+. For example:
+of a packaged deployment of Sqoop (such as an RPM shipped with Apache Bigtop)
+will see this program installed as +/usr/bin/sqoop+. The remainder of this
+documentation will refer to this program as +sqoop+. For example:
 
 ----
 $ sqoop tool-name [tool-arguments]
@@ -87,24 +86,29 @@ You invoke Sqoop through the program launch capability provided by
 Hadoop. The +sqoop+ command-line program is a wrapper which runs the
 +bin/hadoop+ script shipped with Hadoop. If you have multiple
 installations of Hadoop present on your machine, you can select the
-Hadoop installation by setting the +$HADOOP_HOME+ environment
-variable.
+Hadoop installation by setting the +$HADOOP_COMMON_HOME+ and
++$HADOOP_MAPRED_HOME+ environment variables.
 
 For example:
 
 ----
-$ HADOOP_HOME=/path/to/some/hadoop sqoop import --arguments...
+$ HADOOP_COMMON_HOME=/path/to/some/hadoop \
+    HADOOP_MAPRED_HOME=/path/to/some/hadoop-mapreduce \
+    sqoop import --arguments...
 ----
 
 or:
 
 ----
-$ export HADOOP_HOME=/some/path/to/hadoop
+$ export HADOOP_COMMON_HOME=/some/path/to/hadoop
+$ export HADOOP_MAPRED_HOME=/some/path/to/hadoop-mapreduce
 $ sqoop import --arguments...
 ----
 
-If +$HADOOP_HOME+ is not set, Sqoop will use the default installation
-location for Cloudera's Distribution for Hadoop, +/usr/lib/hadoop+.
+If either of these variables are not set, Sqoop will fall back to
++$HADOOP_HOME+. If it is not set either, Sqoop will use the default
+installation locations for Apache Bigtop, +/usr/lib/hadoop+ and
++/usr/lib/hadoop-mapreduce+, respectively.
 
 The active Hadoop configuration is loaded from +$HADOOP_HOME/conf/+,
 unless the +$HADOOP_CONF_DIR+ environment variable is set.
@@ -126,12 +130,13 @@ Common arguments:
    --connect <jdbc-uri>                Specify JDBC connect string
    --connect-manager <jdbc-uri>        Specify connection manager class to use
    --driver <class-name>               Manually specify JDBC driver class to use
-   --hadoop-home <dir>                 Override $HADOOP_HOME
+   --hadoop-mapred-home <dir>+         Override $HADOOP_MAPRED_HOME
    --help                              Print usage instructions
    -P                                  Read password from console
    --password <password>               Set authentication password
    --username <username>               Set authentication username
    --verbose                           Print more information while working
+   --hadoop-home <dir>+                Deprecated. Override $HADOOP_HOME
 
 [...]
 
@@ -119,7 +119,7 @@ public String toString() {
   // explicitly set by the user (false). If the former, disregard any value
   // for jarOutputDir saved in the metastore.
   @StoredAsProperty("codegen.auto.compile.dir") private boolean jarDirIsAuto;
-  private String hadoopHome; // not serialized to metastore.
+  private String hadoopMapRedHome; // not serialized to metastore.
   @StoredAsProperty("db.split.column") private String splitByCol;
   @StoredAsProperty("db.where.clause") private String whereClause;
   @StoredAsProperty("db.query") private String sqlQuery;
@@ -775,7 +775,7 @@ public static String getHiveHomeDefault() {
   private void initDefaults(Configuration baseConfiguration) {
     // first, set the true defaults if nothing else happens.
     // default action is to run the full pipeline.
-    this.hadoopHome = System.getenv("HADOOP_HOME");
+    this.hadoopMapRedHome = System.getenv("HADOOP_MAPRED_HOME");
 
     this.hiveHome = getHiveHomeDefault();
 
@@ -1253,15 +1253,15 @@ public void setJarOutputDir(String outDir) {
   }
 
   /**
-   * Return the value of $HADOOP_HOME.
-   * @return $HADOOP_HOME, or null if it's not set.
+   * Return the value of $HADOOP_MAPRED_HOME.
+   * @return $HADOOP_MAPRED_HOME, or null if it's not set.
    */
-  public String getHadoopHome() {
-    return hadoopHome;
+  public String getHadoopMapRedHome() {
+    return hadoopMapRedHome;
   }
 
-  public void setHadoopHome(String home) {
-    this.hadoopHome = home;
+  public void setHadoopMapRedHome(String home) {
+    this.hadoopMapRedHome = home;
   }
 
   /**
@@ -71,28 +71,29 @@ public void addSourceFile(String sourceName) {
   }
 
   /**
-   * locate the hadoop-*-core.jar in $HADOOP_HOME or --hadoop-home.
+   * locate the hadoop-*-core.jar in $HADOOP_MAPRED_HOME or
+   * --hadoop-mapred-home.
    * If that doesn't work, check our classpath.
    * @return the filename of the hadoop-*-core.jar file.
    */
   private String findHadoopCoreJar() {
-    String hadoopHome = options.getHadoopHome();
+    String hadoopMapRedHome = options.getHadoopMapRedHome();
 
-    if (null == hadoopHome) {
-      LOG.info("$HADOOP_HOME is not set");
+    if (null == hadoopMapRedHome) {
+      LOG.info("$HADOOP_MAPRED_HOME is not set");
       return Jars.getJarPathForClass(JobConf.class);
     }
 
-    if (!hadoopHome.endsWith(File.separator)) {
-      hadoopHome = hadoopHome + File.separator;
+    if (!hadoopMapRedHome.endsWith(File.separator)) {
+      hadoopMapRedHome = hadoopMapRedHome + File.separator;
     }
 
-    File hadoopHomeFile = new File(hadoopHome);
-    LOG.info("HADOOP_HOME is " + hadoopHomeFile.getAbsolutePath());
-    File [] entries = hadoopHomeFile.listFiles();
+    File hadoopMapRedHomeFile = new File(hadoopMapRedHome);
+    LOG.info("HADOOP_MAPRED_HOME is " + hadoopMapRedHomeFile.getAbsolutePath());
+    File [] entries = hadoopMapRedHomeFile.listFiles();
 
     if (null == entries) {
-      LOG.warn("HADOOP_HOME appears empty or missing");
+      LOG.warn("HADOOP_MAPRED_HOME appears empty or missing");
       return Jars.getJarPathForClass(JobConf.class);
     }
 
@@ -80,6 +80,7 @@ public abstract class BaseSqoopTool extends com.cloudera.sqoop.tool.SqoopTool {
   public static final String SPLIT_BY_ARG = "split-by";
   public static final String WHERE_ARG = "where";
   public static final String HADOOP_HOME_ARG = "hadoop-home";
+  public static final String HADOOP_MAPRED_HOME_ARG = "hadoop-mapred-home";
   public static final String HIVE_HOME_ARG = "hive-home";
   public static final String WAREHOUSE_DIR_ARG = "warehouse-dir";
   public static final String TARGET_DIR_ARG = "target-dir";
@@ -385,8 +386,8 @@ protected RelatedOptions getCommonOptions() {
         .create(PASSWORD_PROMPT_ARG));
 
     commonOpts.addOption(OptionBuilder.withArgName("dir")
-        .hasArg().withDescription("Override $HADOOP_HOME")
-        .withLongOpt(HADOOP_HOME_ARG)
+        .hasArg().withDescription("Override $HADOOP_MAPRED_HOME_ARG")
+        .withLongOpt(HADOOP_MAPRED_HOME_ARG)
         .create());
 
     // misc (common)
@@ -750,7 +751,10 @@ protected void applyCommonOptions(CommandLine in, SqoopOptions out)
     }
 
     if (in.hasOption(HADOOP_HOME_ARG)) {
-      out.setHadoopHome(in.getOptionValue(HADOOP_HOME_ARG));
+      out.setHadoopMapRedHome(in.getOptionValue(HADOOP_HOME_ARG));
     }
+    if (in.hasOption(HADOOP_MAPRED_HOME_ARG)) {
+      out.setHadoopMapRedHome(in.getOptionValue(HADOOP_MAPRED_HOME_ARG));
+    }
 
   }
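On the command line, the option handling above means --hadoop-mapred-home is the new way to point Sqoop at the MapReduce jars, while the old --hadoop-home flag is still accepted but deprecated and now feeds the same setting. A hedged sketch (the path is illustrative):

----
$ sqoop import --hadoop-mapred-home /usr/lib/hadoop-mapreduce --arguments...

# Deprecated spelling, now equivalent to setting the MapReduce home:
$ sqoop import --hadoop-home /usr/lib/hadoop-mapreduce --arguments...
----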
@@ -37,5 +37,5 @@ source ${SQOOP_HOME}/bin/configure-sqoop "${bin}"
 PERFTEST_CLASSES=${SQOOP_HOME}/build/perftest/classes
 
 export HADOOP_CLASSPATH=${PERFTEST_CLASSES}:${SQOOP_JAR}:${HADOOP_CLASSPATH}
-${HADOOP_HOME}/bin/hadoop "$@"
+${HADOOP_COMMON_HOME}/bin/hadoop "$@"
 