Mirror of https://github.com/apache/sqoop.git

Add checkstyle to build process.

Fix the overwhelming majority of existing violations.

From: Aaron Kimball <aaron@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149899 13f79535-47bb-0310-9956-ffa450edef68
Committed by Andrew Bayer, 2011-07-22 20:03:47 +00:00
parent e926bf1fe0
commit 0b96b5f1c3
136 changed files with 1981 additions and 1275 deletions
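Most of the Java changes in this commit follow a few mechanical patterns: reordering 'final static' to 'static final', wrapping lines that exceed the style checker's line-length limit, ending Javadoc sentences with a period, and breaking long string concatenations across lines. A hedged before/after sketch of the most common fix (the constant below is invented for illustration, not taken from the diff):

    // Before: modifier order and line length both trip the style checker
    // (hypothetical constant).
    public final static String EXAMPLE_FACTORY_KEY = "sqoop.example.connection.factories.list";

    // After: 'static final' ordering and the literal wrapped onto its own line.
    public static final String EXAMPLE_FACTORY_KEY =
        "sqoop.example.connection.factories.list";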


@@ -88,6 +88,12 @@
   <property name="cobertura.format" value="html" /> <!-- may be 'xml' -->
   <property name="cobertura.class.dir" value="${cobertura.dir}/classes" />
 
+  <!-- Checking code style -->
+  <property name="checkstyle.xml" value="${test.dir}/checkstyle.xml" />
+  <property name="checkstyle.format.xsl"
+      value="${test.dir}/checkstyle-noframes.xsl" />
+  <property name="checkstyle.report.dir" value="${build.dir}" />
+
   <!-- When testing with non-free JDBC drivers, override this parameter
        to contain the path to the driver library dir.
@@ -599,6 +605,24 @@
         file="${cobertura.home}/cobertura.jar" />
   </target>
 
+  <target name="checkstyle" depends="ivy-retrieve-checkstyle"
+      description="Check source code conventions">
+    <taskdef resource="checkstyletask.properties">
+      <classpath refid="${name}.checkstyle.classpath" />
+    </taskdef>
+
+    <mkdir dir="${checkstyle.report.dir}" />
+
+    <checkstyle config="${checkstyle.xml}" failOnViolation="false">
+      <fileset dir="${base.src.dir}" includes="**/*.java" />
+      <formatter type="xml"
+          toFile="${checkstyle.report.dir}/checkstyle-errors.xml" />
+    </checkstyle>
+
+    <xslt style="${checkstyle.format.xsl}"
+        in="${checkstyle.report.dir}/checkstyle-errors.xml"
+        out="${checkstyle.report.dir}/checkstyle-errors.html" />
+  </target>
+
   <target name="ivy-probe-antlib" >
     <condition property="ivy.found">
       <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
@@ -690,4 +714,14 @@
         conf="${hadoop.dist}test" />
   </target>
 
+  <!-- retrieve ivy-managed artifacts for checkstyle -->
+  <target name="ivy-resolve-checkstyle" depends="ivy-init">
+    <ivy:resolve settingsRef="${name}.ivy.settings" conf="checkstyle" />
+  </target>
+
+  <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle">
+    <ivy:retrieve settingsRef="${name}.ivy.settings"
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" sync="true" />
+    <ivy:cachepath pathid="${name}.checkstyle.classpath" conf="checkstyle" />
+  </target>
+
 </project>


@@ -52,6 +52,9 @@
        Things we actually need to ship are marked with a 'redist'
        configuration. -->
     <conf name="redist" visibility="private" extends="master" />
+
+    <!-- configuration for running checkstyle -->
+    <conf name="checkstyle" visibility="private" />
   </configurations>
 
   <publications>
@@ -92,5 +95,9 @@
         conf="common->default"/>
     <dependency org="commons-io" name="commons-io" rev="${commons-io.version}"
         conf="common->default;redist->default"/>
+
+    <!-- dependencies for static analysis -->
+    <dependency org="checkstyle" name="checkstyle" rev="${checkstyle.version}"
+        conf="checkstyle->default" />
   </dependencies>
 </ivy-module>


@@ -16,6 +16,8 @@
 # This properties file lists the versions of the various artifacts we use.
 # It drives ivy and the generation of a maven POM
 
+checkstyle.version=5.0
+
 commons-cli.version=1.2
 commons-io.version=1.4
 commons-logging.version=1.0.4


@@ -37,7 +37,8 @@
  *
  * This class delegates the actual responsibility for instantiating
  * ConnManagers to one or more instances of ManagerFactory. ManagerFactories
- * are consulted in the order specified in sqoop-site.xml (sqoop.connection.factories).
+ * are consulted in the order specified in sqoop-site.xml
+ * (sqoop.connection.factories).
  */
 public class ConnFactory {
@@ -51,10 +52,13 @@ public ConnFactory(Configuration conf) {
   /** The sqoop-site.xml configuration property used to set the list of
    * available ManagerFactories.
    */
-  public final static String FACTORY_CLASS_NAMES_KEY = "sqoop.connection.factories";
+  public static final String FACTORY_CLASS_NAMES_KEY =
+      "sqoop.connection.factories";
 
-  // The default value for sqoop.connection.factories is the name of the DefaultManagerFactory.
-  final static String DEFAULT_FACTORY_CLASS_NAMES = DefaultManagerFactory.class.getName();
+  // The default value for sqoop.connection.factories is the
+  // name of the DefaultManagerFactory.
+  static final String DEFAULT_FACTORY_CLASS_NAMES =
+      DefaultManagerFactory.class.getName();
 
   /** The list of ManagerFactory instances consulted by getManager().
    */
@@ -76,7 +80,8 @@ private void instantiateFactories(Configuration conf) {
         LOG.debug("Loaded manager factory: " + className);
         factories.add(factory);
       } catch (ClassNotFoundException cnfe) {
-        LOG.error("Could not load ManagerFactory " + className + " (not found)");
+        LOG.error("Could not load ManagerFactory " + className
+            + " (not found)");
       }
     }
   }
@@ -98,7 +103,8 @@ public ConnManager getManager(SqoopOptions opts) throws IOException {
       }
     }
 
-    throw new IOException("No manager for connect string: " + opts.getConnectString());
+    throw new IOException("No manager for connect string: "
+        + opts.getConnectString());
   }
 }
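The ManagerFactory consultation described in the class comment is a first-match-wins loop over the configured factories. A minimal sketch of that pattern, assuming each factory exposes an accept(SqoopOptions) method that returns null when it cannot handle the connect string (the method name is an assumption, not confirmed by this diff):

    // Sketch: ask each configured ManagerFactory in order; the first one
    // that recognizes the connect string supplies the ConnManager.
    public ConnManager getManagerSketch(SqoopOptions opts) throws IOException {
      for (ManagerFactory factory : factories) {
        ConnManager mgr = factory.accept(opts);
        if (null != mgr) {
          return mgr;
        }
      }
      throw new IOException("No manager for connect string: "
          + opts.getConnectString());
    }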


@@ -18,11 +18,7 @@
 package org.apache.hadoop.sqoop;
 
-import java.io.IOException;
-import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.logging.Log;
@@ -32,18 +28,8 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
-import org.apache.hadoop.sqoop.cli.ToolOptions;
-import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ConnManager;
-import org.apache.hadoop.sqoop.manager.ExportJobContext;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.shims.ShimLoader;
 import org.apache.hadoop.sqoop.tool.HelpTool;
 import org.apache.hadoop.sqoop.tool.SqoopTool;
-import org.apache.hadoop.sqoop.util.ExportException;
-import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Main entry-point for Sqoop
@@ -201,7 +187,8 @@ public static int runTool(String [] args) {
     String toolName = args[0];
     SqoopTool tool = SqoopTool.getTool(toolName);
     if (null == tool) {
-      System.err.println("No such sqoop tool: " + toolName + ". See 'sqoop help'.");
+      System.err.println("No such sqoop tool: " + toolName
+          + ". See 'sqoop help'.");
       return 1;
     }
@@ -219,3 +206,4 @@ public static void main(String [] args) {
     System.exit(ret);
   }
 }


@ -37,14 +37,14 @@
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
/** /**
* Command-line arguments used by Sqoop * Command-line arguments used by Sqoop.
*/ */
public class SqoopOptions { public class SqoopOptions {
public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName()); public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
/** /**
* Thrown when invalid cmdline options are given * Thrown when invalid cmdline options are given.
*/ */
@SuppressWarnings("serial") @SuppressWarnings("serial")
public static class InvalidOptionsException extends Exception { public static class InvalidOptionsException extends Exception {
@ -64,7 +64,7 @@ public String toString() {
} }
} }
// selects in-HDFS destination file format /** Selects in-HDFS destination file format. */
public enum FileLayout { public enum FileLayout {
TextFile, TextFile,
SequenceFile SequenceFile
@@ -98,18 +98,27 @@ public enum FileLayout {
   private boolean overwriteHiveTable;
   private String hiveTableName;
   private String packageName; // package to prepend to auto-named classes.
-  private String className; // package+class to apply to individual table import.
-                            // also used as an *input* class with existingJarFile.
-  private String existingJarFile; // Name of a jar containing existing table definition
-                                  // class to use.
+
+  // package+class to apply to individual table import.
+  // also used as an *input* class with existingJarFile.
+  private String className;
+
+  // Name of a jar containing existing table definition
+  // class to use.
+  private String existingJarFile;
 
   private int numMappers;
   private boolean useCompression;
-  private long directSplitSize; // In direct mode, open a new stream every X bytes.
-  private long maxInlineLobSize; // Max size of an inline LOB; larger LOBs are written
-                                 // to external files on disk.
-  private String exportDir; // HDFS path to read from when performing an export
+
+  // In direct mode, open a new stream every X bytes.
+  private long directSplitSize;
+
+  // Max size of an inline LOB; larger LOBs are written
+  // to external files on disk.
+  private long maxInlineLobSize;
+
+  // HDFS path to read from when performing an export
+  private String exportDir;
 
   private char inputFieldDelim;
   private char inputRecordDelim;
@ -142,7 +151,7 @@ public SqoopOptions(Configuration conf) {
} }
/** /**
* Alternate SqoopOptions interface used mostly for unit testing * Alternate SqoopOptions interface used mostly for unit testing.
* @param connect JDBC connect string to use * @param connect JDBC connect string to use
* @param table Table to read * @param table Table to read
*/ */
@ -153,19 +162,22 @@ public SqoopOptions(final String connect, final String table) {
this.tableName = table; this.tableName = table;
} }
private boolean getBooleanProperty(Properties props, String propName, boolean defaultValue) { private boolean getBooleanProperty(Properties props, String propName,
boolean defaultValue) {
String str = props.getProperty(propName, String str = props.getProperty(propName,
Boolean.toString(defaultValue)).toLowerCase(); Boolean.toString(defaultValue)).toLowerCase();
return "true".equals(str) || "yes".equals(str) || "1".equals(str); return "true".equals(str) || "yes".equals(str) || "1".equals(str);
} }
private long getLongProperty(Properties props, String propName, long defaultValue) { private long getLongProperty(Properties props, String propName,
long defaultValue) {
String str = props.getProperty(propName, String str = props.getProperty(propName,
Long.toString(defaultValue)).toLowerCase(); Long.toString(defaultValue)).toLowerCase();
try { try {
return Long.parseLong(str); return Long.parseLong(str);
} catch (NumberFormatException nfe) { } catch (NumberFormatException nfe) {
LOG.warn("Could not parse integer value for config parameter " + propName); LOG.warn("Could not parse integer value for config parameter "
+ propName);
return defaultValue; return defaultValue;
} }
} }
@ -189,31 +201,40 @@ private void loadFromProperties() {
this.username = props.getProperty("db.username", this.username); this.username = props.getProperty("db.username", this.username);
this.password = props.getProperty("db.password", this.password); this.password = props.getProperty("db.password", this.password);
this.tableName = props.getProperty("db.table", this.tableName); this.tableName = props.getProperty("db.table", this.tableName);
this.connectString = props.getProperty("db.connect.url", this.connectString); this.connectString = props.getProperty("db.connect.url",
this.connectString);
this.splitByCol = props.getProperty("db.split.column", this.splitByCol); this.splitByCol = props.getProperty("db.split.column", this.splitByCol);
this.whereClause = props.getProperty("db.where.clause", this.whereClause); this.whereClause = props.getProperty("db.where.clause", this.whereClause);
this.driverClassName = props.getProperty("jdbc.driver", this.driverClassName); this.driverClassName = props.getProperty("jdbc.driver",
this.warehouseDir = props.getProperty("hdfs.warehouse.dir", this.warehouseDir); this.driverClassName);
this.warehouseDir = props.getProperty("hdfs.warehouse.dir",
this.warehouseDir);
this.hiveHome = props.getProperty("hive.home", this.hiveHome); this.hiveHome = props.getProperty("hive.home", this.hiveHome);
this.className = props.getProperty("java.classname", this.className); this.className = props.getProperty("java.classname", this.className);
this.packageName = props.getProperty("java.packagename", this.packageName); this.packageName = props.getProperty("java.packagename",
this.existingJarFile = props.getProperty("java.jar.file", this.existingJarFile); this.packageName);
this.existingJarFile = props.getProperty("java.jar.file",
this.existingJarFile);
this.exportDir = props.getProperty("export.dir", this.exportDir); this.exportDir = props.getProperty("export.dir", this.exportDir);
this.direct = getBooleanProperty(props, "direct.import", this.direct); this.direct = getBooleanProperty(props, "direct.import", this.direct);
this.hiveImport = getBooleanProperty(props, "hive.import", this.hiveImport); this.hiveImport = getBooleanProperty(props, "hive.import",
this.overwriteHiveTable = getBooleanProperty(props, "hive.overwrite.table", this.overwriteHiveTable); this.hiveImport);
this.useCompression = getBooleanProperty(props, "compression", this.useCompression); this.overwriteHiveTable = getBooleanProperty(props,
"hive.overwrite.table", this.overwriteHiveTable);
this.useCompression = getBooleanProperty(props, "compression",
this.useCompression);
this.directSplitSize = getLongProperty(props, "direct.split.size", this.directSplitSize = getLongProperty(props, "direct.split.size",
this.directSplitSize); this.directSplitSize);
} catch (IOException ioe) { } catch (IOException ioe) {
LOG.error("Could not read properties file " + DEFAULT_CONFIG_FILE + ": " + ioe.toString()); LOG.error("Could not read properties file " + DEFAULT_CONFIG_FILE + ": "
+ ioe.toString());
} finally { } finally {
if (null != istream) { if (null != istream) {
try { try {
istream.close(); istream.close();
} catch (IOException ioe) { } catch (IOException ioe) {
// ignore this; we're closing. // Ignore this; we're closing.
} }
} }
} }
@ -221,7 +242,7 @@ private void loadFromProperties() {
/** /**
* @return the temp directory to use; this is guaranteed to end with * @return the temp directory to use; this is guaranteed to end with
* the file separator character (e.g., '/') * the file separator character (e.g., '/').
*/ */
public String getTempDir() { public String getTempDir() {
return this.tmpDir; return this.tmpDir;
@ -280,17 +301,19 @@ private void initDefaults(Configuration baseConfiguration) {
} }
/** /**
* Given a string containing a single character or an escape sequence representing * Given a string containing a single character or an escape sequence
* a char, return that char itself. * representing a char, return that char itself.
* *
* Normal literal characters return themselves: "x" -&gt; 'x', etc. * Normal literal characters return themselves: "x" -&gt; 'x', etc.
* Strings containing a '\' followed by one of t, r, n, or b escape to the usual * Strings containing a '\' followed by one of t, r, n, or b escape to the
* character as seen in Java: "\n" -&gt; (newline), etc. * usual character as seen in Java: "\n" -&gt; (newline), etc.
* *
* Strings like "\0ooo" return the character specified by the octal sequence 'ooo' * Strings like "\0ooo" return the character specified by the octal sequence
* Strings like "\0xhhh" or "\0Xhhh" return the character specified by the hex sequence 'hhh' * 'ooo'. Strings like "\0xhhh" or "\0Xhhh" return the character specified by
* the hex sequence 'hhh'.
* *
* If the input string contains leading or trailing spaces, these are ignored. * If the input string contains leading or trailing spaces, these are
* ignored.
*/ */
public static char toChar(String charish) throws InvalidOptionsException { public static char toChar(String charish) throws InvalidOptionsException {
if (null == charish || charish.length() == 0) { if (null == charish || charish.length() == 0) {
@ -300,8 +323,9 @@ public static char toChar(String charish) throws InvalidOptionsException {
if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) { if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
if (charish.length() == 3) { if (charish.length() == 3) {
throw new InvalidOptionsException("Base-16 value expected for character argument." throw new InvalidOptionsException(
+ "\nTry --help for usage instructions."); "Base-16 value expected for character argument."
+ "\nTry --help for usage instructions.");
} else { } else {
String valStr = charish.substring(3); String valStr = charish.substring(3);
int val = Integer.parseInt(valStr, 16); int val = Integer.parseInt(valStr, 16);
@ -323,7 +347,8 @@ public static char toChar(String charish) throws InvalidOptionsException {
return '\\'; return '\\';
} else if (charish.length() > 2) { } else if (charish.length() > 2) {
// we don't have any 3+ char escape strings. // we don't have any 3+ char escape strings.
throw new InvalidOptionsException("Cannot understand character argument: " + charish throw new InvalidOptionsException(
"Cannot understand character argument: " + charish
+ "\nTry --help for usage instructions."); + "\nTry --help for usage instructions.");
} else { } else {
// this is some sort of normal 1-character escape sequence. // this is some sort of normal 1-character escape sequence.
@ -344,7 +369,8 @@ public static char toChar(String charish) throws InvalidOptionsException {
case '\\': case '\\':
return '\\'; return '\\';
default: default:
throw new InvalidOptionsException("Cannot understand character argument: " + charish throw new InvalidOptionsException(
"Cannot understand character argument: " + charish
+ "\nTry --help for usage instructions."); + "\nTry --help for usage instructions.");
} }
} }
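The escape handling documented in the toChar() Javadoc above is easiest to see with concrete inputs. The expected values below are reasoned from that Javadoc rather than taken from a test in this commit (InvalidOptionsException is declared by toChar and would need handling):

    // Hypothetical illustrations of SqoopOptions.toChar() behavior.
    char plain  = SqoopOptions.toChar("x");       // 'x'  - literal character
    char tab    = SqoopOptions.toChar("\\t");     // '\t' - Java-style escape
    char octalA = SqoopOptions.toChar("\\0101");  // 'A'  - octal 101 == 65
    char hexA   = SqoopOptions.toChar("\\0x41");  // 'A'  - hex 41 == 65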
@ -359,8 +385,9 @@ public static char toChar(String charish) throws InvalidOptionsException {
} }
} }
/** get the temporary directory; guaranteed to end in File.separator /**
* (e.g., '/') * Get the temporary directory; guaranteed to end in File.separator
* (e.g., '/').
*/ */
public String getTmpDir() { public String getTmpDir() {
return tmpDir; return tmpDir;
@ -447,7 +474,8 @@ public String getPassword() {
} }
/** /**
* Allow the user to enter his password on the console without printing characters. * Allow the user to enter his password on the console without printing
* characters.
* @return the password as a string * @return the password as a string
*/ */
private String securePasswordEntry() { private String securePasswordEntry() {
@ -475,7 +503,7 @@ public void setDirectMode(boolean isDirect) {
} }
/** /**
* @return the number of map tasks to use for import * @return the number of map tasks to use for import.
*/ */
public int getNumMappers() { public int getNumMappers() {
return this.numMappers; return this.numMappers;
@ -486,7 +514,7 @@ public void setNumMappers(int numMappers) {
} }
/** /**
* @return the user-specified absolute class name for the table * @return the user-specified absolute class name for the table.
*/ */
public String getClassName() { public String getClassName() {
return className; return className;
@ -497,7 +525,8 @@ public void setClassName(String className) {
} }
/** /**
* @return the user-specified package to prepend to table names via --package-name. * @return the user-specified package to prepend to table names via
* --package-name.
*/ */
public String getPackageName() { public String getPackageName() {
return packageName; return packageName;
@ -515,7 +544,7 @@ public void setHiveHome(String hiveHome) {
this.hiveHome = hiveHome; this.hiveHome = hiveHome;
} }
/** @return true if we should import the table into Hive */ /** @return true if we should import the table into Hive. */
public boolean doHiveImport() { public boolean doHiveImport() {
return hiveImport; return hiveImport;
} }
@ -525,7 +554,7 @@ public void setHiveImport(boolean hiveImport) {
} }
/** /**
* @return the user-specified option to overwrite existing table in hive * @return the user-specified option to overwrite existing table in hive.
*/ */
public boolean doOverwriteHiveTable() { public boolean doOverwriteHiveTable() {
return overwriteHiveTable; return overwriteHiveTable;
@ -536,7 +565,7 @@ public void setOverwriteHiveTable(boolean overwrite) {
} }
/** /**
* @return location where .java files go; guaranteed to end with '/' * @return location where .java files go; guaranteed to end with '/'.
*/ */
public String getCodeOutputDir() { public String getCodeOutputDir() {
if (codeOutputDir.endsWith(File.separator)) { if (codeOutputDir.endsWith(File.separator)) {
@ -551,7 +580,8 @@ public void setCodeOutputDir(String outputDir) {
} }
/** /**
* @return location where .jar and .class files go; guaranteed to end with '/' * @return location where .jar and .class files go; guaranteed to end with
* '/'.
*/ */
public String getJarOutputDir() { public String getJarOutputDir() {
if (jarOutputDir.endsWith(File.separator)) { if (jarOutputDir.endsWith(File.separator)) {
@ -566,7 +596,7 @@ public void setJarOutputDir(String outDir) {
} }
/** /**
* Return the value of $HADOOP_HOME * Return the value of $HADOOP_HOME.
* @return $HADOOP_HOME, or null if it's not set. * @return $HADOOP_HOME, or null if it's not set.
*/ */
public String getHadoopHome() { public String getHadoopHome() {
@ -589,7 +619,7 @@ public void setDebugSqlCmd(String sqlStatement) {
} }
/** /**
* @return The JDBC driver class name specified with --driver * @return The JDBC driver class name specified with --driver.
*/ */
public String getDriverClassName() { public String getDriverClassName() {
return driverClassName; return driverClassName;
@ -622,8 +652,8 @@ public void setFileLayout(FileLayout layout) {
} }
/** /**
* @return the field delimiter to use when parsing lines. Defaults to the field delim * @return the field delimiter to use when parsing lines. Defaults to the
* to use when printing lines * field delim to use when printing lines.
*/ */
public char getInputFieldDelim() { public char getInputFieldDelim() {
if (inputFieldDelim == '\000') { if (inputFieldDelim == '\000') {
@ -638,8 +668,8 @@ public void setInputFieldsTerminatedBy(char c) {
} }
/** /**
* @return the record delimiter to use when parsing lines. Defaults to the record delim * @return the record delimiter to use when parsing lines. Defaults to the
* to use when printing lines. * record delim to use when printing lines.
*/ */
public char getInputRecordDelim() { public char getInputRecordDelim() {
if (inputRecordDelim == '\000') { if (inputRecordDelim == '\000') {
@ -654,8 +684,8 @@ public void setInputLinesTerminatedBy(char c) {
} }
/** /**
* @return the character that may enclose fields when parsing lines. Defaults to the * @return the character that may enclose fields when parsing lines.
* enclosing-char to use when printing lines. * Defaults to the enclosing-char to use when printing lines.
*/ */
public char getInputEnclosedBy() { public char getInputEnclosedBy() {
if (inputEnclosedBy == '\000') { if (inputEnclosedBy == '\000') {
@ -670,8 +700,8 @@ public void setInputEnclosedBy(char c) {
} }
/** /**
* @return the escape character to use when parsing lines. Defaults to the escape * @return the escape character to use when parsing lines. Defaults to the
* character used when printing lines. * escape character used when printing lines.
*/ */
public char getInputEscapedBy() { public char getInputEscapedBy() {
if (inputEscapedBy == '\000') { if (inputEscapedBy == '\000') {
@ -686,8 +716,9 @@ public void setInputEscapedBy(char c) {
} }
/** /**
* @return true if fields must be enclosed by the --enclosed-by character when parsing. * @return true if fields must be enclosed by the --enclosed-by character
* Defaults to false. Set true when --input-enclosed-by is used. * when parsing. Defaults to false. Set true when --input-enclosed-by is
* used.
*/ */
public boolean isInputEncloseRequired() { public boolean isInputEncloseRequired() {
if (inputEnclosedBy == '\000') { if (inputEnclosedBy == '\000') {
@ -702,7 +733,8 @@ public void setInputEncloseRequired(boolean required) {
} }
/** /**
* @return the character to print between fields when importing them to text. * @return the character to print between fields when importing them to
* text.
*/ */
public char getOutputFieldDelim() { public char getOutputFieldDelim() {
return this.outputFieldDelim; return this.outputFieldDelim;
@ -714,7 +746,8 @@ public void setFieldsTerminatedBy(char c) {
/** /**
* @return the character to print between records when importing them to text. * @return the character to print between records when importing them to
* text.
*/ */
public char getOutputRecordDelim() { public char getOutputRecordDelim() {
return this.outputRecordDelim; return this.outputRecordDelim;
@ -725,7 +758,8 @@ public void setLinesTerminatedBy(char c) {
} }
/** /**
* @return a character which may enclose the contents of fields when imported to text. * @return a character which may enclose the contents of fields when
* imported to text.
*/ */
public char getOutputEnclosedBy() { public char getOutputEnclosedBy() {
return this.outputEnclosedBy; return this.outputEnclosedBy;
@ -736,7 +770,8 @@ public void setEnclosedBy(char c) {
} }
/** /**
* @return a character which signifies an escape sequence when importing to text. * @return a character which signifies an escape sequence when importing to
* text.
*/ */
public char getOutputEscapedBy() { public char getOutputEscapedBy() {
return this.outputEscapedBy; return this.outputEscapedBy;
@ -747,8 +782,9 @@ public void setEscapedBy(char c) {
} }
/** /**
* @return true if fields imported to text must be enclosed by the EnclosedBy char. * @return true if fields imported to text must be enclosed by the
* default is false; set to true if --enclosed-by is used instead of --optionally-enclosed-by. * EnclosedBy char. default is false; set to true if --enclosed-by is used
* instead of --optionally-enclosed-by.
*/ */
public boolean isOutputEncloseRequired() { public boolean isOutputEncloseRequired() {
return this.outputMustBeEnclosed; return this.outputMustBeEnclosed;
@ -770,7 +806,7 @@ public void setUseCompression(boolean useCompression) {
} }
/** /**
* @return the name of the destination table when importing to Hive * @return the name of the destination table when importing to Hive.
*/ */
public String getHiveTableName() { public String getHiveTableName() {
if (null != this.hiveTableName) { if (null != this.hiveTableName) {
@ -829,7 +865,7 @@ public void setConf(Configuration config) {
} }
/** /**
* @return command-line arguments after a '-' * @return command-line arguments after a '-'.
*/ */
public String [] getExtraArgs() { public String [] getExtraArgs() {
if (extraArgs == null) { if (extraArgs == null) {
@ -855,3 +891,4 @@ public void setExtraArgs(String [] args) {
} }
} }
} }


@@ -51,7 +51,7 @@ public class SqoopParser extends GnuParser {
   // this Sqoop class, we cannot see their package-specific methods.
   // So we just call it by reflection. As long as we're at it, this
   // allows us to also put SqoopParser in its own package.
-  static java.lang.reflect.Method addValForProcessing;
+  private static java.lang.reflect.Method addValForProcessing;
 
   static {
     try {


@@ -114,7 +114,7 @@ public void printHelp() {
   }
 
   /**
-   * Print the help to the console using the specified help formatter
+   * Print the help to the console using the specified help formatter.
    * @param formatter the HelpFormatter to use.
    */
   public void printHelp(HelpFormatter formatter) {


@ -86,7 +86,7 @@ private String getHiveBinPath() {
/** /**
* If we used a MapReduce-based upload of the data, remove the _logs dir * If we used a MapReduce-based upload of the data, remove the _logs dir
* from where we put it, before running Hive LOAD DATA INPATH * from where we put it, before running Hive LOAD DATA INPATH.
*/ */
private void removeTempLogs(String tableName) throws IOException { private void removeTempLogs(String tableName) throws IOException {
FileSystem fs = FileSystem.get(configuration); FileSystem fs = FileSystem.get(configuration);
@ -102,7 +102,8 @@ private void removeTempLogs(String tableName) throws IOException {
if (fs.exists(logsPath)) { if (fs.exists(logsPath)) {
LOG.info("Removing temporary files from import process: " + logsPath); LOG.info("Removing temporary files from import process: " + logsPath);
if (!fs.delete(logsPath, true)) { if (!fs.delete(logsPath, true)) {
LOG.warn("Could not delete temporary files; continuing with import, but it may fail."); LOG.warn("Could not delete temporary files; "
+ "continuing with import, but it may fail.");
} }
} }
} }
@ -124,7 +125,7 @@ private boolean isGenerateOnly() {
*/ */
private File getScriptFile(String outputTableName) throws IOException { private File getScriptFile(String outputTableName) throws IOException {
if (!isGenerateOnly()) { if (!isGenerateOnly()) {
return File.createTempFile("hive-script-",".txt", return File.createTempFile("hive-script-", ".txt",
new File(options.getTempDir())); new File(options.getTempDir()));
} else { } else {
return new File(new File(options.getCodeOutputDir()), return new File(new File(options.getCodeOutputDir()),
@ -194,7 +195,8 @@ public void importTable(String inputTableName, String outputTableName,
try { try {
w.close(); w.close();
} catch (IOException ioe) { } catch (IOException ioe) {
LOG.warn("IOException closing stream to Hive script: " + ioe.toString()); LOG.warn("IOException closing stream to Hive script: "
+ ioe.toString());
} }
} }
} }
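The _logs cleanup that removeTempLogs() performs is a plain FileSystem delete executed before the Hive LOAD DATA INPATH statement runs. A rough sketch under assumed names (the table path below is invented; the real code derives it from the warehouse directory options):

    // Sketch: drop the MapReduce _logs directory so Hive's loader
    // does not trip over it. Path construction is illustrative only.
    Path tablePath = new Path("/user/example/imported", tableName);
    Path logsPath = new Path(tablePath, "_logs");
    FileSystem fs = FileSystem.get(configuration);
    if (fs.exists(logsPath) && !fs.delete(logsPath, true)) {
      LOG.warn("Could not delete temporary files; "
          + "continuing with import, but it may fail.");
    }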


@@ -26,10 +26,13 @@
 /**
  * Defines conversion between SQL types and Hive types.
  */
-public class HiveTypes {
+public final class HiveTypes {
 
   public static final Log LOG = LogFactory.getLog(HiveTypes.class.getName());
 
+  private HiveTypes() {
+  }
+
   /**
    * Given JDBC SQL types coming from another database, what is the best
    * mapping to a Hive-specific type?
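The question posed in that Javadoc, mapping JDBC types onto Hive types, is essentially a switch over java.sql.Types constants. The sketch below only illustrates the shape of such a mapping; the specific pairings are assumptions rather than the class's actual table:

    // Illustrative JDBC-to-Hive mapping (assumed pairings, not the real ones).
    public static String toHiveTypeSketch(int sqlType) {
      switch (sqlType) {
      case java.sql.Types.INTEGER:
      case java.sql.Types.SMALLINT:
        return "INT";
      case java.sql.Types.VARCHAR:
      case java.sql.Types.CHAR:
        return "STRING";
      case java.sql.Types.DOUBLE:
        return "DOUBLE";
      default:
        return null; // null signals an unsupported type
      }
    }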


@ -45,7 +45,8 @@
*/ */
public class TableDefWriter { public class TableDefWriter {
public static final Log LOG = LogFactory.getLog(TableDefWriter.class.getName()); public static final Log LOG = LogFactory.getLog(
TableDefWriter.class.getName());
private SqoopOptions options; private SqoopOptions options;
private ConnManager connManager; private ConnManager connManager;
@ -139,13 +140,15 @@ public String getCreateTableStmt() throws IOException {
Integer colType = columnTypes.get(col); Integer colType = columnTypes.get(col);
String hiveColType = connManager.toHiveType(colType); String hiveColType = connManager.toHiveType(colType);
if (null == hiveColType) { if (null == hiveColType) {
throw new IOException("Hive does not support the SQL type for column " + col); throw new IOException("Hive does not support the SQL type for column "
+ col);
} }
sb.append(col + " " + hiveColType); sb.append(col + " " + hiveColType);
if (HiveTypes.isHiveTypeImprovised(colType)) { if (HiveTypes.isHiveTypeImprovised(colType)) {
LOG.warn("Column " + col + " had to be cast to a less precise type in Hive"); LOG.warn(
"Column " + col + " had to be cast to a less precise type in Hive");
} }
} }
@ -171,7 +174,7 @@ public String getCreateTableStmt() throws IOException {
org.apache.hadoop.hdfs.server.namenode.NameNode.DEFAULT_PORT; org.apache.hadoop.hdfs.server.namenode.NameNode.DEFAULT_PORT;
/** /**
* @return the LOAD DATA statement to import the data in HDFS into hive * @return the LOAD DATA statement to import the data in HDFS into hive.
*/ */
public String getLoadDataStmt() throws IOException { public String getLoadDataStmt() throws IOException {
String warehouseDir = options.getWarehouseDir(); String warehouseDir = options.getWarehouseDir();
@ -207,10 +210,9 @@ public String getLoadDataStmt() throws IOException {
* @param charNum the character to use as a delimiter * @param charNum the character to use as a delimiter
* @return a string of the form "\ooo" where ooo is an octal number * @return a string of the form "\ooo" where ooo is an octal number
* in [000, 177]. * in [000, 177].
* @throws IllegalArgumentException if charNum &gt;> 0177. * @throws IllegalArgumentException if charNum &gt; 0177.
*/ */
static String getHiveOctalCharCode(int charNum) static String getHiveOctalCharCode(int charNum) {
throws IllegalArgumentException {
if (charNum > 0177) { if (charNum > 0177) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Character " + charNum + " is an out-of-range delimiter"); "Character " + charNum + " is an out-of-range delimiter");

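The octal form described in getHiveOctalCharCode's Javadoc can be produced with a single String.format call. This is a hedged equivalent of the documented behavior, not the method's actual body:

    // Sketch: render a delimiter as Hive's '\ooo' octal escape, rejecting
    // values above 0177 as the Javadoc specifies.
    static String toHiveOctalSketch(int charNum) {
      if (charNum > 0177) {
        throw new IllegalArgumentException(
            "Character " + charNum + " is an out-of-range delimiter");
      }
      return String.format("\\%03o", charNum); // e.g. 9 (tab) -> "\011"
    }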

@@ -18,7 +18,6 @@
 package org.apache.hadoop.sqoop.io;
 
-import java.io.FilterInputStream;
 import java.io.InputStream;
 import java.io.IOException;


@ -76,7 +76,10 @@
* The LobFile format is specified at: * The LobFile format is specified at:
* http://wiki.github.com/cloudera/sqoop/sip-3 * http://wiki.github.com/cloudera/sqoop/sip-3
*/ */
public class LobFile { public final class LobFile {
private LobFile() {
}
public static final Log LOG = LogFactory.getLog(LobFile.class.getName()); public static final Log LOG = LogFactory.getLog(LobFile.class.getName());
@ -716,7 +719,7 @@ public Iterator<IndexTableEntry> iterator() {
/** /**
* Class that writes out a LobFile. Instantiate via LobFile.create(). * Class that writes out a LobFile. Instantiate via LobFile.create().
*/ */
public static abstract class Writer implements Closeable { public abstract static class Writer implements Closeable {
/** /**
* If this Writer is writing to a physical LobFile, then this returns * If this Writer is writing to a physical LobFile, then this returns
@ -733,6 +736,7 @@ public static abstract class Writer implements Closeable {
@Override @Override
protected synchronized void finalize() throws Throwable { protected synchronized void finalize() throws Throwable {
close(); close();
super.finalize();
} }
/** /**
@ -759,7 +763,7 @@ public abstract java.io.Writer writeClobRecord(long len)
throws IOException; throws IOException;
/** /**
* Report the current position in the output file * Report the current position in the output file.
* @return the number of bytes written through this Writer. * @return the number of bytes written through this Writer.
*/ */
public abstract long tell() throws IOException; public abstract long tell() throws IOException;
@ -795,7 +799,8 @@ private static class V0Writer extends Writer {
// The LobIndex we are constructing. // The LobIndex we are constructing.
private LinkedList<IndexSegment> indexSegments; private LinkedList<IndexSegment> indexSegments;
private int entriesInSegment; // number of entries in the current IndexSegment. // Number of entries in the current IndexSegment.
private int entriesInSegment;
private IndexTable indexTable; private IndexTable indexTable;
// Number of entries that can be written to a single IndexSegment. // Number of entries that can be written to a single IndexSegment.
@ -1078,7 +1083,7 @@ public java.io.Writer writeClobRecord(long len) throws IOException {
/** /**
* Class that can read a LobFile. Create with LobFile.open(). * Class that can read a LobFile. Create with LobFile.open().
*/ */
public static abstract class Reader implements Closeable { public abstract static class Reader implements Closeable {
/** /**
* If this Reader is reading from a physical LobFile, then this returns * If this Reader is reading from a physical LobFile, then this returns
* the file path it is reading from. Otherwise it returns null. * the file path it is reading from. Otherwise it returns null.
@ -1087,7 +1092,7 @@ public static abstract class Reader implements Closeable {
public abstract Path getPath(); public abstract Path getPath();
/** /**
* Report the current position in the file * Report the current position in the file.
* @return the current offset from the start of the file in bytes. * @return the current offset from the start of the file in bytes.
*/ */
public abstract long tell() throws IOException; public abstract long tell() throws IOException;
@ -1179,6 +1184,7 @@ protected void checkForNull(InputStream in) throws IOException {
@Override @Override
protected synchronized void finalize() throws Throwable { protected synchronized void finalize() throws Throwable {
close(); close();
super.finalize();
} }
} }
@ -1449,17 +1455,20 @@ private int findRecordStartMark(byte [] buf) {
return -1; // couldn't find it. return -1; // couldn't find it.
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public Path getPath() { public Path getPath() {
return this.path; return this.path;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public long tell() throws IOException { public long tell() throws IOException {
checkForNull(this.underlyingInput); checkForNull(this.underlyingInput);
return this.underlyingInput.getPos(); return this.underlyingInput.getPos();
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public void seek(long pos) throws IOException { public void seek(long pos) throws IOException {
closeUserStream(); closeUserStream();
@ -1576,6 +1585,7 @@ private void closeUserStream() throws IOException {
} }
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public boolean next() throws IOException { public boolean next() throws IOException {
LOG.debug("Checking for next record"); LOG.debug("Checking for next record");
@ -1646,26 +1656,31 @@ public boolean next() throws IOException {
return true; return true;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public boolean isRecordAvailable() { public boolean isRecordAvailable() {
return this.isAligned; return this.isAligned;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public long getRecordLen() { public long getRecordLen() {
return this.claimedRecordLen; return this.claimedRecordLen;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public long getRecordId() { public long getRecordId() {
return this.curEntryId; return this.curEntryId;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public long getRecordOffset() { public long getRecordOffset() {
return this.curRecordOffset; return this.curRecordOffset;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public InputStream readBlobRecord() throws IOException { public InputStream readBlobRecord() throws IOException {
if (!isRecordAvailable()) { if (!isRecordAvailable()) {
@ -1700,6 +1715,7 @@ public InputStream readBlobRecord() throws IOException {
return this.userInputStream; return this.userInputStream;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public java.io.Reader readClobRecord() throws IOException { public java.io.Reader readClobRecord() throws IOException {
// Get a handle to the binary reader and then wrap it. // Get a handle to the binary reader and then wrap it.
@ -1707,6 +1723,7 @@ public java.io.Reader readClobRecord() throws IOException {
return new InputStreamReader(is); return new InputStreamReader(is);
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public void close() throws IOException { public void close() throws IOException {
closeUserStream(); closeUserStream();
@ -1724,6 +1741,7 @@ public void close() throws IOException {
this.isAligned = false; this.isAligned = false;
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public boolean isClosed() { public boolean isClosed() {
return this.underlyingInput == null; return this.underlyingInput == null;
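Taken together, the Reader methods shown above suggest a simple iteration idiom. The snippet below is a usage guess: LobFile.open(Path, Configuration) is assumed from the class Javadoc and is not verified against this commit:

    // Hypothetical read loop over a LobFile using the Reader API above.
    LobFile.Reader reader = LobFile.open(lobFilePath, conf);
    try {
      while (reader.next()) {                 // align on the next record
        long id = reader.getRecordId();
        InputStream blob = reader.readBlobRecord();
        // ... consume the stream for record 'id' ...
        blob.close();
      }
    } finally {
      reader.close();
    }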


@@ -19,10 +19,8 @@
 package org.apache.hadoop.sqoop.io;
 
 import java.io.BufferedWriter;
-import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.IOException;
-import java.util.Formatter;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -48,7 +46,7 @@ public SplittableBufferedWriter(
     this.alwaysFlush = false;
   }
 
-  /** For testing */
+  /** For testing. */
   SplittableBufferedWriter(final SplittingOutputStream splitOutputStream,
       final boolean alwaysFlush) {
     super(new OutputStreamWriter(splitOutputStream));


@@ -35,7 +35,7 @@
  * We serialize in one of two formats:
  *
  * First, check whether the BigInt can fit in a long:
- * boolean b = BigIntegerPart > LONG_MAX || BigIntegerPart < LONG_MIN
+ * boolean b = BigIntegerPart &gt; LONG_MAX || BigIntegerPart &lt; LONG_MIN
  *
  * [int: scale][boolean: b == false][long: BigInt-part]
 * [int: scale][boolean: b == true][string: BigInt-part.toString()]
@@ -46,8 +46,10 @@ public final class BigDecimalSerializer {
 
   private BigDecimalSerializer() { }
 
-  static final BigInteger LONG_MAX_AS_BIGINT = BigInteger.valueOf(Long.MAX_VALUE);
-  static final BigInteger LONG_MIN_AS_BIGINT = BigInteger.valueOf(Long.MIN_VALUE);
+  static final BigInteger LONG_MAX_AS_BIGINT =
+      BigInteger.valueOf(Long.MAX_VALUE);
+  static final BigInteger LONG_MIN_AS_BIGINT =
+      BigInteger.valueOf(Long.MIN_VALUE);
 
   public static void write(BigDecimal d, DataOutput out) throws IOException {
     int scale = d.scale();
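The two on-disk layouts listed in the class comment translate directly into a write routine. The sketch below follows that field order; it is an approximation (the string branch uses writeUTF for simplicity, which may differ from the real encoding):

    // Sketch of the documented layout:
    // [int: scale][boolean: b][long BigInt-part]  or
    // [int: scale][boolean: b][string BigInt-part.toString()]
    public static void writeSketch(BigDecimal d, DataOutput out)
        throws IOException {
      BigInteger bigIntPart = d.unscaledValue();
      boolean fitsInLong = bigIntPart.compareTo(LONG_MAX_AS_BIGINT) <= 0
          && bigIntPart.compareTo(LONG_MIN_AS_BIGINT) >= 0;
      out.writeInt(d.scale());
      out.writeBoolean(!fitsInLong);        // 'b' from the comment above
      if (fitsInLong) {
        out.writeLong(bigIntPart.longValue());
      } else {
        out.writeUTF(bigIntPart.toString());
      }
    }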


@ -25,19 +25,9 @@
import java.io.InputStream; import java.io.InputStream;
import java.util.Arrays; import java.util.Arrays;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.sqoop.io.LobFile; import org.apache.hadoop.sqoop.io.LobFile;
import org.apache.hadoop.sqoop.io.LobReaderCache;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@ -85,7 +75,7 @@ protected InputStream getInternalSource(BytesWritable data) {
} }
@Override @Override
protected BytesWritable deepCopyData() { protected BytesWritable deepCopyData(BytesWritable data) {
return new BytesWritable(Arrays.copyOf(data.getBytes(), data.getLength())); return new BytesWritable(Arrays.copyOf(data.getBytes(), data.getLength()));
} }
@ -94,15 +84,18 @@ public void readFieldsInternal(DataInput in) throws IOException {
// For internally-stored BLOBs, the data is a BytesWritable // For internally-stored BLOBs, the data is a BytesWritable
// containing the actual data. // containing the actual data.
if (null == this.data) { BytesWritable data = getDataObj();
this.data = new BytesWritable();
if (null == data) {
data = new BytesWritable();
} }
this.data.readFields(in); data.readFields(in);
setDataObj(data);
} }
@Override @Override
public void writeInternal(DataOutput out) throws IOException { public void writeInternal(DataOutput out) throws IOException {
data.write(out); getDataObj().write(out);
} }
/** /**


@ -21,20 +21,11 @@
import java.io.DataInput; import java.io.DataInput;
import java.io.DataOutput; import java.io.DataOutput;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader; import java.io.Reader;
import java.io.StringReader; import java.io.StringReader;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.sqoop.io.LobFile; import org.apache.hadoop.sqoop.io.LobFile;
/** /**
@ -73,7 +64,7 @@ protected Reader getInternalSource(String data) {
} }
@Override @Override
protected String deepCopyData() { protected String deepCopyData(String data) {
return data; return data;
} }
@ -85,12 +76,12 @@ protected String getInternalData(String data) {
@Override @Override
public void readFieldsInternal(DataInput in) throws IOException { public void readFieldsInternal(DataInput in) throws IOException {
// For internally-stored clobs, the data is written as UTF8 Text. // For internally-stored clobs, the data is written as UTF8 Text.
this.data = Text.readString(in); setDataObj(Text.readString(in));
} }
@Override @Override
public void writeInternal(DataOutput out) throws IOException { public void writeInternal(DataOutput out) throws IOException {
Text.writeString(out, data); Text.writeString(out, getDataObj());
} }
/** /**


@@ -28,31 +28,35 @@ private FieldFormatter() { }
   /**
    * Takes an input string representing the value of a field, encloses it in
-   * enclosing chars, and escapes any occurrences of such characters in the middle.
-   * The escape character itself is also escaped if it appears in the text of the
-   * field.
+   * enclosing chars, and escapes any occurrences of such characters in the
+   * middle. The escape character itself is also escaped if it appears in the
+   * text of the field.
    *
    * The field is enclosed only if:
    *   enclose != '\000', and:
    *   encloseRequired is true, or
-   *   one of the characters in the mustEscapeFor list is present in the string.
+   *   one of the characters in the mustEscapeFor list is present
+   *   in the string.
    *
    * Escaping is not performed if the escape char is '\000'.
    *
    * @param str - The user's string to escape and enclose
-   * @param escape - What string to use as the escape sequence. If "" or null, then don't escape.
-   * @param enclose - The string to use to enclose str e.g. "quoted". If "" or null, then don't
-   *     enclose.
-   * @param mustEncloseFor - A list of characters; if one is present in 'str', then str must be
-   *     enclosed
-   * @param encloseRequired - If true, then always enclose, regardless of mustEscapeFor
-   * @return the escaped, enclosed version of 'str'
+   * @param escape - What string to use as the escape sequence. If "" or null,
+   *     then don't escape.
+   * @param enclose - The string to use to enclose str e.g. "quoted". If "" or
+   *     null, then don't enclose.
+   * @param mustEncloseFor - A list of characters; if one is present in 'str',
+   *     then str must be enclosed.
+   * @param encloseRequired - If true, then always enclose, regardless of
+   *     mustEscapeFor.
+   * @return the escaped, enclosed version of 'str'.
    */
-  public static final String escapeAndEnclose(String str, String escape, String enclose,
-      char [] mustEncloseFor, boolean encloseRequired) {
+  public static String escapeAndEnclose(String str, String escape,
+      String enclose, char [] mustEncloseFor, boolean encloseRequired) {
 
     // true if we can use an escape character.
-    boolean escapingLegal = (null != escape && escape.length() > 0 && !escape.equals("\000"));
+    boolean escapingLegal = (null != escape
+        && escape.length() > 0 && !escape.equals("\000"));
     String withEscapes;
 
     if (null == str) {
@@ -60,7 +64,7 @@ public static final String escapeAndEnclose(String str, String escape, String en
     }
 
     if (escapingLegal) {
-      // escaping is legal. Escape any instances of the escape char itself
+      // escaping is legal. Escape any instances of the escape char itself.
       withEscapes = str.replace(escape, escape + escape);
     } else {
       // no need to double-escape
@@ -68,12 +72,13 @@ public static final String escapeAndEnclose(String str, String escape, String en
     }
 
     if (null == enclose || enclose.length() == 0 || enclose.equals("\000")) {
-      // The enclose-with character was left unset, so we can't enclose items. We're done.
+      // The enclose-with character was left unset, so we can't enclose items.
+      // We're done.
       return withEscapes;
     }
 
-    // if we have an enclosing character, and escaping is legal, then the encloser must
-    // always be escaped.
+    // if we have an enclosing character, and escaping is legal, then the
+    // encloser must always be escaped.
     if (escapingLegal) {
       withEscapes = withEscapes.replace(enclose, escape + enclose);
     }
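The enclosing and escaping rules spelled out in escapeAndEnclose's Javadoc are easiest to check with concrete calls. The expected results below are reasoned from those rules, not captured from a test run:

    // Hypothetical calls with escape = "\\" and enclose = "\"".
    char [] mustEncloseFor = { ',', '\n' };

    // No special characters and enclosing not required: returned as-is.
    String a = FieldFormatter.escapeAndEnclose(
        "plain", "\\", "\"", mustEncloseFor, false);      // plain

    // Contains a comma from mustEncloseFor, so the field is quoted.
    String b = FieldFormatter.escapeAndEnclose(
        "a,b", "\\", "\"", mustEncloseFor, false);        // "a,b"

    // encloseRequired forces quoting; the embedded quote is escaped first.
    String c = FieldFormatter.escapeAndEnclose(
        "say \"hi\"", "\\", "\"", mustEncloseFor, true);  // "say \"hi\""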


@ -20,8 +20,6 @@
import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.BytesWritable;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date; import java.sql.Date;
import java.sql.PreparedStatement; import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
@ -37,14 +35,16 @@
*/ */
public final class JdbcWritableBridge { public final class JdbcWritableBridge {
// Currently, cap BLOB/CLOB objects at 16 MB until we can use external storage. // Currently, cap BLOB/CLOB objects at 16 MB until we can use external
public final static long MAX_BLOB_LENGTH = 16 * 1024 * 1024; // storage.
public final static long MAX_CLOB_LENGTH = 16 * 1024 * 1024; public static final long MAX_BLOB_LENGTH = 16 * 1024 * 1024;
public static final long MAX_CLOB_LENGTH = 16 * 1024 * 1024;
private JdbcWritableBridge() { private JdbcWritableBridge() {
} }
public static Integer readInteger(int colNum, ResultSet r) throws SQLException { public static Integer readInteger(int colNum, ResultSet r)
throws SQLException {
int val; int val;
val = r.getInt(colNum); val = r.getInt(colNum);
if (r.wasNull()) { if (r.wasNull()) {
@ -88,7 +88,8 @@ public static Double readDouble(int colNum, ResultSet r) throws SQLException {
} }
} }
public static Boolean readBoolean(int colNum, ResultSet r) throws SQLException { public static Boolean readBoolean(int colNum, ResultSet r)
throws SQLException {
boolean val; boolean val;
val = r.getBoolean(colNum); val = r.getBoolean(colNum);
if (r.wasNull()) { if (r.wasNull()) {
@ -102,7 +103,8 @@ public static Time readTime(int colNum, ResultSet r) throws SQLException {
return r.getTime(colNum); return r.getTime(colNum);
} }
public static Timestamp readTimestamp(int colNum, ResultSet r) throws SQLException { public static Timestamp readTimestamp(int colNum, ResultSet r)
throws SQLException {
return r.getTimestamp(colNum); return r.getTimestamp(colNum);
} }
@ -116,7 +118,8 @@ public static BytesWritable readBytesWritable(int colNum, ResultSet r)
return new BytesWritable(bytes); return new BytesWritable(bytes);
} }
public static BigDecimal readBigDecimal(int colNum, ResultSet r) throws SQLException { public static BigDecimal readBigDecimal(int colNum, ResultSet r)
throws SQLException {
return r.getBigDecimal(colNum); return r.getBigDecimal(colNum);
} }
@ -132,8 +135,8 @@ public static ClobRef readClobRef(int colNum, ResultSet r)
return null; return null;
} }
public static void writeInteger(Integer val, int paramIdx, int sqlType, PreparedStatement s) public static void writeInteger(Integer val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -141,8 +144,8 @@ public static void writeInteger(Integer val, int paramIdx, int sqlType, Prepared
} }
} }
public static void writeLong(Long val, int paramIdx, int sqlType, PreparedStatement s) public static void writeLong(Long val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -150,8 +153,8 @@ public static void writeLong(Long val, int paramIdx, int sqlType, PreparedStatem
} }
} }
public static void writeDouble(Double val, int paramIdx, int sqlType, PreparedStatement s) public static void writeDouble(Double val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -159,8 +162,8 @@ public static void writeDouble(Double val, int paramIdx, int sqlType, PreparedSt
} }
} }
public static void writeBoolean(Boolean val, int paramIdx, int sqlType, PreparedStatement s) public static void writeBoolean(Boolean val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -168,8 +171,8 @@ public static void writeBoolean(Boolean val, int paramIdx, int sqlType, Prepared
} }
} }
public static void writeFloat(Float val, int paramIdx, int sqlType, PreparedStatement s) public static void writeFloat(Float val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -177,8 +180,8 @@ public static void writeFloat(Float val, int paramIdx, int sqlType, PreparedStat
} }
} }
public static void writeString(String val, int paramIdx, int sqlType, PreparedStatement s) public static void writeString(String val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -186,8 +189,8 @@ public static void writeString(String val, int paramIdx, int sqlType, PreparedSt
} }
} }
public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType, PreparedStatement s) public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -195,8 +198,8 @@ public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType, Prep
} }
} }
public static void writeTime(Time val, int paramIdx, int sqlType, PreparedStatement s) public static void writeTime(Time val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -204,8 +207,8 @@ public static void writeTime(Time val, int paramIdx, int sqlType, PreparedStatem
} }
} }
public static void writeDate(Date val, int paramIdx, int sqlType, PreparedStatement s) public static void writeDate(Date val, int paramIdx, int sqlType,
throws SQLException { PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -228,8 +231,8 @@ public static void writeBytesWritable(BytesWritable val, int paramIdx,
} }
public static void writeBigDecimal(BigDecimal val, int paramIdx, int sqlType, PreparedStatement s) public static void writeBigDecimal(BigDecimal val, int paramIdx,
throws SQLException { int sqlType, PreparedStatement s) throws SQLException {
if (null == val) { if (null == val) {
s.setNull(paramIdx, sqlType); s.setNull(paramIdx, sqlType);
} else { } else {
@ -18,30 +18,21 @@
package org.apache.hadoop.sqoop.lib; package org.apache.hadoop.sqoop.lib;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.Closeable; import java.io.Closeable;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader; import java.io.Reader;
import java.io.Writer; import java.io.Writer;
import java.math.BigDecimal;
import java.sql.Blob; import java.sql.Blob;
import java.sql.Clob; import java.sql.Clob;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.sqoop.io.LobFile; import org.apache.hadoop.sqoop.io.LobFile;
/** /**
@ -57,9 +48,9 @@
public class LargeObjectLoader implements Closeable { public class LargeObjectLoader implements Closeable {
// Spill to external storage for BLOB/CLOB objects > 16 MB. // Spill to external storage for BLOB/CLOB objects > 16 MB.
public final static long DEFAULT_MAX_LOB_LENGTH = 16 * 1024 * 1024; public static final long DEFAULT_MAX_LOB_LENGTH = 16 * 1024 * 1024;
public final static String MAX_INLINE_LOB_LEN_KEY = public static final String MAX_INLINE_LOB_LEN_KEY =
"sqoop.inline.lob.length.max"; "sqoop.inline.lob.length.max";
private Configuration conf; private Configuration conf;
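The two constants above define the spill policy for large objects: any BLOB/CLOB longer than sqoop.inline.lob.length.max bytes (16 MB by default) is written to an external LobFile rather than kept inline in the record. A minimal sketch of overriding that threshold through the Hadoop Configuration; the 4 MB value is illustrative:

    Configuration conf = new Configuration();   // org.apache.hadoop.conf.Configuration
    // Illustrative: spill any BLOB/CLOB larger than 4 MB to external LobFile storage.
    conf.setLong(LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY, 4L * 1024 * 1024);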
@ -75,7 +66,7 @@ public class LargeObjectLoader implements Closeable {
private long nextLobFileId = 0; private long nextLobFileId = 0;
/** /**
* Create a new LargeObjectLoader * Create a new LargeObjectLoader.
* @param conf the Configuration to use * @param conf the Configuration to use
* @param workPath the HDFS working directory for this task. * @param workPath the HDFS working directory for this task.
*/ */
@ -91,6 +82,7 @@ public LargeObjectLoader(Configuration conf, Path workPath)
@Override @Override
protected synchronized void finalize() throws Throwable { protected synchronized void finalize() throws Throwable {
close(); close();
super.finalize();
} }
@Override @Override
@ -22,12 +22,10 @@
import java.io.DataInput; import java.io.DataInput;
import java.io.DataOutput; import java.io.DataOutput;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
@ -57,7 +55,7 @@ protected LobRef() {
this.offset = 0; this.offset = 0;
this.length = 0; this.length = 0;
this.data = null; this.realData = null;
} }
protected LobRef(CONTAINERTYPE container) { protected LobRef(CONTAINERTYPE container) {
@ -65,7 +63,7 @@ protected LobRef(CONTAINERTYPE container) {
this.offset = 0; this.offset = 0;
this.length = 0; this.length = 0;
this.data = container; this.realData = container;
} }
protected LobRef(String file, long offset, long length) { protected LobRef(String file, long offset, long length) {
@ -73,11 +71,21 @@ protected LobRef(String file, long offset, long length) {
this.offset = offset; this.offset = offset;
this.length = length; this.length = length;
this.data = null; this.realData = null;
} }
// If the data is 'small', it's held directly, here. // If the data is 'small', it's held directly, here.
protected CONTAINERTYPE data; private CONTAINERTYPE realData;
/** Internal API to retrieve the data object. */
protected CONTAINERTYPE getDataObj() {
return realData;
}
/** Internal API to set the data object. */
protected void setDataObj(CONTAINERTYPE data) {
this.realData = data;
}
// If there data is too large to materialize fully, it's written into a file // If there data is too large to materialize fully, it's written into a file
// whose path (relative to the rest of the dataset) is recorded here. This // whose path (relative to the rest of the dataset) is recorded here. This
@ -90,7 +98,7 @@ protected LobRef(String file, long offset, long length) {
private long length; private long length;
// If we've opened a LobFile object, track our reference to it here. // If we've opened a LobFile object, track our reference to it here.
protected LobFile.Reader reader; private LobFile.Reader lobReader;
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -102,9 +110,9 @@ public Object clone() throws CloneNotSupportedException {
LobRef<DATATYPE, CONTAINERTYPE, ACCESSORTYPE> r = LobRef<DATATYPE, CONTAINERTYPE, ACCESSORTYPE> r =
(LobRef<DATATYPE, CONTAINERTYPE, ACCESSORTYPE>) super.clone(); (LobRef<DATATYPE, CONTAINERTYPE, ACCESSORTYPE>) super.clone();
r.reader = null; // Reference to opened reader is not duplicated. r.lobReader = null; // Reference to opened reader is not duplicated.
if (null != data) { if (null != realData) {
r.data = deepCopyData(); r.realData = deepCopyData(realData);
} }
return r; return r;
@ -113,12 +121,13 @@ public Object clone() throws CloneNotSupportedException {
@Override @Override
protected synchronized void finalize() throws Throwable { protected synchronized void finalize() throws Throwable {
close(); close();
super.finalize();
} }
public void close() throws IOException { public void close() throws IOException {
// Discard any open LobReader. // Discard any open LobReader.
if (null != this.reader) { if (null != this.lobReader) {
LobReaderCache.getCache().recycle(this.reader); LobReaderCache.getCache().recycle(this.lobReader);
} }
} }
@ -142,7 +151,7 @@ public boolean isExternal() {
* @throws IOException if it could not read the LOB from external storage. * @throws IOException if it could not read the LOB from external storage.
*/ */
public ACCESSORTYPE getDataStream(Mapper.Context mapContext) public ACCESSORTYPE getDataStream(Mapper.Context mapContext)
throws IllegalArgumentException, IOException { throws IOException {
InputSplit split = mapContext.getInputSplit(); InputSplit split = mapContext.getInputSplit();
if (split instanceof FileSplit) { if (split instanceof FileSplit) {
Path basePath = ((FileSplit) split).getPath().getParent(); Path basePath = ((FileSplit) split).getPath().getParent();
@ -171,35 +180,35 @@ public ACCESSORTYPE getDataStream(Configuration conf, Path basePath)
Path pathToRead = LobReaderCache.qualify( Path pathToRead = LobReaderCache.qualify(
new Path(basePath, fileName), conf); new Path(basePath, fileName), conf);
LOG.debug("Retreving data stream from external path: " + pathToRead); LOG.debug("Retreving data stream from external path: " + pathToRead);
if (reader != null) { if (lobReader != null) {
// We already have a reader open to a LobFile. Is it the correct file? // We already have a reader open to a LobFile. Is it the correct file?
if (!pathToRead.equals(reader.getPath())) { if (!pathToRead.equals(lobReader.getPath())) {
// No. Close this reader and get the correct one. // No. Close this.lobReader and get the correct one.
LOG.debug("Releasing previous external reader for " LOG.debug("Releasing previous external reader for "
+ reader.getPath()); + lobReader.getPath());
LobReaderCache.getCache().recycle(reader); LobReaderCache.getCache().recycle(lobReader);
reader = LobReaderCache.getCache().get(pathToRead, conf); lobReader = LobReaderCache.getCache().get(pathToRead, conf);
} }
} else { } else {
reader = LobReaderCache.getCache().get(pathToRead, conf); lobReader = LobReaderCache.getCache().get(pathToRead, conf);
} }
// We now have a LobFile.Reader associated with the correct file. Get to // We now have a LobFile.Reader associated with the correct file. Get to
// the correct offset and return an InputStream/Reader to the user. // the correct offset and return an InputStream/Reader to the user.
if (reader.tell() != offset) { if (lobReader.tell() != offset) {
LOG.debug("Seeking to record start offset " + offset); LOG.debug("Seeking to record start offset " + offset);
reader.seek(offset); lobReader.seek(offset);
} }
if (!reader.next()) { if (!lobReader.next()) {
throw new IOException("Could not locate record at " + pathToRead throw new IOException("Could not locate record at " + pathToRead
+ ":" + offset); + ":" + offset);
} }
return getExternalSource(reader); return getExternalSource(lobReader);
} else { } else {
// This data is already materialized in memory; wrap it and return. // This data is already materialized in memory; wrap it and return.
return getInternalSource(data); return getInternalSource(realData);
} }
} }
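A sketch of how a caller outside a mapper might consume this, using the (Configuration, Path) overload named in the hunk header above. The get_notes() accessor is hypothetical, and ClobRef exposing a java.io.Reader as its accessor type is an assumption, not something shown in this diff:

    ClobRef clob = record.get_notes();              // hypothetical generated accessor
    if (clob.isExternal()) {
      // Value lives in a LobFile stored alongside the dataset; stream it.
      java.io.Reader reader = clob.getDataStream(conf, basePath);
      // ... consume reader ...
    } else {
      // Small value was materialized inline.
      String text = clob.getData();
    }
    clob.close();   // hands any open LobFile.Reader back to the LobReaderCache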
@ -223,7 +232,7 @@ protected abstract ACCESSORTYPE getExternalSource(LobFile.Reader reader)
/** /**
* Make a copy of the materialized data. * Make a copy of the materialized data.
*/ */
protected abstract CONTAINERTYPE deepCopyData(); protected abstract CONTAINERTYPE deepCopyData(CONTAINERTYPE data);
public DATATYPE getData() { public DATATYPE getData() {
if (isExternal()) { if (isExternal()) {
@ -231,7 +240,7 @@ public DATATYPE getData() {
"External LOBs must be read via getDataStream()"); "External LOBs must be read via getDataStream()");
} }
return getInternalData(data); return getInternalData(realData);
} }
@Override @Override
@ -240,7 +249,7 @@ public String toString() {
return "externalLob(lf," + fileName + "," + Long.toString(offset) return "externalLob(lf," + fileName + "," + Long.toString(offset)
+ "," + Long.toString(length) + ")"; + "," + Long.toString(length) + ")";
} else { } else {
return data.toString(); return realData.toString();
} }
} }
@ -260,7 +269,7 @@ public void readFields(DataInput in) throws IOException {
boolean isExternal = in.readBoolean(); boolean isExternal = in.readBoolean();
if (isExternal) { if (isExternal) {
this.data = null; this.realData = null;
String storageType = Text.readString(in); String storageType = Text.readString(in);
if (!storageType.equals("lf")) { if (!storageType.equals("lf")) {
@ -29,11 +29,13 @@ public final class LobSerializer {
private LobSerializer() { } private LobSerializer() { }
public static void writeClob(ClobRef clob, DataOutput out) throws IOException { public static void writeClob(ClobRef clob, DataOutput out)
throws IOException {
clob.write(out); clob.write(out);
} }
public static void writeBlob(BlobRef blob, DataOutput out) throws IOException { public static void writeBlob(BlobRef blob, DataOutput out)
throws IOException {
blob.write(out); blob.write(out);
} }
@ -65,6 +65,9 @@ private enum ParseState {
UNENCLOSED_ESCAPE UNENCLOSED_ESCAPE
} }
/**
* An error thrown when parsing fails.
*/
public static class ParseError extends Exception { public static class ParseError extends Exception {
public ParseError() { public ParseError() {
super("ParseError"); super("ParseError");
@ -246,7 +249,8 @@ record sep halts processing.
sb.append(curChar); sb.append(curChar);
if (this.enclosingRequired) { if (this.enclosingRequired) {
throw new ParseError("Opening field-encloser expected at position " + pos); throw new ParseError(
"Opening field-encloser expected at position " + pos);
} }
} }
@ -285,15 +289,15 @@ record sep halts processing.
break; break;
case ENCLOSED_ESCAPE: case ENCLOSED_ESCAPE:
// Treat this character literally, whatever it is, and return to enclosed // Treat this character literally, whatever it is, and return to
// field processing. // enclosed field processing.
sb.append(curChar); sb.append(curChar);
state = ParseState.ENCLOSED_FIELD; state = ParseState.ENCLOSED_FIELD;
break; break;
case ENCLOSED_EXPECT_DELIMITER: case ENCLOSED_EXPECT_DELIMITER:
// We were in an enclosed field, but got the final encloser. Now we expect // We were in an enclosed field, but got the final encloser. Now we
// either an end-of-field or an end-of-record. // expect either an end-of-field or an end-of-record.
if (this.fieldDelim == curChar) { if (this.fieldDelim == curChar) {
// end of one field is the beginning of the next. // end of one field is the beginning of the next.
state = ParseState.FIELD_START; state = ParseState.FIELD_START;
@ -308,8 +312,8 @@ record sep halts processing.
break; break;
case UNENCLOSED_ESCAPE: case UNENCLOSED_ESCAPE:
// Treat this character literally, whatever it is, and return to non-enclosed // Treat this character literally, whatever it is, and return to
// field processing. // non-enclosed field processing.
sb.append(curChar); sb.append(curChar);
state = ParseState.UNENCLOSED_FIELD; state = ParseState.UNENCLOSED_FIELD;
break; break;
@ -342,8 +346,8 @@ public boolean isEnclosingRequired() {
@Override @Override
public String toString() { public String toString() {
return "RecordParser[" + fieldDelim + ',' + recordDelim + ',' + enclosingChar + ',' return "RecordParser[" + fieldDelim + ',' + recordDelim + ','
+ escapeChar + ',' + enclosingRequired + "]"; + enclosingChar + ',' + escapeChar + ',' + enclosingRequired + "]";
} }
@Override @Override
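A sketch of the state machine above in action. The constructor argument order is assumed from the toString() just shown, and parseRecord(CharSequence) returning the list of field strings is assumed rather than visible in this hunk:

    // fieldDelim=',', recordDelim='\n', enclosingChar='"', escapeChar='\\',
    // enclosingRequired=false -- order assumed from toString() above.
    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
    List<String> fields = parser.parseRecord("1,\"Hello, world\",3\n");
    // fields -> ["1", "Hello, world", "3"]; the comma inside the enclosers is data.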
@ -32,21 +32,21 @@
* Interface implemented by the classes generated by sqoop's orm.ClassWriter. * Interface implemented by the classes generated by sqoop's orm.ClassWriter.
*/ */
public interface SqoopRecord extends Cloneable, DBWritable, Writable { public interface SqoopRecord extends Cloneable, DBWritable, Writable {
public void parse(CharSequence s) throws RecordParser.ParseError; void parse(CharSequence s) throws RecordParser.ParseError;
public void parse(Text s) throws RecordParser.ParseError; void parse(Text s) throws RecordParser.ParseError;
public void parse(byte [] s) throws RecordParser.ParseError; void parse(byte [] s) throws RecordParser.ParseError;
public void parse(char [] s) throws RecordParser.ParseError; void parse(char [] s) throws RecordParser.ParseError;
public void parse(ByteBuffer s) throws RecordParser.ParseError; void parse(ByteBuffer s) throws RecordParser.ParseError;
public void parse(CharBuffer s) throws RecordParser.ParseError; void parse(CharBuffer s) throws RecordParser.ParseError;
public void loadLargeObjects(LargeObjectLoader objLoader) void loadLargeObjects(LargeObjectLoader objLoader)
throws SQLException, IOException, InterruptedException; throws SQLException, IOException, InterruptedException;
public Object clone() throws CloneNotSupportedException; Object clone() throws CloneNotSupportedException;
/** /**
* Inserts the data in this object into the PreparedStatement, starting * Inserts the data in this object into the PreparedStatement, starting
* at parameter 'offset'. * at parameter 'offset'.
* @return the number of fields written to the statement. * @return the number of fields written to the statement.
*/ */
public int write(PreparedStatement stmt, int offset) throws SQLException; int write(PreparedStatement stmt, int offset) throws SQLException;
} }
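Classes generated by orm.ClassWriter implement this interface, so generic code can parse a delimited line and bind it to a JDBC statement without knowing the concrete type. A minimal sketch; EmployeeRecord is a hypothetical generated class and the sample line and statement are illustrative:

    SqoopRecord record = new EmployeeRecord();      // hypothetical generated class
    record.parse(new Text("7,Grace,Engineering"));  // may throw RecordParser.ParseError
    int fieldsBound = record.write(stmt, 0);        // bind all fields starting at offset 0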
@ -37,12 +37,12 @@
public abstract class ConnManager { public abstract class ConnManager {
/** /**
* Return a list of all databases on a server * Return a list of all databases on a server.
*/ */
public abstract String [] listDatabases(); public abstract String [] listDatabases();
/** /**
* Return a list of all tables in a database * Return a list of all tables in a database.
*/ */
public abstract String [] listTables(); public abstract String [] listTables();
@ -57,14 +57,14 @@ public abstract class ConnManager {
public abstract String getPrimaryKey(String tableName); public abstract String getPrimaryKey(String tableName);
/** /**
* Return java type for SQL type * Return java type for SQL type.
* @param sqlType sql type * @param sqlType sql type
* @return java type * @return java type
*/ */
public abstract String toJavaType(int sqlType); public abstract String toJavaType(int sqlType);
/** /**
* Return hive type for SQL type * Return hive type for SQL type.
* @param sqlType sql type * @param sqlType sql type
* @return hive type * @return hive type
*/ */
@ -86,25 +86,27 @@ public abstract class ConnManager {
* returned ResultSet object, and for calling release() after that to free * returned ResultSet object, and for calling release() after that to free
* internal state. * internal state.
*/ */
public abstract ResultSet readTable(String tableName, String [] columns) throws SQLException; public abstract ResultSet readTable(String tableName, String [] columns)
throws SQLException;
/** /**
* @return the actual database connection * @return the actual database connection.
*/ */
public abstract Connection getConnection() throws SQLException; public abstract Connection getConnection() throws SQLException;
/** /**
* @return a string identifying the driver class to load for this JDBC connection type. * @return a string identifying the driver class to load for this
* JDBC connection type.
*/ */
public abstract String getDriverClass(); public abstract String getDriverClass();
/** /**
* Execute a SQL statement 's' and print its results to stdout * Execute a SQL statement 's' and print its results to stdout.
*/ */
public abstract void execAndPrint(String s); public abstract void execAndPrint(String s);
/** /**
* Perform an import of a table from the database into HDFS * Perform an import of a table from the database into HDFS.
*/ */
public abstract void importTable(ImportJobContext context) public abstract void importTable(ImportJobContext context)
throws IOException, ImportException; throws IOException, ImportException;
@ -139,7 +141,7 @@ public String escapeTableName(String tableName) {
public abstract void close() throws SQLException; public abstract void close() throws SQLException;
/** /**
* Export data stored in HDFS into a table in a database * Export data stored in HDFS into a table in a database.
*/ */
public void exportTable(ExportJobContext context) public void exportTable(ExportJobContext context)
throws IOException, ExportException { throws IOException, ExportException {
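A sketch of driving the ConnManager surface summarized in this hunk, assuming a concrete manager has already been selected (for example by a ManagerFactory) for the user's SqoopOptions:

    String [] tables = manager.listTables();
    String pk = manager.getPrimaryKey(tables[0]);        // may be null
    ResultSet rs = manager.readTable(tables[0], null);   // null means all columns
    // ... iterate rs, then close it and release the manager's statement state
    //     as the readTable() javadoc above requires ...
    manager.close();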
@ -29,7 +29,8 @@
*/ */
public final class DefaultManagerFactory extends ManagerFactory { public final class DefaultManagerFactory extends ManagerFactory {
public static final Log LOG = LogFactory.getLog(DefaultManagerFactory.class.getName()); public static final Log LOG = LogFactory.getLog(
DefaultManagerFactory.class.getName());
public ConnManager accept(SqoopOptions options) { public ConnManager accept(SqoopOptions options) {
String manualDriver = options.getDriverClassName(); String manualDriver = options.getDriverClassName();
@ -46,7 +46,8 @@
*/ */
public class DirectMySQLManager extends MySQLManager { public class DirectMySQLManager extends MySQLManager {
public static final Log LOG = LogFactory.getLog(DirectMySQLManager.class.getName()); public static final Log LOG = LogFactory.getLog(
DirectMySQLManager.class.getName());
public DirectMySQLManager(final SqoopOptions options) { public DirectMySQLManager(final SqoopOptions options) {
super(options, false); super(options, false);
@ -51,10 +51,12 @@
* commands. * commands.
*/ */
public class DirectPostgresqlManager extends PostgresqlManager { public class DirectPostgresqlManager extends PostgresqlManager {
public static final Log LOG = LogFactory.getLog(DirectPostgresqlManager.class.getName()); public static final Log LOG = LogFactory.getLog(
DirectPostgresqlManager.class.getName());
public DirectPostgresqlManager(final SqoopOptions opts) { public DirectPostgresqlManager(final SqoopOptions opts) {
// Inform superclass that we're overriding import method via alt. constructor. // Inform superclass that we're overriding import method via alt.
// constructor.
super(opts, true); super(opts, true);
} }
@ -68,8 +70,8 @@ static class PostgresqlAsyncSink extends ErrorableAsyncSink {
private final PerfCounters counters; private final PerfCounters counters;
private final SqoopOptions options; private final SqoopOptions options;
PostgresqlAsyncSink(final SplittableBufferedWriter w, final SqoopOptions opts, PostgresqlAsyncSink(final SplittableBufferedWriter w,
final PerfCounters ctrs) { final SqoopOptions opts, final PerfCounters ctrs) {
this.writer = w; this.writer = w;
this.options = opts; this.options = opts;
this.counters = ctrs; this.counters = ctrs;
@ -81,14 +83,16 @@ public void processStream(InputStream is) {
} }
private static class PostgresqlStreamThread extends ErrorableThread { private static class PostgresqlStreamThread extends ErrorableThread {
public static final Log LOG = LogFactory.getLog(PostgresqlStreamThread.class.getName()); public static final Log LOG = LogFactory.getLog(
PostgresqlStreamThread.class.getName());
private final SplittableBufferedWriter writer; private final SplittableBufferedWriter writer;
private final InputStream stream; private final InputStream stream;
private final SqoopOptions options; private final SqoopOptions options;
private final PerfCounters counters; private final PerfCounters counters;
PostgresqlStreamThread(final InputStream is, final SplittableBufferedWriter w, PostgresqlStreamThread(final InputStream is,
final SplittableBufferedWriter w,
final SqoopOptions opts, final PerfCounters ctrs) { final SqoopOptions opts, final PerfCounters ctrs) {
this.stream = is; this.stream = is;
this.writer = w; this.writer = w;
@ -143,7 +147,8 @@ public void run() {
} }
/** /**
Takes a list of columns and turns them into a string like "col1, col2, col3..." * Takes a list of columns and turns them into a string like
* "col1, col2, col3...".
*/ */
private String getColumnListStr(String [] cols) { private String getColumnListStr(String [] cols) {
if (null == cols) { if (null == cols) {
@ -164,20 +169,20 @@ private String getColumnListStr(String [] cols) {
} }
/** /**
* @return the Postgresql-specific SQL command to copy the table ("COPY .... TO STDOUT") * @return the Postgresql-specific SQL command to copy the
* table ("COPY .... TO STDOUT").
*/ */
private String getCopyCommand(String tableName) { private String getCopyCommand(String tableName) {
/*
Format of this command is:
COPY table(col, col....) TO STDOUT
or COPY ( query ) TO STDOUT
WITH DELIMITER 'fieldsep'
CSV
QUOTE 'quotechar'
ESCAPE 'escapechar'
FORCE QUOTE col, col, col....
*/
// Format of this command is:
//
// COPY table(col, col....) TO STDOUT
// or COPY ( query ) TO STDOUT
// WITH DELIMITER 'fieldsep'
// CSV
// QUOTE 'quotechar'
// ESCAPE 'escapechar'
// FORCE QUOTE col, col, col....
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
String [] cols = getColumnNames(tableName); String [] cols = getColumnNames(tableName);
@ -238,12 +243,12 @@ or COPY ( query ) TO STDOUT
return copyCmd; return copyCmd;
} }
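As an illustration of the command format described in the comment above: for a hypothetical table employees(id, name) imported with ',' as the field delimiter, '"' as the encloser, and '\' as the escape character, the assembled command would look roughly like:

    COPY employees (id, name) TO STDOUT WITH DELIMITER ',' CSV
      QUOTE '"' ESCAPE '\' FORCE QUOTE id, name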
/** Write the COPY command to a temp file /** Write the COPY command to a temp file.
* @return the filename we wrote to. * @return the filename we wrote to.
*/ */
private String writeCopyCommand(String command) throws IOException { private String writeCopyCommand(String command) throws IOException {
String tmpDir = options.getTempDir(); String tmpDir = options.getTempDir();
File tempFile = File.createTempFile("tmp-",".sql", new File(tmpDir)); File tempFile = File.createTempFile("tmp-", ".sql", new File(tmpDir));
BufferedWriter w = new BufferedWriter( BufferedWriter w = new BufferedWriter(
new OutputStreamWriter(new FileOutputStream(tempFile))); new OutputStreamWriter(new FileOutputStream(tempFile)));
w.write(command); w.write(command);
@ -258,7 +263,7 @@ private String writeCopyCommand(String command) throws IOException {
private String writePasswordFile(String password) throws IOException { private String writePasswordFile(String password) throws IOException {
String tmpDir = options.getTempDir(); String tmpDir = options.getTempDir();
File tempFile = File.createTempFile("pgpass",".pgpass", new File(tmpDir)); File tempFile = File.createTempFile("pgpass", ".pgpass", new File(tmpDir));
LOG.debug("Writing password to tempfile: " + tempFile); LOG.debug("Writing password to tempfile: " + tempFile);
// Make sure it's only readable by the current user. // Make sure it's only readable by the current user.
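The restriction here matches the 0600 permissions MySQLUtils applies to its options file elsewhere in this commit via DirectImportUtils.setFilePermissions. On a later JDK the same owner-only restriction could be expressed directly; a sketch (java.nio.file was not available to this code at the time):

    // Owner read/write only -- the plain-JDK analogue of chmod 0600.
    java.nio.file.Files.setPosixFilePermissions(tempFile.toPath(),
        java.nio.file.attribute.PosixFilePermissions.fromString("rw-------"));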
@ -300,9 +305,9 @@ public void importTable(ImportJobContext context)
PerfCounters counters = new PerfCounters(); PerfCounters counters = new PerfCounters();
try { try {
// Get the COPY TABLE command to issue, write this to a file, and pass it // Get the COPY TABLE command to issue, write this to a file, and pass
// in to psql with -f filename. // it in to psql with -f filename. Then make sure we delete this file
// Then make sure we delete this file in our finally block. // in our finally block.
String copyCmd = getCopyCommand(tableName); String copyCmd = getCopyCommand(tableName);
commandFilename = writeCopyCommand(copyCmd); commandFilename = writeCopyCommand(copyCmd);
@ -312,10 +317,10 @@ public void importTable(ImportJobContext context)
// Environment to pass to psql. // Environment to pass to psql.
List<String> envp = Executor.getCurEnvpStrings(); List<String> envp = Executor.getCurEnvpStrings();
// We need to parse the connect string URI to determine the database name // We need to parse the connect string URI to determine the database
// and the host and port. If the host is localhost and the port is not specified, // name and the host and port. If the host is localhost and the port is
// we don't want to pass this to psql, because we want to force the use of a // not specified, we don't want to pass this to psql, because we want to
// UNIX domain socket, not a TCP/IP socket. // force the use of a UNIX domain socket, not a TCP/IP socket.
String connectString = options.getConnectString(); String connectString = options.getConnectString();
String databaseName = JdbcUrl.getDatabaseName(connectString); String databaseName = JdbcUrl.getDatabaseName(connectString);
String hostname = JdbcUrl.getHostName(connectString); String hostname = JdbcUrl.getHostName(connectString);
@ -325,7 +330,8 @@ public void importTable(ImportJobContext context)
throw new ImportException("Could not determine database name"); throw new ImportException("Could not determine database name");
} }
LOG.info("Performing import of table " + tableName + " from database " + databaseName); LOG.info("Performing import of table " + tableName + " from database "
+ databaseName);
args.add(PSQL_CMD); // requires that this is on the path. args.add(PSQL_CMD); // requires that this is on the path.
args.add("--tuples-only"); args.add("--tuples-only");
args.add("--quiet"); args.add("--quiet");
@ -401,7 +407,8 @@ public void importTable(ImportJobContext context)
// Remove any password file we wrote // Remove any password file we wrote
if (null != passwordFilename) { if (null != passwordFilename) {
if (!new File(passwordFilename).delete()) { if (!new File(passwordFilename).delete()) {
LOG.error("Could not remove postgresql password file " + passwordFilename); LOG.error("Could not remove postgresql password file "
+ passwordFilename);
LOG.error("You should remove this file to protect your credentials."); LOG.error("You should remove this file to protect your credentials.");
} }
} }
@ -30,7 +30,8 @@ public class ExportJobContext {
private String jarFile; private String jarFile;
private SqoopOptions options; private SqoopOptions options;
public ExportJobContext(final String table, final String jar, final SqoopOptions opts) { public ExportJobContext(final String table, final String jar,
final SqoopOptions opts) {
this.tableName = table; this.tableName = table;
this.jarFile = jar; this.jarFile = jar;
this.options = opts; this.options = opts;
@ -33,7 +33,8 @@
*/ */
public class GenericJdbcManager extends SqlManager { public class GenericJdbcManager extends SqlManager {
public static final Log LOG = LogFactory.getLog(GenericJdbcManager.class.getName()); public static final Log LOG = LogFactory.getLog(
GenericJdbcManager.class.getName());
private String jdbcDriverClass; private String jdbcDriverClass;
private Connection connection; private Connection connection;
@ -29,13 +29,14 @@
*/ */
public class HsqldbManager extends GenericJdbcManager { public class HsqldbManager extends GenericJdbcManager {
public static final Log LOG = LogFactory.getLog(HsqldbManager.class.getName()); public static final Log LOG = LogFactory.getLog(
HsqldbManager.class.getName());
// driver class to ensure is loaded when making db connection. // driver class to ensure is loaded when making db connection.
private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver"; private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver";
// HsqlDb doesn't have a notion of multiple "databases"; the user's database is always called // HsqlDb doesn't have a notion of multiple "databases"; the user's database
// "PUBLIC"; // is always called "PUBLIC".
private static final String HSQL_SCHEMA_NAME = "PUBLIC"; private static final String HSQL_SCHEMA_NAME = "PUBLIC";
public HsqldbManager(final SqoopOptions opts) { public HsqldbManager(final SqoopOptions opts) {
@ -43,7 +44,8 @@ public HsqldbManager(final SqoopOptions opts) {
} }
/** /**
* Note: HSqldb only supports a single schema named "PUBLIC" * Return list of databases hosted by the server.
* HSQLDB only supports a single schema named "PUBLIC".
*/ */
@Override @Override
public String[] listDatabases() { public String[] listDatabases() {
@ -33,7 +33,8 @@ public class ImportJobContext {
private SqoopOptions options; private SqoopOptions options;
private Class<? extends InputFormat> inputFormatClass; private Class<? extends InputFormat> inputFormatClass;
public ImportJobContext(final String table, final String jar, final SqoopOptions opts) { public ImportJobContext(final String table, final String jar,
final SqoopOptions opts) {
this.tableName = table; this.tableName = table;
this.jarFile = jar; this.jarFile = jar;
this.options = opts; this.options = opts;
@ -37,7 +37,7 @@
import org.apache.hadoop.sqoop.util.ImportException; import org.apache.hadoop.sqoop.util.ImportException;
/** /**
* Manages connections to MySQL databases * Manages connections to MySQL databases.
*/ */
public class MySQLManager extends GenericJdbcManager { public class MySQLManager extends GenericJdbcManager {
@ -103,7 +103,7 @@ public String[] listDatabases() {
@Override @Override
public void importTable(ImportJobContext context) public void importTable(ImportJobContext context)
throws IOException, ImportException { throws IOException, ImportException {
// Check that we're not doing a MapReduce from localhost. If we are, point // Check that we're not doing a MapReduce from localhost. If we are, point
// out that we could use mysqldump. // out that we could use mysqldump.
@ -18,28 +18,20 @@
package org.apache.hadoop.sqoop.manager; package org.apache.hadoop.sqoop.manager;
import java.io.BufferedReader;
import java.io.BufferedWriter; import java.io.BufferedWriter;
import java.io.File; import java.io.File;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import java.nio.CharBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.sqoop.shims.HadoopShim; import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.util.DirectImportUtils; import org.apache.hadoop.sqoop.util.DirectImportUtils;
/** /**
* Helper methods and constants for MySQL imports/exports * Helper methods and constants for MySQL imports/exports.
*/ */
public final class MySQLUtils { public final class MySQLUtils {
@ -100,7 +92,7 @@ public static String writePasswordFile(Configuration conf)
// Create the temp file to hold the user's password. // Create the temp file to hold the user's password.
String tmpDir = conf.get( String tmpDir = conf.get(
HadoopShim.get().getJobLocalDirProperty(), "/tmp/"); HadoopShim.get().getJobLocalDirProperty(), "/tmp/");
File tempFile = File.createTempFile("mysql-cnf",".cnf", new File(tmpDir)); File tempFile = File.createTempFile("mysql-cnf", ".cnf", new File(tmpDir));
// Make the password file only private readable. // Make the password file only private readable.
DirectImportUtils.setFilePermissions(tempFile, "0600"); DirectImportUtils.setFilePermissions(tempFile, "0600");
@ -47,7 +47,8 @@
*/ */
public class OracleManager extends GenericJdbcManager { public class OracleManager extends GenericJdbcManager {
public static final Log LOG = LogFactory.getLog(OracleManager.class.getName()); public static final Log LOG = LogFactory.getLog(
OracleManager.class.getName());
// driver class to ensure is loaded when making db connection. // driver class to ensure is loaded when making db connection.
private static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver"; private static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver";
@ -65,8 +66,8 @@ private static class ConnCache {
public static final Log LOG = LogFactory.getLog(ConnCache.class.getName()); public static final Log LOG = LogFactory.getLog(ConnCache.class.getName());
private static class CacheKey { private static class CacheKey {
public final String connectString; private final String connectString;
public final String username; private final String username;
public CacheKey(String connect, String user) { public CacheKey(String connect, String user) {
this.connectString = connect; this.connectString = connect;
@ -212,7 +213,8 @@ protected Connection makeConnection() throws SQLException {
try { try {
Class.forName(driverClass); Class.forName(driverClass);
} catch (ClassNotFoundException cnfe) { } catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Could not load db driver class: " + driverClass); throw new RuntimeException("Could not load db driver class: "
+ driverClass);
} }
String username = options.getUsername(); String username = options.getUsername();
@ -242,34 +244,37 @@ protected Connection makeConnection() throws SQLException {
} }
/** /**
* Set session time zone * Set session time zone.
* @param conn Connection object * @param conn Connection object
* @throws SQLException instance * @throws SQLException instance
*/ */
private void setSessionTimeZone(Connection conn) throws SQLException { private void setSessionTimeZone(Connection conn) throws SQLException {
// need to use reflection to call the method setSessionTimeZone on the OracleConnection class // Need to use reflection to call the method setSessionTimeZone on the
// because oracle specific java libraries are not accessible in this context // OracleConnection class because oracle specific java libraries are not
// accessible in this context.
Method method; Method method;
try { try {
method = conn.getClass().getMethod( method = conn.getClass().getMethod(
"setSessionTimeZone", new Class [] {String.class}); "setSessionTimeZone", new Class [] {String.class});
} catch (Exception ex) { } catch (Exception ex) {
LOG.error("Could not find method setSessionTimeZone in " + conn.getClass().getName(), ex); LOG.error("Could not find method setSessionTimeZone in "
+ conn.getClass().getName(), ex);
// rethrow SQLException // rethrow SQLException
throw new SQLException(ex); throw new SQLException(ex);
} }
// Need to set the time zone in order for Java // Need to set the time zone in order for Java to correctly access the
// to correctly access the column "TIMESTAMP WITH LOCAL TIME ZONE". // column "TIMESTAMP WITH LOCAL TIME ZONE". The user may have set this in
// The user may have set this in the configuration as 'oracle.sessionTimeZone'. // the configuration as 'oracle.sessionTimeZone'.
String clientTimeZoneStr = options.getConf().get(ORACLE_TIMEZONE_KEY, "GMT"); String clientTimeZoneStr = options.getConf().get(ORACLE_TIMEZONE_KEY,
"GMT");
try { try {
method.setAccessible(true); method.setAccessible(true);
method.invoke(conn, clientTimeZoneStr); method.invoke(conn, clientTimeZoneStr);
LOG.info("Time zone has been set to " + clientTimeZoneStr); LOG.info("Time zone has been set to " + clientTimeZoneStr);
} catch (Exception ex) { } catch (Exception ex) {
LOG.warn("Time zone " + clientTimeZoneStr + LOG.warn("Time zone " + clientTimeZoneStr
" could not be set on Oracle database."); + " could not be set on Oracle database.");
LOG.info("Setting default time zone: GMT"); LOG.info("Setting default time zone: GMT");
try { try {
// Per the documentation at: // Per the documentation at:
@ -310,7 +315,8 @@ public void exportTable(ExportJobContext context)
} }
@Override @Override
public ResultSet readTable(String tableName, String[] columns) throws SQLException { public ResultSet readTable(String tableName, String[] columns)
throws SQLException {
if (columns == null) { if (columns == null) {
columns = getColumnNames(tableName); columns = getColumnNames(tableName);
} }
@ -408,14 +414,14 @@ private String dbToHiveType(int sqlType) {
} }
/** /**
* Get database type * Get database type.
* @param clazz oracle class representing sql types * @param clazz oracle class representing sql types
* @param fieldName field name * @param fieldName field name
* @return value of database type constant * @return value of database type constant
*/ */
private int getDatabaseType(Class clazz, String fieldName) { private int getDatabaseType(Class clazz, String fieldName) {
// need to use reflection to extract constant values // Need to use reflection to extract constant values because the database
// because the database specific java libraries are not accessible in this context // specific java libraries are not accessible in this context.
int value = -1; int value = -1;
try { try {
java.lang.reflect.Field field = clazz.getDeclaredField(fieldName); java.lang.reflect.Field field = clazz.getDeclaredField(fieldName);
@ -429,13 +435,13 @@ private int getDatabaseType(Class clazz, String fieldName) {
} }
/** /**
* Load class by name * Load class by name.
* @param className class name * @param className class name
* @return class instance * @return class instance
*/ */
private Class getTypeClass(String className) { private Class getTypeClass(String className) {
// need to use reflection to load class // Need to use reflection to load class because the database specific java
// because the database specific java libraries are not accessible in this context // libraries are not accessible in this context.
Class typeClass = null; Class typeClass = null;
try { try {
typeClass = Class.forName(className); typeClass = Class.forName(className);
@ -33,16 +33,18 @@
import org.apache.hadoop.sqoop.util.ImportException; import org.apache.hadoop.sqoop.util.ImportException;
/** /**
* Manages connections to Postgresql databases * Manages connections to Postgresql databases.
*/ */
public class PostgresqlManager extends GenericJdbcManager { public class PostgresqlManager extends GenericJdbcManager {
public static final Log LOG = LogFactory.getLog(PostgresqlManager.class.getName()); public static final Log LOG = LogFactory.getLog(
PostgresqlManager.class.getName());
// driver class to ensure is loaded when making db connection. // driver class to ensure is loaded when making db connection.
private static final String DRIVER_CLASS = "org.postgresql.Driver"; private static final String DRIVER_CLASS = "org.postgresql.Driver";
private static final int POSTGRESQL_FETCH_SIZE = 50; // Fetch 50 rows at a time. // Fetch 50 rows at a time.
private static final int POSTGRESQL_FETCH_SIZE = 50;
// set to true after we warn the user that we can use direct fastpath. // set to true after we warn the user that we can use direct fastpath.
private static boolean warningPrinted = false; private static boolean warningPrinted = false;
@ -62,7 +62,7 @@ public abstract class SqlManager extends ConnManager {
private Statement lastStatement; private Statement lastStatement;
/** /**
* Constructs the SqlManager * Constructs the SqlManager.
* @param opts the SqoopOptions describing the user's requested action. * @param opts the SqoopOptions describing the user's requested action.
*/ */
public SqlManager(final SqoopOptions opts) { public SqlManager(final SqoopOptions opts) {
@ -171,7 +171,8 @@ public Map<String, Integer> getColumnTypes(String tableName) {
} }
@Override @Override
public ResultSet readTable(String tableName, String[] columns) throws SQLException { public ResultSet readTable(String tableName, String[] columns)
throws SQLException {
if (columns == null) { if (columns == null) {
columns = getColumnNames(tableName); columns = getColumnNames(tableName);
} }
@ -212,7 +213,8 @@ public String[] listTables() {
DatabaseMetaData metaData = this.getConnection().getMetaData(); DatabaseMetaData metaData = this.getConnection().getMetaData();
results = metaData.getTables(null, null, null, tableTypes); results = metaData.getTables(null, null, null, tableTypes);
} catch (SQLException sqlException) { } catch (SQLException sqlException) {
LOG.error("Error reading database metadata: " + sqlException.toString()); LOG.error("Error reading database metadata: "
+ sqlException.toString());
return null; return null;
} }
@ -264,25 +266,26 @@ public String getPrimaryKey(String tableName) {
getConnection().commit(); getConnection().commit();
} }
} catch (SQLException sqlException) { } catch (SQLException sqlException) {
LOG.error("Error reading primary key metadata: " + sqlException.toString()); LOG.error("Error reading primary key metadata: "
+ sqlException.toString());
return null; return null;
} }
} }
/** /**
* Retrieve the actual connection from the outer ConnManager * Retrieve the actual connection from the outer ConnManager.
*/ */
public abstract Connection getConnection() throws SQLException; public abstract Connection getConnection() throws SQLException;
/** /**
* Determine what column to use to split the table. * Determine what column to use to split the table.
* @param options the SqoopOptions controlling this import. * @param opts the SqoopOptions controlling this import.
* @param tableName the table to import. * @param tableName the table to import.
* @return the splitting column, if one is set or inferrable, or null * @return the splitting column, if one is set or inferrable, or null
* otherwise. * otherwise.
*/ */
protected String getSplitColumn(SqoopOptions options, String tableName) { protected String getSplitColumn(SqoopOptions opts, String tableName) {
String splitCol = options.getSplitByCol(); String splitCol = opts.getSplitByCol();
if (null == splitCol) { if (null == splitCol) {
// If the user didn't specify a splitting column, try to infer one. // If the user didn't specify a splitting column, try to infer one.
splitCol = getPrimaryKey(tableName); splitCol = getPrimaryKey(tableName);
@ -344,7 +347,8 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
* @return the name of a Java type to hold the sql datatype, or null if none. * @return the name of a Java type to hold the sql datatype, or null if none.
*/ */
public String toJavaType(int sqlType) { public String toJavaType(int sqlType) {
// mappings from http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html // Mappings taken from:
// http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html
if (sqlType == Types.INTEGER) { if (sqlType == Types.INTEGER) {
return "Integer"; return "Integer";
} else if (sqlType == Types.VARCHAR) { } else if (sqlType == Types.VARCHAR) {
@ -396,7 +400,7 @@ public String toJavaType(int sqlType) {
} }
/** /**
* Resolve a database-specific type to Hive data type * Resolve a database-specific type to Hive data type.
* @param sqlType sql type * @param sqlType sql type
* @return hive type * @return hive type
*/ */
@ -489,7 +493,8 @@ protected Connection makeConnection() throws SQLException {
try { try {
Class.forName(driverClass); Class.forName(driverClass);
} catch (ClassNotFoundException cnfe) { } catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Could not load db driver class: " + driverClass); throw new RuntimeException("Could not load db driver class: "
+ driverClass);
} }
String username = options.getUsername(); String username = options.getUsername();
@ -497,7 +502,8 @@ protected Connection makeConnection() throws SQLException {
if (null == username) { if (null == username) {
connection = DriverManager.getConnection(options.getConnectString()); connection = DriverManager.getConnection(options.getConnectString());
} else { } else {
connection = DriverManager.getConnection(options.getConnectString(), username, password); connection = DriverManager.getConnection(options.getConnectString(),
username, password);
} }
// We only use this for metadata queries. Loosest semantics are okay. // We only use this for metadata queries. Loosest semantics are okay.
@ -508,7 +514,7 @@ protected Connection makeConnection() throws SQLException {
} }
/** /**
* Export data stored in HDFS into a table in a database * Export data stored in HDFS into a table in a database.
*/ */
public void exportTable(ExportJobContext context) public void exportTable(ExportJobContext context)
throws IOException, ExportException { throws IOException, ExportException {
@ -34,27 +34,34 @@
public class AutoProgressMapper<KEYIN, VALIN, KEYOUT, VALOUT> public class AutoProgressMapper<KEYIN, VALIN, KEYOUT, VALOUT>
extends Mapper<KEYIN, VALIN, KEYOUT, VALOUT> { extends Mapper<KEYIN, VALIN, KEYOUT, VALOUT> {
public static final Log LOG = LogFactory.getLog(AutoProgressMapper.class.getName()); public static final Log LOG = LogFactory.getLog(
AutoProgressMapper.class.getName());
/** Total number of millis for which progress will be reported /**
by the auto-progress thread. If this is zero, then the auto-progress * Total number of millis for which progress will be reported by the
thread will never voluntarily exit. * auto-progress thread. If this is zero, then the auto-progress thread will
*/ * never voluntarily exit.
*/
private int maxProgressPeriod; private int maxProgressPeriod;
/** Number of milliseconds to sleep for between loop iterations. Must be less /**
than report interval. * Number of milliseconds to sleep for between loop iterations. Must be less
*/ * than report interval.
*/
private int sleepInterval; private int sleepInterval;
/** Number of milliseconds between calls to Reporter.progress(). Should be a multiple /**
of the sleepInterval. * Number of milliseconds between calls to Reporter.progress().
*/ * Should be a multiple of the sleepInterval.
*/
private int reportInterval; private int reportInterval;
public static final String MAX_PROGRESS_PERIOD_KEY = "sqoop.mapred.auto.progress.max"; public static final String MAX_PROGRESS_PERIOD_KEY =
public static final String SLEEP_INTERVAL_KEY = "sqoop.mapred.auto.progress.sleep"; "sqoop.mapred.auto.progress.max";
public static final String REPORT_INTERVAL_KEY = "sqoop.mapred.auto.progress.report"; public static final String SLEEP_INTERVAL_KEY =
"sqoop.mapred.auto.progress.sleep";
public static final String REPORT_INTERVAL_KEY =
"sqoop.mapred.auto.progress.report";
// Sleep for 10 seconds at a time. // Sleep for 10 seconds at a time.
static final int DEFAULT_SLEEP_INTERVAL = 10000; static final int DEFAULT_SLEEP_INTERVAL = 10000;
@ -67,7 +74,7 @@ public class AutoProgressMapper<KEYIN, VALIN, KEYOUT, VALOUT>
private class ProgressThread extends Thread { private class ProgressThread extends Thread {
private volatile boolean keepGoing; // while this is true, thread runs. private volatile boolean keepGoing; // While this is true, thread runs.
private Context context; private Context context;
private long startTimeMillis; private long startTimeMillis;
@ -91,17 +98,20 @@ public void run() {
final long REPORT_INTERVAL = AutoProgressMapper.this.reportInterval; final long REPORT_INTERVAL = AutoProgressMapper.this.reportInterval;
final long SLEEP_INTERVAL = AutoProgressMapper.this.sleepInterval; final long SLEEP_INTERVAL = AutoProgressMapper.this.sleepInterval;
// in a loop: // In a loop:
// * Check that we haven't run for too long (maxProgressPeriod) // * Check that we haven't run for too long (maxProgressPeriod).
// * If it's been a report interval since we last made progress, make more. // * If it's been a report interval since we last made progress,
// make more.
// * Sleep for a bit. // * Sleep for a bit.
// * If the parent thread has signaled for exit, do so. // * If the parent thread has signaled for exit, do so.
while (this.keepGoing) { while (this.keepGoing) {
long curTimeMillis = System.currentTimeMillis(); long curTimeMillis = System.currentTimeMillis();
if (MAX_PROGRESS != 0 && curTimeMillis - this.startTimeMillis > MAX_PROGRESS) { if (MAX_PROGRESS != 0
&& curTimeMillis - this.startTimeMillis > MAX_PROGRESS) {
this.keepGoing = false; this.keepGoing = false;
LOG.info("Auto-progress thread exiting after " + MAX_PROGRESS + " ms."); LOG.info("Auto-progress thread exiting after " + MAX_PROGRESS
+ " ms.");
break; break;
} }
@ -130,23 +140,29 @@ public void run() {
/** /**
* Set configuration parameters for the auto-progress thread. * Set configuration parameters for the auto-progress thread.
*/ */
private final void configureAutoProgress(Configuration job) { private void configureAutoProgress(Configuration job) {
this.maxProgressPeriod = job.getInt(MAX_PROGRESS_PERIOD_KEY, DEFAULT_MAX_PROGRESS); this.maxProgressPeriod = job.getInt(MAX_PROGRESS_PERIOD_KEY,
this.sleepInterval = job.getInt(SLEEP_INTERVAL_KEY, DEFAULT_SLEEP_INTERVAL); DEFAULT_MAX_PROGRESS);
this.reportInterval = job.getInt(REPORT_INTERVAL_KEY, DEFAULT_REPORT_INTERVAL); this.sleepInterval = job.getInt(SLEEP_INTERVAL_KEY,
DEFAULT_SLEEP_INTERVAL);
this.reportInterval = job.getInt(REPORT_INTERVAL_KEY,
DEFAULT_REPORT_INTERVAL);
if (this.reportInterval < 1) { if (this.reportInterval < 1) {
LOG.warn("Invalid " + REPORT_INTERVAL_KEY + "; setting to " + DEFAULT_REPORT_INTERVAL); LOG.warn("Invalid " + REPORT_INTERVAL_KEY + "; setting to "
+ DEFAULT_REPORT_INTERVAL);
this.reportInterval = DEFAULT_REPORT_INTERVAL; this.reportInterval = DEFAULT_REPORT_INTERVAL;
} }
if (this.sleepInterval > this.reportInterval || this.sleepInterval < 1) { if (this.sleepInterval > this.reportInterval || this.sleepInterval < 1) {
LOG.warn("Invalid " + SLEEP_INTERVAL_KEY + "; setting to " + DEFAULT_SLEEP_INTERVAL); LOG.warn("Invalid " + SLEEP_INTERVAL_KEY + "; setting to "
+ DEFAULT_SLEEP_INTERVAL);
this.sleepInterval = DEFAULT_SLEEP_INTERVAL; this.sleepInterval = DEFAULT_SLEEP_INTERVAL;
} }
if (this.maxProgressPeriod < 0) { if (this.maxProgressPeriod < 0) {
LOG.warn("Invalid " + MAX_PROGRESS_PERIOD_KEY + "; setting to " + DEFAULT_MAX_PROGRESS); LOG.warn("Invalid " + MAX_PROGRESS_PERIOD_KEY + "; setting to "
+ DEFAULT_MAX_PROGRESS);
this.maxProgressPeriod = DEFAULT_MAX_PROGRESS; this.maxProgressPeriod = DEFAULT_MAX_PROGRESS;
} }
} }
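A minimal sketch of tuning the auto-progress thread through the three keys defined earlier in this file. The values are illustrative and must satisfy the checks above: the sleep interval is at least 1 ms and no larger than the report interval, and a max period of 0 means the thread never voluntarily exits:

    Configuration job = new Configuration();   // org.apache.hadoop.conf.Configuration
    job.setInt("sqoop.mapred.auto.progress.max", 0);        // 0 = never time out
    job.setInt("sqoop.mapred.auto.progress.sleep", 5000);   // wake every 5 seconds
    job.setInt("sqoop.mapred.auto.progress.report", 15000); // progress() every 15 seconds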
@ -179,7 +195,8 @@ public void run(Context context) throws IOException, InterruptedException {
thread.join(); thread.join();
LOG.debug("Progress thread shutdown detected."); LOG.debug("Progress thread shutdown detected.");
} catch (InterruptedException ie) { } catch (InterruptedException ie) {
LOG.warn("Interrupted when waiting on auto-progress thread: " + ie.toString()); LOG.warn("Interrupted when waiting on auto-progress thread: "
+ ie.toString());
} }
} }
} }
@ -24,19 +24,12 @@
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration; import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat; import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
@ -46,12 +39,8 @@
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager; import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.lib.LargeObjectLoader; import org.apache.hadoop.sqoop.lib.LargeObjectLoader;
import org.apache.hadoop.sqoop.orm.TableClassName;
import org.apache.hadoop.sqoop.shims.HadoopShim; import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.shims.ShimLoader; import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;
import org.apache.hadoop.sqoop.util.ImportException;
import org.apache.hadoop.sqoop.util.PerfCounters;
/** /**
* Actually runs a jdbc import job using the ORM files generated by the * Actually runs a jdbc import job using the ORM files generated by the
@ -59,7 +48,8 @@
*/ */
public class DataDrivenImportJob extends ImportJobBase { public class DataDrivenImportJob extends ImportJobBase {
public static final Log LOG = LogFactory.getLog(DataDrivenImportJob.class.getName()); public static final Log LOG = LogFactory.getLog(
DataDrivenImportJob.class.getName());
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public DataDrivenImportJob(final SqoopOptions opts) { public DataDrivenImportJob(final SqoopOptions opts) {
@ -115,11 +105,12 @@ protected void configureInputFormat(Job job, String tableName,
try { try {
String username = options.getUsername(); String username = options.getUsername();
if (null == username || username.length() == 0) { if (null == username || username.length() == 0) {
DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(), DBConfiguration.configureDB(job.getConfiguration(),
options.getConnectString()); mgr.getDriverClass(), options.getConnectString());
} else { } else {
DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(), DBConfiguration.configureDB(job.getConfiguration(),
options.getConnectString(), username, options.getPassword()); mgr.getDriverClass(), options.getConnectString(),
username, options.getPassword());
} }
String [] colNames = options.getColumns(); String [] colNames = options.getColumns();
@ -139,8 +130,8 @@ protected void configureInputFormat(Job job, String tableName,
String whereClause = options.getWhereClause(); String whereClause = options.getWhereClause();
// We can't set the class properly in here, because we may not have the // We can't set the class properly in here, because we may not have the
// jar loaded in this JVM. So we start by calling setInput() with DBWritable // jar loaded in this JVM. So we start by calling setInput() with
// and then overriding the string manually. // DBWritable and then overriding the string manually.
DataDrivenDBInputFormat.setInput(job, DBWritable.class, DataDrivenDBInputFormat.setInput(job, DBWritable.class,
mgr.escapeTableName(tableName), whereClause, mgr.escapeTableName(tableName), whereClause,
mgr.escapeColName(splitByCol), sqlColNames); mgr.escapeColName(splitByCol), sqlColNames);
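Editor's note: the hunk above wires the JDBC connection and the input format. A minimal standalone sketch of the same two calls follows; it is not Sqoop's actual code path, and the driver class, connect string, table name, and column names are invented for illustration (the two configureDB overloads and the setInput signature are the ones used in the hunk itself).

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;

public class ConfigureDbSketch {
  public static void main(String[] args) throws IOException {
    Job job = Job.getInstance(new Configuration());

    String driverClass = "com.mysql.jdbc.Driver";               // assumed driver
    String connectString = "jdbc:mysql://db.example.com/corp";  // assumed URL
    String username = System.getProperty("db.user");            // may be null

    // Choose between the two overloads depending on whether credentials
    // were supplied, exactly as the hunk above does.
    if (username == null || username.length() == 0) {
      DBConfiguration.configureDB(job.getConfiguration(), driverClass,
          connectString);
    } else {
      DBConfiguration.configureDB(job.getConfiguration(), driverClass,
          connectString, username, System.getProperty("db.password"));
    }

    // Start with the generic DBWritable class; the concrete generated record
    // class is substituted afterwards because its jar may not be loaded in
    // this JVM yet (see the comment in the hunk).
    DataDrivenDBInputFormat.setInput(job, DBWritable.class,
        "employees", null /* where clause */, "id", "id", "name");
  }
}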

View File

@ -112,7 +112,8 @@ public static boolean isSequenceFiles(Configuration conf, Path p)
} }
if (null == stat) { if (null == stat) {
LOG.warn("null FileStatus object in isSequenceFiles(); assuming false."); LOG.warn("null FileStatus object in isSequenceFiles(); "
+ "assuming false.");
return false; return false;
} }
@ -239,7 +240,7 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
/** /**
* Run an export job to dump a table from HDFS to a database * Run an export job to dump a table from HDFS to a database.
* @throws IOException if the export job encounters an IO error * @throws IOException if the export job encounters an IO error
* @throws ExportException if the job fails unexpectedly or is misconfigured. * @throws ExportException if the job fails unexpectedly or is misconfigured.
*/ */
@ -248,7 +249,8 @@ public void runExport() throws ExportException, IOException {
SqoopOptions options = context.getOptions(); SqoopOptions options = context.getOptions();
Configuration conf = options.getConf(); Configuration conf = options.getConf();
String tableName = context.getTableName(); String tableName = context.getTableName();
String tableClassName = new TableClassName(options).getClassForTable(tableName); String tableClassName =
new TableClassName(options).getClassForTable(tableName);
String ormJarFile = context.getJarFile(); String ormJarFile = context.getJarFile();
LOG.info("Beginning export of " + tableName); LOG.info("Beginning export of " + tableName);

View File

@ -57,7 +57,8 @@
*/ */
public class ImportJobBase extends JobBase { public class ImportJobBase extends JobBase {
public static final Log LOG = LogFactory.getLog(ImportJobBase.class.getName()); public static final Log LOG = LogFactory.getLog(
ImportJobBase.class.getName());
public ImportJobBase() { public ImportJobBase() {
this(null); this(null);
@ -129,20 +130,24 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
/** /**
* Run an import job to read a table in to HDFS * Run an import job to read a table in to HDFS.
* *
* @param tableName the database table to read * @param tableName the database table to read
* @param ormJarFile the Jar file to insert into the dcache classpath. (may be null) * @param ormJarFile the Jar file to insert into the dcache classpath.
* @param splitByCol the column of the database table to use to split the import * (may be null)
* @param splitByCol the column of the database table to use to split
* the import
* @param conf A fresh Hadoop Configuration to use to build an MR job. * @param conf A fresh Hadoop Configuration to use to build an MR job.
* @throws IOException if the job encountered an IO problem * @throws IOException if the job encountered an IO problem
* @throws ImportException if the job failed unexpectedly or was misconfigured. * @throws ImportException if the job failed unexpectedly or was
* misconfigured.
*/ */
public void runImport(String tableName, String ormJarFile, String splitByCol, public void runImport(String tableName, String ormJarFile, String splitByCol,
Configuration conf) throws IOException, ImportException { Configuration conf) throws IOException, ImportException {
LOG.info("Beginning import of " + tableName); LOG.info("Beginning import of " + tableName);
String tableClassName = new TableClassName(options).getClassForTable(tableName); String tableClassName =
new TableClassName(options).getClassForTable(tableName);
loadJars(conf, ormJarFile, tableClassName); loadJars(conf, ormJarFile, tableClassName);
try { try {

View File

@ -35,10 +35,9 @@
import org.apache.hadoop.sqoop.ConnFactory; import org.apache.hadoop.sqoop.ConnFactory;
import org.apache.hadoop.sqoop.manager.ConnManager; import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.ExportJobContext; import org.apache.hadoop.sqoop.manager.ExportJobContext;
import org.apache.hadoop.sqoop.shims.ShimLoader;
/** /**
* Run an export using JDBC (JDBC-based ExportOutputFormat) * Run an export using JDBC (JDBC-based ExportOutputFormat).
*/ */
public class JdbcExportJob extends ExportJobBase { public class JdbcExportJob extends ExportJobBase {

View File

@ -26,7 +26,6 @@
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.OutputFormat;
@ -92,23 +91,23 @@ protected Class<? extends OutputFormat> getOutputFormatClass()
return this.outputFormatClass; return this.outputFormatClass;
} }
/** Set the OutputFormat class to use for this job */ /** Set the OutputFormat class to use for this job. */
public void setOutputFormatClass(Class<? extends OutputFormat> cls) { public void setOutputFormatClass(Class<? extends OutputFormat> cls) {
this.outputFormatClass = cls; this.outputFormatClass = cls;
} }
/** Set the InputFormat class to use for this job */ /** Set the InputFormat class to use for this job. */
public void setInputFormatClass(Class<? extends InputFormat> cls) { public void setInputFormatClass(Class<? extends InputFormat> cls) {
this.inputFormatClass = cls; this.inputFormatClass = cls;
} }
/** Set the Mapper class to use for this job */ /** Set the Mapper class to use for this job. */
public void setMapperClass(Class<? extends Mapper> cls) { public void setMapperClass(Class<? extends Mapper> cls) {
this.mapperClass = cls; this.mapperClass = cls;
} }
/** /**
* Set the SqoopOptions configuring this job * Set the SqoopOptions configuring this job.
*/ */
public void setOptions(SqoopOptions opts) { public void setOptions(SqoopOptions opts) {
this.options = opts; this.options = opts;
@ -122,9 +121,10 @@ protected void loadJars(Configuration conf, String ormJarFile,
boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address")) boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address"))
|| "local".equals(conf.get("mapred.job.tracker")); || "local".equals(conf.get("mapred.job.tracker"));
if (isLocal) { if (isLocal) {
// If we're using the LocalJobRunner, then instead of using the compiled jar file // If we're using the LocalJobRunner, then instead of using the compiled
// as the job source, we're running in the current thread. Push on another classloader // jar file as the job source, we're running in the current thread. Push
// that loads from that jar in addition to everything currently on the classpath. // on another classloader that loads from that jar in addition to
// everything currently on the classpath.
this.prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile, this.prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile,
tableClassName); tableClassName);
} }
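Editor's note: the loadJars() hunk above only pushes a classloader when the job will run in-process. The helper below reproduces just that LocalJobRunner check in isolation; the two property names are the ones the hunk itself consults.

import org.apache.hadoop.conf.Configuration;

public final class LocalModeCheck {
  private LocalModeCheck() { }

  /** True when the job would run in-process via the LocalJobRunner. */
  public static boolean isLocalJobRunner(Configuration conf) {
    // Newer and older names of the job tracker address property.
    return "local".equals(conf.get("mapreduce.jobtracker.address"))
        || "local".equals(conf.get("mapred.job.tracker"));
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("mapred.job.tracker", "local");
    System.out.println(isLocalJobRunner(conf));   // prints: true
  }
}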

View File

@ -25,20 +25,10 @@
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration; import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat; import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable; import org.apache.hadoop.mapreduce.lib.db.DBWritable;
@ -47,11 +37,7 @@
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager; import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.MySQLUtils; import org.apache.hadoop.sqoop.manager.MySQLUtils;
import org.apache.hadoop.sqoop.orm.TableClassName;
import org.apache.hadoop.sqoop.shims.ShimLoader; import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;
import org.apache.hadoop.sqoop.util.ImportException;
import org.apache.hadoop.sqoop.util.PerfCounters;
/** /**
* Class that runs an import job using mysqldump in the mapper. * Class that runs an import job using mysqldump in the mapper.
@ -82,11 +68,12 @@ protected void configureInputFormat(Job job, String tableName,
try { try {
String username = options.getUsername(); String username = options.getUsername();
if (null == username || username.length() == 0) { if (null == username || username.length() == 0) {
DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(), DBConfiguration.configureDB(job.getConfiguration(),
options.getConnectString()); mgr.getDriverClass(), options.getConnectString());
} else { } else {
DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(), DBConfiguration.configureDB(job.getConfiguration(),
options.getConnectString(), username, options.getPassword()); mgr.getDriverClass(), options.getConnectString(), username,
options.getPassword());
} }
String [] colNames = options.getColumns(); String [] colNames = options.getColumns();
@ -106,8 +93,8 @@ protected void configureInputFormat(Job job, String tableName,
String whereClause = options.getWhereClause(); String whereClause = options.getWhereClause();
// We can't set the class properly in here, because we may not have the // We can't set the class properly in here, because we may not have the
// jar loaded in this JVM. So we start by calling setInput() with DBWritable // jar loaded in this JVM. So we start by calling setInput() with
// and then overriding the string manually. // DBWritable and then overriding the string manually.
// Note that mysqldump also does *not* want a quoted table name. // Note that mysqldump also does *not* want a quoted table name.
DataDrivenDBInputFormat.setInput(job, DBWritable.class, DataDrivenDBInputFormat.setInput(job, DBWritable.class,

View File

@ -19,14 +19,10 @@
package org.apache.hadoop.sqoop.mapreduce; package org.apache.hadoop.sqoop.mapreduce;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File; import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.CharBuffer; import java.nio.CharBuffer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -35,18 +31,13 @@
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.lib.FieldFormatter; import org.apache.hadoop.sqoop.lib.FieldFormatter;
import org.apache.hadoop.sqoop.lib.RecordParser; import org.apache.hadoop.sqoop.lib.RecordParser;
import org.apache.hadoop.sqoop.manager.MySQLUtils; import org.apache.hadoop.sqoop.manager.MySQLUtils;
import org.apache.hadoop.sqoop.util.AsyncSink; import org.apache.hadoop.sqoop.util.AsyncSink;
import org.apache.hadoop.sqoop.util.DirectImportUtils;
import org.apache.hadoop.sqoop.util.ErrorableAsyncSink; import org.apache.hadoop.sqoop.util.ErrorableAsyncSink;
import org.apache.hadoop.sqoop.util.ErrorableThread; import org.apache.hadoop.sqoop.util.ErrorableThread;
import org.apache.hadoop.sqoop.util.ImportException;
import org.apache.hadoop.sqoop.util.JdbcUrl; import org.apache.hadoop.sqoop.util.JdbcUrl;
import org.apache.hadoop.sqoop.util.LoggingAsyncSink; import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
import org.apache.hadoop.sqoop.util.PerfCounters; import org.apache.hadoop.sqoop.util.PerfCounters;
@ -57,7 +48,8 @@
public class MySQLDumpMapper public class MySQLDumpMapper
extends Mapper<String, NullWritable, String, NullWritable> { extends Mapper<String, NullWritable, String, NullWritable> {
public static final Log LOG = LogFactory.getLog(MySQLDumpMapper.class.getName()); public static final Log LOG = LogFactory.getLog(
MySQLDumpMapper.class.getName());
private Configuration conf; private Configuration conf;
@ -111,19 +103,22 @@ public void run() {
break; // EOF. break; // EOF.
} }
// this line is of the form "INSERT .. VALUES ( actual value text );" // this line is of the form "INSERT .. VALUES ( actual value text
// strip the leading preamble up to the '(' and the trailing ');'. // );" strip the leading preamble up to the '(' and the trailing
// ');'.
if (preambleLen == -1) { if (preambleLen == -1) {
// we haven't determined how long the preamble is. It's constant // we haven't determined how long the preamble is. It's constant
// across all lines, so just figure this out once. // across all lines, so just figure this out once.
String recordStartMark = "VALUES ("; String recordStartMark = "VALUES (";
preambleLen = inLine.indexOf(recordStartMark) + recordStartMark.length(); preambleLen = inLine.indexOf(recordStartMark)
+ recordStartMark.length();
} }
// chop off the leading and trailing text as we write the // chop off the leading and trailing text as we write the
// output to HDFS. // output to HDFS.
int len = inLine.length() - 2 - preambleLen; int len = inLine.length() - 2 - preambleLen;
context.write(inLine.substring(preambleLen, inLine.length() - 2), null); context.write(inLine.substring(preambleLen, inLine.length() - 2),
null);
context.write("\n", null); context.write("\n", null);
counters.addBytes(1 + len); counters.addBytes(1 + len);
} }
@ -235,18 +230,21 @@ public void run() {
break; // EOF. break; // EOF.
} }
// this line is of the form "INSERT .. VALUES ( actual value text );" // this line is of the form "INSERT .. VALUES ( actual value text
// strip the leading preamble up to the '(' and the trailing ');'. // );" strip the leading preamble up to the '(' and the trailing
// ');'.
if (preambleLen == -1) { if (preambleLen == -1) {
// we haven't determined how long the preamble is. It's constant // we haven't determined how long the preamble is. It's constant
// across all lines, so just figure this out once. // across all lines, so just figure this out once.
String recordStartMark = "VALUES ("; String recordStartMark = "VALUES (";
preambleLen = inLine.indexOf(recordStartMark) + recordStartMark.length(); preambleLen = inLine.indexOf(recordStartMark)
+ recordStartMark.length();
} }
// Wrap the input string in a char buffer that ignores the leading and trailing // Wrap the input string in a char buffer that ignores the leading
// text. // and trailing text.
CharBuffer charbuf = CharBuffer.wrap(inLine, preambleLen, inLine.length() - 2); CharBuffer charbuf = CharBuffer.wrap(inLine, preambleLen,
inLine.length() - 2);
// Pass this along to the parser // Pass this along to the parser
List<String> fields = null; List<String> fields = null;
@ -258,7 +256,8 @@ public void run() {
continue; // Skip emitting this row. continue; // Skip emitting this row.
} }
// For all of the output fields, emit them using the delimiters the user chooses. // For all of the output fields, emit them using the delimiters
// the user chooses.
boolean first = true; boolean first = true;
int recordLen = 1; // for the delimiter. int recordLen = 1; // for the delimiter.
for (String field : fields) { for (String field : fields) {
@ -312,10 +311,11 @@ public void map(String splitConditions, NullWritable val, Context context)
ArrayList<String> args = new ArrayList<String>(); ArrayList<String> args = new ArrayList<String>();
String tableName = conf.get(MySQLUtils.TABLE_NAME_KEY); String tableName = conf.get(MySQLUtils.TABLE_NAME_KEY);
// We need to parse the connect string URI to determine the database // We need to parse the connect string URI to determine the database name.
// name. Using java.net.URL directly on the connect string will fail because // Using java.net.URL directly on the connect string will fail because
// Java doesn't respect arbitrary JDBC-based schemes. So we chop off the scheme // Java doesn't respect arbitrary JDBC-based schemes. So we chop off the
// (everything before '://') and replace it with 'http', which we know will work. // scheme (everything before '://') and replace it with 'http', which we
// know will work.
String connectString = conf.get(MySQLUtils.CONNECT_STRING_KEY); String connectString = conf.get(MySQLUtils.CONNECT_STRING_KEY);
String databaseName = JdbcUrl.getDatabaseName(connectString); String databaseName = JdbcUrl.getDatabaseName(connectString);
String hostname = JdbcUrl.getHostName(connectString); String hostname = JdbcUrl.getHostName(connectString);
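Editor's note: the comment above describes swapping the JDBC scheme for "http" so that standard URL parsing works. This is not Sqoop's JdbcUrl class, just the same idea shown in isolation with an assumed connect string.

import java.net.URI;
import java.net.URISyntaxException;

public class JdbcUrlTrick {
  public static void main(String[] args) throws URISyntaxException {
    String connect = "jdbc:mysql://db.example.com:3306/corp";

    // java.net does not understand arbitrary "jdbc:..." schemes, so chop off
    // everything before "://" and pretend the scheme is http.
    int idx = connect.indexOf("://");
    URI fake = new URI("http" + connect.substring(idx));

    System.out.println(fake.getHost());              // db.example.com
    System.out.println(fake.getPort());              // 3306
    System.out.println(fake.getPath().substring(1)); // corp (the database name)
  }
}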
@ -391,7 +391,8 @@ public void map(String splitConditions, NullWritable val, Context context)
InputStream is = p.getInputStream(); InputStream is = p.getInputStream();
if (MySQLUtils.outputDelimsAreMySQL(conf)) { if (MySQLUtils.outputDelimsAreMySQL(conf)) {
LOG.debug("Output delimiters conform to mysqldump; using straight copy"); LOG.debug("Output delimiters conform to mysqldump; "
+ "using straight copy");
sink = new CopyingAsyncSink(context, counters); sink = new CopyingAsyncSink(context, counters);
} else { } else {
LOG.debug("User-specified delimiters; using reparsing import"); LOG.debug("User-specified delimiters; using reparsing import");

View File

@ -27,7 +27,6 @@
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration; import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat; import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
@ -80,11 +79,12 @@ protected void configureInputFormat(Job job, String tableName,
mgr = new ConnFactory(conf).getManager(options); mgr = new ConnFactory(conf).getManager(options);
String username = options.getUsername(); String username = options.getUsername();
if (null == username || username.length() == 0) { if (null == username || username.length() == 0) {
DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(), DBConfiguration.configureDB(job.getConfiguration(),
options.getConnectString()); mgr.getDriverClass(), options.getConnectString());
} else { } else {
DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(), DBConfiguration.configureDB(job.getConfiguration(),
options.getConnectString(), username, options.getPassword()); mgr.getDriverClass(), options.getConnectString(), username,
options.getPassword());
} }
String [] colNames = options.getColumns(); String [] colNames = options.getColumns();

View File

@ -30,7 +30,6 @@
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration; import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.sqoop.lib.TaskId; import org.apache.hadoop.sqoop.lib.TaskId;
@ -53,7 +52,8 @@
public class MySQLExportMapper<KEYIN, VALIN> public class MySQLExportMapper<KEYIN, VALIN>
extends Mapper<KEYIN, VALIN, NullWritable, NullWritable> { extends Mapper<KEYIN, VALIN, NullWritable, NullWritable> {
public static final Log LOG = LogFactory.getLog(MySQLExportMapper.class.getName()); public static final Log LOG = LogFactory.getLog(
MySQLExportMapper.class.getName());
/** Configuration key that specifies the number of bytes before which it /** Configuration key that specifies the number of bytes before which it
* commits the current export transaction and opens a new one. * commits the current export transaction and opens a new one.
@ -82,7 +82,7 @@ public class MySQLExportMapper<KEYIN, VALIN>
protected AsyncSink outSink; protected AsyncSink outSink;
protected AsyncSink errSink; protected AsyncSink errSink;
/** File object where we wrote the user's password to pass to mysqlimport */ /** File object where we wrote the user's password to pass to mysqlimport. */
protected File passwordFile; protected File passwordFile;
/** Character set used to write to mysqlimport. */ /** Character set used to write to mysqlimport. */

View File

@ -31,7 +31,8 @@
* that DBWritable to the OutputFormat for writeback to the database. * that DBWritable to the OutputFormat for writeback to the database.
*/ */
public class SequenceFileExportMapper public class SequenceFileExportMapper
extends AutoProgressMapper<LongWritable, SqoopRecord, SqoopRecord, NullWritable> { extends AutoProgressMapper<LongWritable, SqoopRecord, SqoopRecord,
NullWritable> {
public SequenceFileExportMapper() { public SequenceFileExportMapper() {
} }

View File

@ -31,12 +31,14 @@
* Imports records by writing them to a SequenceFile. * Imports records by writing them to a SequenceFile.
*/ */
public class SequenceFileImportMapper public class SequenceFileImportMapper
extends AutoProgressMapper<LongWritable, SqoopRecord, LongWritable, SqoopRecord> { extends AutoProgressMapper<LongWritable, SqoopRecord, LongWritable,
SqoopRecord> {
private LargeObjectLoader lobLoader; private LargeObjectLoader lobLoader;
@Override @Override
protected void setup(Context context) throws IOException, InterruptedException { protected void setup(Context context)
throws IOException, InterruptedException {
this.lobLoader = new LargeObjectLoader(context.getConfiguration(), this.lobLoader = new LargeObjectLoader(context.getConfiguration(),
FileOutputFormat.getWorkOutputPath(context)); FileOutputFormat.getWorkOutputPath(context));
} }

View File

@ -25,15 +25,15 @@
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.sqoop.lib.RecordParser; import org.apache.hadoop.sqoop.lib.RecordParser;
import org.apache.hadoop.sqoop.lib.SqoopRecord; import org.apache.hadoop.sqoop.lib.SqoopRecord;
/** /**
* Converts an input record from a string representation to a parsed Sqoop record * Converts an input record from a string representation to a parsed Sqoop
* and emits that DBWritable to the OutputFormat for writeback to the database. * record and emits that DBWritable to the OutputFormat for writeback to the
* database.
*/ */
public class TextExportMapper public class TextExportMapper
extends AutoProgressMapper<LongWritable, Text, SqoopRecord, NullWritable> { extends AutoProgressMapper<LongWritable, Text, SqoopRecord, NullWritable> {
@ -67,7 +67,8 @@ protected void setup(Context context)
} }
if (null == recordImpl) { if (null == recordImpl) {
throw new IOException("Could not instantiate object of type " + recordClassName); throw new IOException("Could not instantiate object of type "
+ recordClassName);
} }
} }

View File

@ -43,7 +43,8 @@ public TextImportMapper() {
} }
@Override @Override
protected void setup(Context context) throws IOException, InterruptedException { protected void setup(Context context)
throws IOException, InterruptedException {
this.lobLoader = new LargeObjectLoader(context.getConfiguration(), this.lobLoader = new LargeObjectLoader(context.getConfiguration(),
FileOutputFormat.getWorkOutputPath(context)); FileOutputFormat.getWorkOutputPath(context));
} }

View File

@ -21,7 +21,6 @@
import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager; import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.SqlManager;
import org.apache.hadoop.sqoop.lib.BigDecimalSerializer; import org.apache.hadoop.sqoop.lib.BigDecimalSerializer;
import org.apache.hadoop.sqoop.lib.FieldFormatter; import org.apache.hadoop.sqoop.lib.FieldFormatter;
import org.apache.hadoop.sqoop.lib.JdbcWritableBridge; import org.apache.hadoop.sqoop.lib.JdbcWritableBridge;
@ -45,7 +44,7 @@
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
/** /**
* Creates an ORM class to represent a table from a database * Creates an ORM class to represent a table from a database.
*/ */
public class ClassWriter { public class ClassWriter {
@ -209,12 +208,13 @@ public static String toIdentifier(String candidate) {
/** /**
* @param javaType * @param javaType
* @return the name of the method of JdbcWritableBridge to read an entry with a given java type. * @return the name of the method of JdbcWritableBridge to read an entry
* with a given java type.
*/ */
private String dbGetterForType(String javaType) { private String dbGetterForType(String javaType) {
// All Class-based types (e.g., java.math.BigDecimal) are handled with // All Class-based types (e.g., java.math.BigDecimal) are handled with
// "readBar" where some.package.foo.Bar is the canonical class name. // "readBar" where some.package.foo.Bar is the canonical class name. Turn
// Turn the javaType string into the getter type string. // the javaType string into the getter type string.
String [] parts = javaType.split("\\."); String [] parts = javaType.split("\\.");
if (parts.length == 0) { if (parts.length == 0) {
@ -224,18 +224,21 @@ private String dbGetterForType(String javaType) {
String lastPart = parts[parts.length - 1]; String lastPart = parts[parts.length - 1];
try { try {
String getter = "read" + Character.toUpperCase(lastPart.charAt(0)) + lastPart.substring(1); String getter = "read" + Character.toUpperCase(lastPart.charAt(0))
+ lastPart.substring(1);
return getter; return getter;
} catch (StringIndexOutOfBoundsException oob) { } catch (StringIndexOutOfBoundsException oob) {
// lastPart.*() doesn't work on empty strings. // lastPart.*() doesn't work on empty strings.
LOG.error("Could not infer JdbcWritableBridge getter for Java type " + javaType); LOG.error("Could not infer JdbcWritableBridge getter for Java type "
+ javaType);
return null; return null;
} }
} }
/** /**
* @param javaType * @param javaType
* @return the name of the method of JdbcWritableBridge to write an entry with a given java type. * @return the name of the method of JdbcWritableBridge to write an entry
* with a given java type.
*/ */
private String dbSetterForType(String javaType) { private String dbSetterForType(String javaType) {
// TODO(aaron): Lots of unit tests needed here. // TODO(aaron): Lots of unit tests needed here.
@ -249,11 +252,13 @@ private String dbSetterForType(String javaType) {
String lastPart = parts[parts.length - 1]; String lastPart = parts[parts.length - 1];
try { try {
String setter = "write" + Character.toUpperCase(lastPart.charAt(0)) + lastPart.substring(1); String setter = "write" + Character.toUpperCase(lastPart.charAt(0))
+ lastPart.substring(1);
return setter; return setter;
} catch (StringIndexOutOfBoundsException oob) { } catch (StringIndexOutOfBoundsException oob) {
// lastPart.*() doesn't work on empty strings. // lastPart.*() doesn't work on empty strings.
LOG.error("Could not infer PreparedStatement setter for Java type " + javaType); LOG.error("Could not infer PreparedStatement setter for Java type "
+ javaType);
return null; return null;
} }
} }
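Editor's note: dbGetterForType() and dbSetterForType() above share the same name mangling: take the last dot-separated segment of the Java type and prefix "read" or "write". A small reconstruction of that rule, with assumed inputs:

public class BridgeMethodNames {
  static String methodFor(String prefix, String javaType) {
    String[] parts = javaType.split("\\.");
    String lastPart = parts[parts.length - 1];
    return prefix + Character.toUpperCase(lastPart.charAt(0))
        + lastPart.substring(1);
  }

  public static void main(String[] args) {
    System.out.println(methodFor("read", "java.math.BigDecimal")); // readBigDecimal
    System.out.println(methodFor("write", "String"));              // writeString
  }
}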
@ -262,7 +267,7 @@ private String stringifierForType(String javaType, String colName) {
if (javaType.equals("String")) { if (javaType.equals("String")) {
return colName; return colName;
} else { } else {
// this is an object type -- just call its toString() in a null-safe way. // This is an object type -- just call its toString() in a null-safe way.
return "\"\" + " + colName; return "\"\" + " + colName;
} }
} }
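Editor's note: stringifierForType() above emits '"" + colName' for object types so that the generated toString() is null-safe. A quick demonstration of why concatenation is used instead of calling toString() directly (the BigDecimal field is just an example):

public class NullSafeToString {
  public static void main(String[] args) {
    java.math.BigDecimal price = null;
    // Concatenation turns a null reference into the text "null"
    // instead of throwing a NullPointerException.
    System.out.println("" + price);   // prints: null

    price = new java.math.BigDecimal("9.99");
    System.out.println("" + price);   // prints: 9.99
  }
}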
@ -271,36 +276,49 @@ private String stringifierForType(String javaType, String colName) {
* @param javaType the type to read * @param javaType the type to read
* @param inputObj the name of the DataInput to read from * @param inputObj the name of the DataInput to read from
* @param colName the column name to read * @param colName the column name to read
* @return the line of code involving a DataInput object to read an entry with a given java type. * @return the line of code involving a DataInput object to read an entry
* with a given java type.
*/ */
private String rpcGetterForType(String javaType, String inputObj, String colName) { private String rpcGetterForType(String javaType, String inputObj,
String colName) {
if (javaType.equals("Integer")) { if (javaType.equals("Integer")) {
return " this." + colName + " = Integer.valueOf(" + inputObj + ".readInt());\n"; return " this." + colName + " = Integer.valueOf(" + inputObj
+ ".readInt());\n";
} else if (javaType.equals("Long")) { } else if (javaType.equals("Long")) {
return " this." + colName + " = Long.valueOf(" + inputObj + ".readLong());\n"; return " this." + colName + " = Long.valueOf(" + inputObj
+ ".readLong());\n";
} else if (javaType.equals("Float")) { } else if (javaType.equals("Float")) {
return " this." + colName + " = Float.valueOf(" + inputObj + ".readFloat());\n"; return " this." + colName + " = Float.valueOf(" + inputObj
+ ".readFloat());\n";
} else if (javaType.equals("Double")) { } else if (javaType.equals("Double")) {
return " this." + colName + " = Double.valueOf(" + inputObj + ".readDouble());\n"; return " this." + colName + " = Double.valueOf(" + inputObj
+ ".readDouble());\n";
} else if (javaType.equals("Boolean")) { } else if (javaType.equals("Boolean")) {
return " this." + colName + " = Boolean.valueOf(" + inputObj + ".readBoolean());\n"; return " this." + colName + " = Boolean.valueOf(" + inputObj
+ ".readBoolean());\n";
} else if (javaType.equals("String")) { } else if (javaType.equals("String")) {
return " this." + colName + " = Text.readString(" + inputObj + ");\n"; return " this." + colName + " = Text.readString(" + inputObj + ");\n";
} else if (javaType.equals("java.sql.Date")) { } else if (javaType.equals("java.sql.Date")) {
return " this." + colName + " = new Date(" + inputObj + ".readLong());\n"; return " this." + colName + " = new Date(" + inputObj
+ ".readLong());\n";
} else if (javaType.equals("java.sql.Time")) { } else if (javaType.equals("java.sql.Time")) {
return " this." + colName + " = new Time(" + inputObj + ".readLong());\n"; return " this." + colName + " = new Time(" + inputObj
+ ".readLong());\n";
} else if (javaType.equals("java.sql.Timestamp")) { } else if (javaType.equals("java.sql.Timestamp")) {
return " this." + colName + " = new Timestamp(" + inputObj + ".readLong());\n" return " this." + colName + " = new Timestamp(" + inputObj
+ " this." + colName + ".setNanos(" + inputObj + ".readInt());\n"; + ".readLong());\n" + " this." + colName + ".setNanos("
+ inputObj + ".readInt());\n";
} else if (javaType.equals("java.math.BigDecimal")) { } else if (javaType.equals("java.math.BigDecimal")) {
return " this." + colName + " = " + BigDecimalSerializer.class.getCanonicalName() return " this." + colName + " = "
+ BigDecimalSerializer.class.getCanonicalName()
+ ".readFields(" + inputObj + ");\n"; + ".readFields(" + inputObj + ");\n";
} else if (javaType.equals(ClobRef.class.getName())) { } else if (javaType.equals(ClobRef.class.getName())) {
return " this." + colName + " = " + LobSerializer.class.getCanonicalName() return " this." + colName + " = "
+ LobSerializer.class.getCanonicalName()
+ ".readClobFields(" + inputObj + ");\n"; + ".readClobFields(" + inputObj + ");\n";
} else if (javaType.equals(BlobRef.class.getName())) { } else if (javaType.equals(BlobRef.class.getName())) {
return " this." + colName + " = " + LobSerializer.class.getCanonicalName() return " this." + colName + " = "
+ LobSerializer.class.getCanonicalName()
+ ".readBlobFields(" + inputObj + ");\n"; + ".readBlobFields(" + inputObj + ");\n";
} else if (javaType.equals(BytesWritable.class.getName())) { } else if (javaType.equals(BytesWritable.class.getName())) {
return " this." + colName + " = new BytesWritable();\n" return " this." + colName + " = new BytesWritable();\n"
@ -312,13 +330,14 @@ private String rpcGetterForType(String javaType, String inputObj, String colName
} }
/** /**
* Deserialize a possibly-null value from the DataInput stream * Deserialize a possibly-null value from the DataInput stream.
* @param javaType name of the type to deserialize if it's not null. * @param javaType name of the type to deserialize if it's not null.
* @param inputObj name of the DataInput to read from * @param inputObj name of the DataInput to read from
* @param colName the column name to read. * @param colName the column name to read.
* @return * @return
*/ */
private String rpcGetterForMaybeNull(String javaType, String inputObj, String colName) { private String rpcGetterForMaybeNull(String javaType, String inputObj,
String colName) {
return " if (" + inputObj + ".readBoolean()) { \n" return " if (" + inputObj + ".readBoolean()) { \n"
+ " this." + colName + " = null;\n" + " this." + colName + " = null;\n"
+ " } else {\n" + " } else {\n"
@ -330,10 +349,11 @@ private String rpcGetterForMaybeNull(String javaType, String inputObj, String co
* @param javaType the type to write * @param javaType the type to write
* @param inputObj the name of the DataOutput to write to * @param inputObj the name of the DataOutput to write to
* @param colName the column name to write * @param colName the column name to write
* @return the line of code involving a DataOutput object to write an entry with * @return the line of code involving a DataOutput object to write an entry
* a given java type. * with a given java type.
*/ */
private String rpcSetterForType(String javaType, String outputObj, String colName) { private String rpcSetterForType(String javaType, String outputObj,
String colName) {
if (javaType.equals("Integer")) { if (javaType.equals("Integer")) {
return " " + outputObj + ".writeInt(this." + colName + ");\n"; return " " + outputObj + ".writeInt(this." + colName + ");\n";
} else if (javaType.equals("Long")) { } else if (javaType.equals("Long")) {
@ -347,12 +367,15 @@ private String rpcSetterForType(String javaType, String outputObj, String colNam
} else if (javaType.equals("String")) { } else if (javaType.equals("String")) {
return " Text.writeString(" + outputObj + ", " + colName + ");\n"; return " Text.writeString(" + outputObj + ", " + colName + ");\n";
} else if (javaType.equals("java.sql.Date")) { } else if (javaType.equals("java.sql.Date")) {
return " " + outputObj + ".writeLong(this." + colName + ".getTime());\n"; return " " + outputObj + ".writeLong(this." + colName
+ ".getTime());\n";
} else if (javaType.equals("java.sql.Time")) { } else if (javaType.equals("java.sql.Time")) {
return " " + outputObj + ".writeLong(this." + colName + ".getTime());\n"; return " " + outputObj + ".writeLong(this." + colName
+ ".getTime());\n";
} else if (javaType.equals("java.sql.Timestamp")) { } else if (javaType.equals("java.sql.Timestamp")) {
return " " + outputObj + ".writeLong(this." + colName + ".getTime());\n" return " " + outputObj + ".writeLong(this." + colName
+ " " + outputObj + ".writeInt(this." + colName + ".getNanos());\n"; + ".getTime());\n" + " " + outputObj + ".writeInt(this." + colName
+ ".getNanos());\n";
} else if (javaType.equals(BytesWritable.class.getName())) { } else if (javaType.equals(BytesWritable.class.getName())) {
return " this." + colName + ".write(" + outputObj + ");\n"; return " this." + colName + ".write(" + outputObj + ");\n";
} else if (javaType.equals("java.math.BigDecimal")) { } else if (javaType.equals("java.math.BigDecimal")) {
@ -378,7 +401,8 @@ private String rpcSetterForType(String javaType, String outputObj, String colNam
* @param colName the column name to read. * @param colName the column name to read.
* @return * @return
*/ */
private String rpcSetterForMaybeNull(String javaType, String outputObj, String colName) { private String rpcSetterForMaybeNull(String javaType, String outputObj,
String colName) {
return " if (null == this." + colName + ") { \n" return " if (null == this." + colName + ") { \n"
+ " " + outputObj + ".writeBoolean(true);\n" + " " + outputObj + ".writeBoolean(true);\n"
+ " } else {\n" + " } else {\n"
@ -388,13 +412,13 @@ private String rpcSetterForMaybeNull(String javaType, String outputObj, String c
} }
/** /**
* Generate a member field and getter method for each column * Generate a member field and getter method for each column.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateFields(Map<String, Integer> columnTypes, String [] colNames, private void generateFields(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
for (String col : colNames) { for (String col : colNames) {
int sqlType = columnTypes.get(col); int sqlType = columnTypes.get(col);
@ -412,15 +436,16 @@ private void generateFields(Map<String, Integer> columnTypes, String [] colNames
} }
/** /**
* Generate the readFields() method used by the database * Generate the readFields() method used by the database.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateDbRead(Map<String, Integer> columnTypes, String [] colNames, private void generateDbRead(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
sb.append(" public void readFields(ResultSet __dbResults) throws SQLException {\n"); sb.append(" public void readFields(ResultSet __dbResults) ");
sb.append("throws SQLException {\n");
// Save ResultSet object cursor for use in LargeObjectLoader // Save ResultSet object cursor for use in LargeObjectLoader
// if necessary. // if necessary.
@ -462,7 +487,8 @@ private void generateLoadLargeObjects(Map<String, Integer> columnTypes,
// readFields() method generated by generateDbRead(). // readFields() method generated by generateDbRead().
sb.append(" public void loadLargeObjects(LargeObjectLoader __loader)\n"); sb.append(" public void loadLargeObjects(LargeObjectLoader __loader)\n");
sb.append(" throws SQLException, IOException, InterruptedException {\n"); sb.append(" throws SQLException, IOException, ");
sb.append("InterruptedException {\n");
int fieldNum = 0; int fieldNum = 0;
@ -479,9 +505,9 @@ private void generateLoadLargeObjects(Map<String, Integer> columnTypes,
String getterMethod = dbGetterForType(javaType); String getterMethod = dbGetterForType(javaType);
if ("readClobRef".equals(getterMethod) if ("readClobRef".equals(getterMethod)
|| "readBlobRef".equals(getterMethod)) { || "readBlobRef".equals(getterMethod)) {
// This field is a blob/clob field with delayed loading. // This field is a blob/clob field with delayed loading. Call the
// Call the appropriate LargeObjectLoader method (which has the // appropriate LargeObjectLoader method (which has the same name as a
// same name as a JdbcWritableBridge method). // JdbcWritableBridge method).
sb.append(" this." + col + " = __loader." + getterMethod sb.append(" this." + col + " = __loader." + getterMethod
+ "(" + fieldNum + ", this.__cur_result_set);\n"); + "(" + fieldNum + ", this.__cur_result_set);\n");
} }
@ -491,19 +517,21 @@ private void generateLoadLargeObjects(Map<String, Integer> columnTypes,
/** /**
* Generate the write() method used by the database * Generate the write() method used by the database.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateDbWrite(Map<String, Integer> columnTypes, String [] colNames, private void generateDbWrite(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
sb.append(" public void write(PreparedStatement __dbStmt) throws SQLException {\n"); sb.append(" public void write(PreparedStatement __dbStmt) "
+ "throws SQLException {\n");
sb.append(" write(__dbStmt, 0);\n"); sb.append(" write(__dbStmt, 0);\n");
sb.append(" }\n\n"); sb.append(" }\n\n");
sb.append(" public int write(PreparedStatement __dbStmt, int __off) throws SQLException {\n"); sb.append(" public int write(PreparedStatement __dbStmt, int __off) "
+ "throws SQLException {\n");
int fieldNum = 0; int fieldNum = 0;
@ -533,15 +561,16 @@ private void generateDbWrite(Map<String, Integer> columnTypes, String [] colName
/** /**
* Generate the readFields() method used by the Hadoop RPC system * Generate the readFields() method used by the Hadoop RPC system.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateHadoopRead(Map<String, Integer> columnTypes, String [] colNames, private void generateHadoopRead(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
sb.append(" public void readFields(DataInput __dataIn) throws IOException {\n"); sb.append(" public void readFields(DataInput __dataIn) "
+ "throws IOException {\n");
for (String col : colNames) { for (String col : colNames) {
int sqlType = columnTypes.get(col); int sqlType = columnTypes.get(col);
@ -610,27 +639,27 @@ private void generateCloneMethod(Map<String, Integer> columnTypes,
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateToString(Map<String, Integer> columnTypes, String [] colNames, private void generateToString(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
// Embed the delimiters into the class, as characters... // Embed the delimiters into the class, as characters...
sb.append(" private static final char __OUTPUT_FIELD_DELIM_CHAR = " + sb.append(" private static final char __OUTPUT_FIELD_DELIM_CHAR = "
+ (int)options.getOutputFieldDelim() + ";\n"); + (int)options.getOutputFieldDelim() + ";\n");
sb.append(" private static final char __OUTPUT_RECORD_DELIM_CHAR = " sb.append(" private static final char __OUTPUT_RECORD_DELIM_CHAR = "
+ (int)options.getOutputRecordDelim() + ";\n"); + (int)options.getOutputRecordDelim() + ";\n");
// as strings... // as strings...
sb.append(" private static final String __OUTPUT_FIELD_DELIM = \"\" + (char) " sb.append(" private static final String __OUTPUT_FIELD_DELIM = "
+ (int) options.getOutputFieldDelim() + ";\n"); + "\"\" + (char) " + (int) options.getOutputFieldDelim() + ";\n");
sb.append(" private static final String __OUTPUT_RECORD_DELIM = \"\" + (char) " sb.append(" private static final String __OUTPUT_RECORD_DELIM = "
+ (int) options.getOutputRecordDelim() + ";\n"); + "\"\" + (char) " + (int) options.getOutputRecordDelim() + ";\n");
sb.append(" private static final String __OUTPUT_ENCLOSED_BY = \"\" + (char) " sb.append(" private static final String __OUTPUT_ENCLOSED_BY = "
+ (int) options.getOutputEnclosedBy() + ";\n"); + "\"\" + (char) " + (int) options.getOutputEnclosedBy() + ";\n");
sb.append(" private static final String __OUTPUT_ESCAPED_BY = \"\" + (char) " sb.append(" private static final String __OUTPUT_ESCAPED_BY = "
+ (int) options.getOutputEscapedBy() + ";\n"); + "\"\" + (char) " + (int) options.getOutputEscapedBy() + ";\n");
// and some more options. // and some more options.
sb.append(" private static final boolean __OUTPUT_ENCLOSE_REQUIRED = " sb.append(" private static final boolean __OUTPUT_ENCLOSE_REQUIRED = "
+ options.isOutputEncloseRequired() + ";\n"); + options.isOutputEncloseRequired() + ";\n");
sb.append(" private static final char [] __OUTPUT_DELIMITER_LIST = { " sb.append(" private static final char [] __OUTPUT_DELIMITER_LIST = { "
+ "__OUTPUT_FIELD_DELIM_CHAR, __OUTPUT_RECORD_DELIM_CHAR };\n\n"); + "__OUTPUT_FIELD_DELIM_CHAR, __OUTPUT_RECORD_DELIM_CHAR };\n\n");
@ -662,8 +691,8 @@ private void generateToString(Map<String, Integer> columnTypes, String [] colNam
} }
sb.append(" __sb.append(FieldFormatter.escapeAndEnclose(" + stringExpr sb.append(" __sb.append(FieldFormatter.escapeAndEnclose(" + stringExpr
+ ", __OUTPUT_ESCAPED_BY, __OUTPUT_ENCLOSED_BY, __OUTPUT_DELIMITER_LIST, " + ", __OUTPUT_ESCAPED_BY, __OUTPUT_ENCLOSED_BY, "
+ "__OUTPUT_ENCLOSE_REQUIRED));\n"); + "__OUTPUT_DELIMITER_LIST, __OUTPUT_ENCLOSE_REQUIRED));\n");
} }
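Editor's note: generateToString() above embeds each delimiter into the generated class as its integer code point and rebuilds the one-character string with a cast, which survives the round trip through generated source even for non-printable delimiters. A hedged sketch with an assumed comma delimiter:

public class DelimEmbedding {
  public static void main(String[] args) {
    char outputFieldDelim = ',';   // assumed user choice

    // This is the kind of text ClassWriter writes into the generated class...
    String emitted = "  private static final String __OUTPUT_FIELD_DELIM = "
        + "\"\" + (char) " + (int) outputFieldDelim + ";";
    System.out.println(emitted);
    // prints:   private static final String __OUTPUT_FIELD_DELIM = "" + (char) 44;

    // ...and this is what that line evaluates to once compiled.
    String delim = "" + (char) 44;
    System.out.println(delim.equals(","));   // true
  }
}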
@ -675,17 +704,21 @@ private void generateToString(Map<String, Integer> columnTypes, String [] colNam
/** /**
* Helper method for generateParser(). Writes out the parse() method for one particular * Helper method for generateParser(). Writes out the parse() method for one
* type we support as an input string-ish type. * particular type we support as an input string-ish type.
*/ */
private void generateParseMethod(String typ, StringBuilder sb) { private void generateParseMethod(String typ, StringBuilder sb) {
sb.append(" public void parse(" + typ + " __record) throws RecordParser.ParseError {\n"); sb.append(" public void parse(" + typ + " __record) "
+ "throws RecordParser.ParseError {\n");
sb.append(" if (null == this.__parser) {\n"); sb.append(" if (null == this.__parser) {\n");
sb.append(" this.__parser = new RecordParser(__INPUT_FIELD_DELIM_CHAR, "); sb.append(" this.__parser = new RecordParser("
sb.append("__INPUT_RECORD_DELIM_CHAR, __INPUT_ENCLOSED_BY_CHAR, __INPUT_ESCAPED_BY_CHAR, "); + "__INPUT_FIELD_DELIM_CHAR, ");
sb.append("__INPUT_RECORD_DELIM_CHAR, __INPUT_ENCLOSED_BY_CHAR, "
+ "__INPUT_ESCAPED_BY_CHAR, ");
sb.append("__INPUT_ENCLOSE_REQUIRED);\n"); sb.append("__INPUT_ENCLOSE_REQUIRED);\n");
sb.append(" }\n"); sb.append(" }\n");
sb.append(" List<String> __fields = this.__parser.parseRecord(__record);\n"); sb.append(" List<String> __fields = "
+ "this.__parser.parseRecord(__record);\n");
sb.append(" __loadFromFields(__fields);\n"); sb.append(" __loadFromFields(__fields);\n");
sb.append(" }\n\n"); sb.append(" }\n\n");
} }
@ -701,18 +734,20 @@ private void parseNullVal(String colName, StringBuilder sb) {
} }
/** /**
* Helper method for generateParser(). Generates the code that loads one field of * Helper method for generateParser(). Generates the code that loads one
* a specified name and type from the next element of the field strings list. * field of a specified name and type from the next element of the field
* strings list.
*/ */
private void parseColumn(String colName, int colType, StringBuilder sb) { private void parseColumn(String colName, int colType, StringBuilder sb) {
// assume that we have __it and __cur_str vars, based on __loadFromFields() code. // assume that we have __it and __cur_str vars, based on
// __loadFromFields() code.
sb.append(" __cur_str = __it.next();\n"); sb.append(" __cur_str = __it.next();\n");
String javaType = connManager.toJavaType(colType); String javaType = connManager.toJavaType(colType);
parseNullVal(colName, sb); parseNullVal(colName, sb);
if (javaType.equals("String")) { if (javaType.equals("String")) {
// TODO(aaron): Distinguish between 'null' and null. Currently they both set the // TODO(aaron): Distinguish between 'null' and null. Currently they both
// actual object to null. // set the actual object to null.
sb.append(" this." + colName + " = __cur_str;\n"); sb.append(" this." + colName + " = __cur_str;\n");
} else if (javaType.equals("Integer")) { } else if (javaType.equals("Integer")) {
sb.append(" this." + colName + " = Integer.valueOf(__cur_str);\n"); sb.append(" this." + colName + " = Integer.valueOf(__cur_str);\n");
@ -725,13 +760,17 @@ private void parseColumn(String colName, int colType, StringBuilder sb) {
} else if (javaType.equals("Boolean")) { } else if (javaType.equals("Boolean")) {
sb.append(" this." + colName + " = Boolean.valueOf(__cur_str);\n"); sb.append(" this." + colName + " = Boolean.valueOf(__cur_str);\n");
} else if (javaType.equals("java.sql.Date")) { } else if (javaType.equals("java.sql.Date")) {
sb.append(" this." + colName + " = java.sql.Date.valueOf(__cur_str);\n"); sb.append(" this." + colName
+ " = java.sql.Date.valueOf(__cur_str);\n");
} else if (javaType.equals("java.sql.Time")) { } else if (javaType.equals("java.sql.Time")) {
sb.append(" this." + colName + " = java.sql.Time.valueOf(__cur_str);\n"); sb.append(" this." + colName
+ " = java.sql.Time.valueOf(__cur_str);\n");
} else if (javaType.equals("java.sql.Timestamp")) { } else if (javaType.equals("java.sql.Timestamp")) {
sb.append(" this." + colName + " = java.sql.Timestamp.valueOf(__cur_str);\n"); sb.append(" this." + colName
+ " = java.sql.Timestamp.valueOf(__cur_str);\n");
} else if (javaType.equals("java.math.BigDecimal")) { } else if (javaType.equals("java.math.BigDecimal")) {
sb.append(" this." + colName + " = new java.math.BigDecimal(__cur_str);\n"); sb.append(" this." + colName
+ " = new java.math.BigDecimal(__cur_str);\n");
} else if (javaType.equals(ClobRef.class.getName())) { } else if (javaType.equals(ClobRef.class.getName())) {
sb.append(" this." + colName + " = ClobRef.parse(__cur_str);\n"); sb.append(" this." + colName + " = ClobRef.parse(__cur_str);\n");
} else if (javaType.equals(BlobRef.class.getName())) { } else if (javaType.equals(BlobRef.class.getName())) {
@ -744,18 +783,19 @@ private void parseColumn(String colName, int colType, StringBuilder sb) {
} }
/** /**
* Generate the parse() method * Generate the parse() method.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateParser(Map<String, Integer> columnTypes, String [] colNames, private void generateParser(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
// Embed into the class the delimiter characters to use when parsing input records. // Embed into the class the delimiter characters to use when parsing input
// Note that these can differ from the delims to use as output via toString(), if // records. Note that these can differ from the delims to use as output
// the user wants to use this class to convert one format to another. // via toString(), if the user wants to use this class to convert one
sb.append(" private static final char __INPUT_FIELD_DELIM_CHAR = " + // format to another.
sb.append(" private static final char __INPUT_FIELD_DELIM_CHAR = "
+ (int)options.getInputFieldDelim() + ";\n"); + (int)options.getInputFieldDelim() + ";\n");
sb.append(" private static final char __INPUT_RECORD_DELIM_CHAR = " sb.append(" private static final char __INPUT_RECORD_DELIM_CHAR = "
+ (int)options.getInputRecordDelim() + ";\n"); + (int)options.getInputRecordDelim() + ";\n");
@ -778,9 +818,9 @@ private void generateParser(Map<String, Integer> columnTypes, String [] colNames
generateParseMethod("ByteBuffer", sb); generateParseMethod("ByteBuffer", sb);
generateParseMethod("CharBuffer", sb); generateParseMethod("CharBuffer", sb);
// The wrapper methods call __loadFromFields() to actually interpret the raw // The wrapper methods call __loadFromFields() to actually interpret the
// field data as string, int, boolean, etc. The generation of this method is // raw field data as string, int, boolean, etc. The generation of this
// type-dependent for the fields. // method is type-dependent for the fields.
sb.append(" private void __loadFromFields(List<String> fields) {\n"); sb.append(" private void __loadFromFields(List<String> fields) {\n");
sb.append(" Iterator<String> __it = fields.listIterator();\n"); sb.append(" Iterator<String> __it = fields.listIterator();\n");
sb.append(" String __cur_str;\n"); sb.append(" String __cur_str;\n");
@ -792,15 +832,16 @@ private void generateParser(Map<String, Integer> columnTypes, String [] colNames
} }
/** /**
* Generate the write() method used by the Hadoop RPC system * Generate the write() method used by the Hadoop RPC system.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to * @param sb - StringBuilder to append code to
*/ */
private void generateHadoopWrite(Map<String, Integer> columnTypes, String [] colNames, private void generateHadoopWrite(Map<String, Integer> columnTypes,
StringBuilder sb) { String [] colNames, StringBuilder sb) {
sb.append(" public void write(DataOutput __dataOut) throws IOException {\n"); sb.append(" public void write(DataOutput __dataOut) "
+ "throws IOException {\n");
for (String col : colNames) { for (String col : colNames) {
int sqlType = columnTypes.get(col); int sqlType = columnTypes.get(col);
@ -840,21 +881,22 @@ public void generate() throws IOException {
String identifier = toIdentifier(col); String identifier = toIdentifier(col);
cleanedColNames[i] = identifier; cleanedColNames[i] = identifier;
// make sure the col->type mapping holds for the // Make sure the col->type mapping holds for the
// new identifier name, too. // new identifier name, too.
columnTypes.put(identifier, columnTypes.get(col)); columnTypes.put(identifier, columnTypes.get(col));
} }
// Generate the Java code // Generate the Java code.
StringBuilder sb = generateClassForColumns(columnTypes, cleanedColNames); StringBuilder sb = generateClassForColumns(columnTypes, cleanedColNames);
// Write this out to a file. // Write this out to a file.
String codeOutDir = options.getCodeOutputDir(); String codeOutDir = options.getCodeOutputDir();
// Get the class name to generate, which includes package components // Get the class name to generate, which includes package components.
String className = new TableClassName(options).getClassForTable(tableName); String className = new TableClassName(options).getClassForTable(tableName);
// convert the '.' characters to '/' characters // Convert the '.' characters to '/' characters.
String sourceFilename = className.replace('.', File.separatorChar) + ".java"; String sourceFilename = className.replace('.', File.separatorChar)
+ ".java";
String filename = codeOutDir + sourceFilename; String filename = codeOutDir + sourceFilename;
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
@ -908,7 +950,7 @@ public void generate() throws IOException {
} }
/** /**
* Generate the ORM code for a table object containing the named columns * Generate the ORM code for a table object containing the named columns.
* @param columnTypes - mapping from column names to sql types * @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table. * @param colNames - ordered list of column names for table.
* @return - A StringBuilder that contains the text of the class code. * @return - A StringBuilder that contains the text of the class code.
@ -917,7 +959,8 @@ public StringBuilder generateClassForColumns(Map<String, Integer> columnTypes,
String [] colNames) { String [] colNames) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
sb.append("// ORM class for " + tableName + "\n"); sb.append("// ORM class for " + tableName + "\n");
sb.append("// WARNING: This class is AUTO-GENERATED. Modify at your own risk.\n"); sb.append("// WARNING: This class is AUTO-GENERATED. "
+ "Modify at your own risk.\n");
TableClassName tableNameInfo = new TableClassName(options); TableClassName tableNameInfo = new TableClassName(options);
@ -958,7 +1001,8 @@ public StringBuilder generateClassForColumns(Map<String, Integer> columnTypes,
String className = tableNameInfo.getShortClassForTable(tableName); String className = tableNameInfo.getShortClassForTable(tableName);
sb.append("public class " + className sb.append("public class " + className
+ " implements DBWritable, SqoopRecord, Writable {\n"); + " implements DBWritable, SqoopRecord, Writable {\n");
sb.append(" public static final int PROTOCOL_VERSION = " + CLASS_WRITER_VERSION + ";\n"); sb.append(" public static final int PROTOCOL_VERSION = "
+ CLASS_WRITER_VERSION + ";\n");
sb.append(" protected ResultSet __cur_result_set;\n"); sb.append(" protected ResultSet __cur_result_set;\n");
generateFields(columnTypes, colNames, sb); generateFields(columnTypes, colNames, sb);
generateDbRead(columnTypes, colNames, sb); generateDbRead(columnTypes, colNames, sb);
@ -970,7 +1014,8 @@ public StringBuilder generateClassForColumns(Map<String, Integer> columnTypes,
generateParser(columnTypes, colNames, sb); generateParser(columnTypes, colNames, sb);
generateCloneMethod(columnTypes, colNames, sb); generateCloneMethod(columnTypes, colNames, sb);
// TODO(aaron): Generate hashCode(), compareTo(), equals() so it can be a WritableComparable // TODO(aaron): Generate hashCode(), compareTo(), equals() so it can be a
// WritableComparable
sb.append("}\n"); sb.append("}\n");
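As a concrete illustration of what the appends in this hunk emit, the (int) cast turns the configured delimiter character into a plain numeric literal in the generated record class. A minimal, self-contained sketch; the comma delimiter below is a hypothetical choice, not a value taken from this patch:

    public class DelimLiteralDemo {
      public static void main(String[] args) {
        char fieldDelim = ',';  // hypothetical delimiter
        StringBuilder sb = new StringBuilder();
        sb.append("  private static final char __INPUT_FIELD_DELIM_CHAR = "
            + (int) fieldDelim + ";\n");
        System.out.print(sb);   // prints the generated line with the literal 44
      }
    }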
View File
@ -38,7 +38,6 @@
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
@ -49,15 +48,13 @@
* Manages the compilation of a bunch of .java files into .class files * Manages the compilation of a bunch of .java files into .class files
* and eventually a jar. * and eventually a jar.
* *
* Also embeds this program's jar into the lib/ directory inside the compiled jar * Also embeds this program's jar into the lib/ directory inside the compiled
* to ensure that the job runs correctly. * jar to ensure that the job runs correctly.
*
*
*
*/ */
public class CompilationManager { public class CompilationManager {
public static final Log LOG = LogFactory.getLog(CompilationManager.class.getName()); public static final Log LOG = LogFactory.getLog(
CompilationManager.class.getName());
private SqoopOptions options; private SqoopOptions options;
private List<String> sources; private List<String> sources;
@ -98,7 +95,8 @@ private String findHadoopCoreJar() {
} }
for (File f : entries) { for (File f : entries) {
if (f.getName().startsWith("hadoop-") && f.getName().endsWith("-core.jar")) { if (f.getName().startsWith("hadoop-")
&& f.getName().endsWith("-core.jar")) {
LOG.info("Found hadoop core jar at: " + f.getAbsolutePath()); LOG.info("Found hadoop core jar at: " + f.getAbsolutePath());
return f.getAbsolutePath(); return f.getAbsolutePath();
} }
@ -128,9 +126,9 @@ public void compile() throws IOException {
// find hadoop-*-core.jar for classpath. // find hadoop-*-core.jar for classpath.
String coreJar = findHadoopCoreJar(); String coreJar = findHadoopCoreJar();
if (null == coreJar) { if (null == coreJar) {
// Couldn't find a core jar to insert into the CP for compilation. // Couldn't find a core jar to insert into the CP for compilation. If,
// If, however, we're running this from a unit test, then the path // however, we're running this from a unit test, then the path to the
// to the .class files might be set via the hadoop.alt.classpath property // .class files might be set via the hadoop.alt.classpath property
// instead. Check there first. // instead. Check there first.
String coreClassesPath = System.getProperty("hadoop.alt.classpath"); String coreClassesPath = System.getProperty("hadoop.alt.classpath");
if (null == coreClassesPath) { if (null == coreClassesPath) {
@ -200,7 +198,7 @@ public void compile() throws IOException {
} }
/** /**
* @return the complete filename of the .jar file to generate */ * @return the complete filename of the .jar file to generate. */
public String getJarFilename() { public String getJarFilename() {
String jarOutDir = options.getJarOutputDir(); String jarOutDir = options.getJarOutputDir();
String tableName = options.getTableName(); String tableName = options.getTableName();
@ -235,11 +233,11 @@ private void addClassFilesFromDir(File dir, JarOutputStream jstream)
baseDirName = baseDirName + File.separator; baseDirName = baseDirName + File.separator;
} }
// for each input class file, create a zipfile entry for it, // For each input class file, create a zipfile entry for it,
// read the file into a buffer, and write it to the jar file. // read the file into a buffer, and write it to the jar file.
for (File entry : dirEntries) { for (File entry : dirEntries) {
if (!entry.isDirectory()) { if (!entry.isDirectory()) {
// chomp off the portion of the full path that is shared // Chomp off the portion of the full path that is shared
// with the base directory where class files were put; // with the base directory where class files were put;
// we only record the subdir parts in the zip entry. // we only record the subdir parts in the zip entry.
String fullPath = entry.getAbsolutePath(); String fullPath = entry.getAbsolutePath();
@ -247,7 +245,8 @@ private void addClassFilesFromDir(File dir, JarOutputStream jstream)
boolean include = chompedPath.endsWith(".class") boolean include = chompedPath.endsWith(".class")
&& sources.contains( && sources.contains(
chompedPath.substring(0, chompedPath.length() - ".class".length()) + ".java"); chompedPath.substring(0, chompedPath.length() - ".class".length())
+ ".java");
if (include) { if (include) {
// include this file. // include this file.
@ -262,7 +261,7 @@ private void addClassFilesFromDir(File dir, JarOutputStream jstream)
} }
/** /**
* Create an output jar file to use when executing MapReduce jobs * Create an output jar file to use when executing MapReduce jobs.
*/ */
public void jar() throws IOException { public void jar() throws IOException {
String jarOutDir = options.getJarOutputDir(); String jarOutDir = options.getJarOutputDir();
@ -293,7 +292,8 @@ public void jar() throws IOException {
addLibJar(thisJarFile, jstream); addLibJar(thisJarFile, jstream);
} else { } else {
// couldn't find our own jar (we were running from .class files?) // couldn't find our own jar (we were running from .class files?)
LOG.warn("Could not find jar for Sqoop; MapReduce jobs may not run correctly."); LOG.warn("Could not find jar for Sqoop; "
+ "MapReduce jobs may not run correctly.");
} }
String shimJarFile = findShimJar(); String shimJarFile = findShimJar();
@ -347,12 +347,13 @@ private void addLibJar(String jarFilename, JarOutputStream jstream)
private static final int BUFFER_SZ = 4096; private static final int BUFFER_SZ = 4096;
/** /**
* utility method to copy a .class file into the jar stream. * Utility method to copy a .class file into the jar stream.
* @param f * @param f
* @param ostream * @param ostream
* @throws IOException * @throws IOException
*/ */
private void copyFileToStream(File f, OutputStream ostream) throws IOException { private void copyFileToStream(File f, OutputStream ostream)
throws IOException {
FileInputStream fis = new FileInputStream(f); FileInputStream fis = new FileInputStream(f);
byte [] buffer = new byte[BUFFER_SZ]; byte [] buffer = new byte[BUFFER_SZ];
try { try {
@ -381,7 +382,7 @@ private String findShimJar() {
return findJarForClass(h.getClass()); return findJarForClass(h.getClass());
} }
// method mostly cloned from o.a.h.mapred.JobConf.findContainingJar() // Method mostly cloned from o.a.h.mapred.JobConf.findContainingJar().
private String findJarForClass(Class<? extends Object> classObj) { private String findJarForClass(Class<? extends Object> classObj) {
ClassLoader loader = classObj.getClassLoader(); ClassLoader loader = classObj.getClassLoader();
String classFile = classObj.getName().replaceAll("\\.", "/") + ".class"; String classFile = classObj.getName().replaceAll("\\.", "/") + ".class";
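The lookup that findJarForClass performs is described above as a clone of o.a.h.mapred.JobConf.findContainingJar(). A hedged sketch of that pattern follows; it shows the usual shape of the lookup (scan the classloader's resources for a jar: URL and strip the entry suffix), not necessarily the exact body of this method:

    import java.net.URL;
    import java.net.URLDecoder;
    import java.util.Enumeration;

    public class FindJarSketch {
      // Roughly the JobConf.findContainingJar() pattern referenced above.
      public static String findJarForClass(Class<?> classObj) throws Exception {
        ClassLoader loader = classObj.getClassLoader();
        String classFile = classObj.getName().replaceAll("\\.", "/") + ".class";
        for (Enumeration<URL> urls = loader.getResources(classFile);
            urls.hasMoreElements();) {
          URL url = urls.nextElement();
          if ("jar".equals(url.getProtocol())) {
            String path = url.getPath();
            if (path.startsWith("file:")) {
              path = path.substring("file:".length());
            }
            path = URLDecoder.decode(path, "UTF-8");
            return path.replaceAll("!.*$", "");  // drop the "!/pkg/Cls.class" suffix
          }
        }
        return null;  // not loaded from a jar
      }

      public static void main(String[] args) throws Exception {
        // Prints the containing jar, or null when running from loose .class files.
        System.out.println(findJarForClass(FindJarSketch.class));
      }
    }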
View File
@ -25,18 +25,20 @@
/** /**
* Reconciles the table name being imported with the class naming information * Reconciles the table name being imported with the class naming information
* specified in SqoopOptions to determine the actual package and class name * specified in SqoopOptions to determine the actual package and class name to
* to use for a table. * use for a table.
*/ */
public class TableClassName { public class TableClassName {
public static final Log LOG = LogFactory.getLog(TableClassName.class.getName()); public static final Log LOG = LogFactory.getLog(
TableClassName.class.getName());
private final SqoopOptions options; private final SqoopOptions options;
public TableClassName(final SqoopOptions opts) { public TableClassName(final SqoopOptions opts) {
if (null == opts) { if (null == opts) {
throw new NullPointerException("Cannot instantiate a TableClassName on null options."); throw new NullPointerException(
"Cannot instantiate a TableClassName on null options.");
} else { } else {
this.options = opts; this.options = opts;
} }
@ -47,18 +49,19 @@ public TableClassName(final SqoopOptions opts) {
* package-part which will be used for a class. The actual table name being * package-part which will be used for a class. The actual table name being
* generated-for is irrelevant; so not an argument. * generated-for is irrelevant; so not an argument.
* *
* @return the package where generated ORM classes go. Will be null for top-level. * @return the package where generated ORM classes go. Will be null for
* top-level.
*/ */
public String getPackageForTable() { public String getPackageForTable() {
String predefinedClass = options.getClassName(); String predefinedClass = options.getClassName();
if (null != predefinedClass) { if (null != predefinedClass) {
// if the predefined classname contains a package-part, return that. // If the predefined classname contains a package-part, return that.
int lastDot = predefinedClass.lastIndexOf('.'); int lastDot = predefinedClass.lastIndexOf('.');
if (-1 == lastDot) { if (-1 == lastDot) {
// no package part. // No package part.
return null; return null;
} else { } else {
// return the string up to but not including the last dot. // Return the string up to but not including the last dot.
return predefinedClass.substring(0, lastDot); return predefinedClass.substring(0, lastDot);
} }
} else { } else {
@ -69,8 +72,8 @@ public String getPackageForTable() {
} }
/** /**
* @param tableName the name of the table being imported * @param tableName the name of the table being imported.
* @return the full name of the class to generate/use to import a table * @return the full name of the class to generate/use to import a table.
*/ */
public String getClassForTable(String tableName) { public String getClassForTable(String tableName) {
if (null == tableName) { if (null == tableName) {
@ -95,7 +98,8 @@ public String getClassForTable(String tableName) {
} }
/** /**
* @return just the last spegment of the class name -- all package info stripped. * @return just the last spegment of the class name -- all package info
* stripped.
*/ */
public String getShortClassForTable(String tableName) { public String getShortClassForTable(String tableName) {
String fullClass = getClassForTable(tableName); String fullClass = getClassForTable(tableName);
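The three accessors above all hang off a single lastIndexOf('.') split of the user-supplied class name. A small illustrative run, with a made-up --class-name value rather than anything taken from this patch:

    public class NameSplitDemo {
      public static void main(String[] args) {
        String predefinedClass = "com.example.orm.Employee";  // hypothetical --class-name
        int lastDot = predefinedClass.lastIndexOf('.');
        String pkg = (lastDot == -1) ? null : predefinedClass.substring(0, lastDot);
        String shortName = predefinedClass.substring(lastDot + 1);
        System.out.println(pkg);              // com.example.orm          (package part)
        System.out.println(predefinedClass);  // com.example.orm.Employee (full class)
        System.out.println(shortName);        // Employee                 (short class)
      }
    }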
View File
@ -77,7 +77,8 @@ public abstract long getNumMapInputRecords(Job job)
/** /**
* Set the mapper speculative execution property for a job. * Set the mapper speculative execution property for a job.
*/ */
public abstract void setJobMapSpeculativeExecution(Job job, boolean isEnabled); public abstract void setJobMapSpeculativeExecution(Job job,
boolean isEnabled);
/** /**
* Sets the Jobtracker address to use for a job. * Sets the Jobtracker address to use for a job.
@ -123,7 +124,7 @@ public abstract long getNumMapInputRecords(Job job)
public abstract MapContext getMapContextForIOPath( public abstract MapContext getMapContextForIOPath(
Configuration conf, Path p); Configuration conf, Path p);
public final static synchronized HadoopShim get() { public static final synchronized HadoopShim get() {
return ShimLoader.getHadoopShim(null); return ShimLoader.getHadoopShim(null);
} }
} }
View File
@ -174,8 +174,8 @@ private static <T> T loadShim(List<String> matchExprs,
return shim; return shim;
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("Could not load shim in class " + throw new RuntimeException("Could not load shim in class "
className, e); + className, e);
} }
} }
} }
View File
@ -52,7 +52,8 @@
*/ */
public abstract class BaseSqoopTool extends SqoopTool { public abstract class BaseSqoopTool extends SqoopTool {
public static final Log LOG = LogFactory.getLog(BaseSqoopTool.class.getName()); public static final Log LOG = LogFactory.getLog(
BaseSqoopTool.class.getName());
public static final String HELP_STR = "\nTry --help for usage instructions."; public static final String HELP_STR = "\nTry --help for usage instructions.";
@ -248,7 +249,8 @@ protected RelatedOptions getCommonOptions() {
.hasArg().withDescription("Set authentication password") .hasArg().withDescription("Set authentication password")
.withLongOpt(PASSWORD_ARG) .withLongOpt(PASSWORD_ARG)
.create()); .create());
commonOpts.addOption(OptionBuilder.withDescription("Read password from console") commonOpts.addOption(OptionBuilder
.withDescription("Read password from console")
.create(PASSWORD_PROMPT_ARG)); .create(PASSWORD_PROMPT_ARG));
commonOpts.addOption(OptionBuilder.withArgName("dir") commonOpts.addOption(OptionBuilder.withArgName("dir")
@ -342,10 +344,11 @@ protected RelatedOptions getOutputFormatOptions() {
} }
/** /**
* @return options governing input format delimiters * @return options governing input format delimiters.
*/ */
protected RelatedOptions getInputFormatOptions() { protected RelatedOptions getInputFormatOptions() {
RelatedOptions inputFormatOpts = new RelatedOptions("Input parsing arguments"); RelatedOptions inputFormatOpts =
new RelatedOptions("Input parsing arguments");
inputFormatOpts.addOption(OptionBuilder.withArgName("char") inputFormatOpts.addOption(OptionBuilder.withArgName("char")
.hasArg() .hasArg()
.withDescription("Sets the input field separator") .withDescription("Sets the input field separator")
@ -380,7 +383,8 @@ protected RelatedOptions getInputFormatOptions() {
* @return options related to code generation. * @return options related to code generation.
*/ */
protected RelatedOptions getCodeGenOpts(boolean multiTable) { protected RelatedOptions getCodeGenOpts(boolean multiTable) {
RelatedOptions codeGenOpts = new RelatedOptions("Code generation arguments"); RelatedOptions codeGenOpts =
new RelatedOptions("Code generation arguments");
codeGenOpts.addOption(OptionBuilder.withArgName("dir") codeGenOpts.addOption(OptionBuilder.withArgName("dir")
.hasArg() .hasArg()
.withDescription("Output directory for generated code") .withDescription("Output directory for generated code")
View File
@ -62,7 +62,7 @@ public List<String> getGeneratedJarFiles() {
} }
/** /**
* Generate the .class and .jar files * Generate the .class and .jar files.
* @return the filename of the emitted jar file. * @return the filename of the emitted jar file.
* @throws IOException * @throws IOException
*/ */
View File
@ -45,7 +45,6 @@
import org.apache.hadoop.sqoop.orm.ClassWriter; import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager; import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.shims.ShimLoader; import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.tool.SqoopTool;
import org.apache.hadoop.sqoop.util.ExportException; import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException; import org.apache.hadoop.sqoop.util.ImportException;
@ -77,7 +76,8 @@ private void exportTable(SqoopOptions options, String tableName)
// Generate the ORM code for the tables. // Generate the ORM code for the tables.
jarFile = codeGenerator.generateORM(options, tableName); jarFile = codeGenerator.generateORM(options, tableName);
ExportJobContext context = new ExportJobContext(tableName, jarFile, options); ExportJobContext context = new ExportJobContext(tableName, jarFile,
options);
manager.exportTable(context); manager.exportTable(context);
} }
@ -94,7 +94,8 @@ public int run(SqoopOptions options) {
try { try {
exportTable(options, options.getTableName()); exportTable(options, options.getTableName());
} catch (IOException ioe) { } catch (IOException ioe) {
LOG.error("Encountered IOException running export job: " + ioe.toString()); LOG.error("Encountered IOException running export job: "
+ ioe.toString());
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) { if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
throw new RuntimeException(ioe); throw new RuntimeException(ioe);
} else { } else {
View File
@ -21,7 +21,6 @@
import java.util.Set; import java.util.Set;
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions; import org.apache.hadoop.sqoop.cli.ToolOptions;
/** /**
View File
@ -19,32 +19,13 @@
package org.apache.hadoop.sqoop.tool; package org.apache.hadoop.sqoop.tool;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.sqoop.Sqoop; import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.hive.HiveImport; import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.ExportJobContext;
import org.apache.hadoop.sqoop.manager.ImportJobContext;
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.tool.SqoopTool;
import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException; import org.apache.hadoop.sqoop.util.ImportException;
/** /**
@ -84,7 +65,8 @@ public int run(SqoopOptions options) {
} }
} }
} catch (IOException ioe) { } catch (IOException ioe) {
LOG.error("Encountered IOException running import job: " + ioe.toString()); LOG.error("Encountered IOException running import job: "
+ ioe.toString());
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) { if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
throw new RuntimeException(ioe); throw new RuntimeException(ioe);
} else { } else {
View File
@ -84,7 +84,8 @@ protected void importTable(SqoopOptions options, String tableName,
jarFile = codeGenerator.generateORM(options, tableName); jarFile = codeGenerator.generateORM(options, tableName);
// Do the actual import. // Do the actual import.
ImportJobContext context = new ImportJobContext(tableName, jarFile, options); ImportJobContext context = new ImportJobContext(tableName, jarFile,
options);
manager.importTable(context); manager.importTable(context);
// If the user wants this table to be in Hive, perform that post-load. // If the user wants this table to be in Hive, perform that post-load.
@ -120,7 +121,8 @@ public int run(SqoopOptions options) {
// Import a single table the user specified. // Import a single table the user specified.
importTable(options, options.getTableName(), hiveImport); importTable(options, options.getTableName(), hiveImport);
} catch (IOException ioe) { } catch (IOException ioe) {
LOG.error("Encountered IOException running import job: " + ioe.toString()); LOG.error("Encountered IOException running import job: "
+ ioe.toString());
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) { if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
throw new RuntimeException(ioe); throw new RuntimeException(ioe);
} else { } else {
View File
@ -28,7 +28,7 @@
import org.apache.hadoop.sqoop.cli.ToolOptions; import org.apache.hadoop.sqoop.cli.ToolOptions;
/** /**
* Tool that lists available databases on a server * Tool that lists available databases on a server.
*/ */
public class ListDatabasesTool extends BaseSqoopTool { public class ListDatabasesTool extends BaseSqoopTool {
View File
@ -83,7 +83,7 @@ public abstract class SqoopTool {
* @param cls the class providing the tool. * @param cls the class providing the tool.
* @param description a user-friendly description of the tool's function. * @param description a user-friendly description of the tool's function.
*/ */
private static final void registerTool(String toolName, private static void registerTool(String toolName,
Class<? extends SqoopTool> cls, String description) { Class<? extends SqoopTool> cls, String description) {
TOOLS.put(toolName, cls); TOOLS.put(toolName, cls);
DESCRIPTIONS.put(toolName, description); DESCRIPTIONS.put(toolName, description);
@ -169,7 +169,8 @@ public void configureOptions(ToolOptions opts) {
* @param opts the configured tool options * @param opts the configured tool options
*/ */
public void printHelp(ToolOptions opts) { public void printHelp(ToolOptions opts) {
System.out.println("usage: sqoop " + getToolName() + " [GENERIC-ARGS] [TOOL-ARGS]"); System.out.println("usage: sqoop " + getToolName()
+ " [GENERIC-ARGS] [TOOL-ARGS]");
System.out.println(""); System.out.println("");
opts.printHelp(); opts.printHelp();
View File
@ -37,7 +37,8 @@ public abstract class AsyncSink {
/** /**
* Wait until the stream has been processed. * Wait until the stream has been processed.
* @return a status code indicating success or failure. 0 is typical for success. * @return a status code indicating success or failure. 0 is typical for
* success.
*/ */
public abstract int join() throws InterruptedException; public abstract int join() throws InterruptedException;
View File

@ -29,19 +29,17 @@
/** /**
* Allows you to add and remove jar-files from the running JVM by * Allows you to add and remove jar-files from the running JVM by
* instantiating classloaders for them. * instantiating classloaders for them.
*
*
*
*/ */
public final class ClassLoaderStack { public final class ClassLoaderStack {
public static final Log LOG = LogFactory.getLog(ClassLoaderStack.class.getName()); public static final Log LOG = LogFactory.getLog(
ClassLoaderStack.class.getName());
private ClassLoaderStack() { private ClassLoaderStack() {
} }
/** /**
* Sets the classloader for the current thread * Sets the classloader for the current thread.
*/ */
public static void setCurrentClassLoader(ClassLoader cl) { public static void setCurrentClassLoader(ClassLoader cl) {
LOG.debug("Restoring classloader: " + cl.toString()); LOG.debug("Restoring classloader: " + cl.toString());
@ -49,23 +47,27 @@ public static void setCurrentClassLoader(ClassLoader cl) {
} }
/** /**
* Adds a ClassLoader to the top of the stack that will load from the Jar file * Adds a ClassLoader to the top of the stack that will load from the Jar
* of your choice. Returns the previous classloader so you can restore it * file of your choice. Returns the previous classloader so you can restore
* if need be, later. * it if need be, later.
* *
* @param jarFile The filename of a jar file that you want loaded into this JVM * @param jarFile The filename of a jar file that you want loaded into this
* @param testClassName The name of the class to load immediately (optional) * JVM.
* @param testClassName The name of the class to load immediately
* (optional).
*/ */
public static ClassLoader addJarFile(String jarFile, String testClassName) public static ClassLoader addJarFile(String jarFile, String testClassName)
throws IOException { throws IOException {
// load the classes from the ORM JAR file into the current VM // load the classes from the ORM JAR file into the current VM.
ClassLoader prevClassLoader = Thread.currentThread().getContextClassLoader(); ClassLoader prevClassLoader =
Thread.currentThread().getContextClassLoader();
String urlPath = "jar:file://" + new File(jarFile).getAbsolutePath() + "!/"; String urlPath = "jar:file://" + new File(jarFile).getAbsolutePath() + "!/";
LOG.debug("Attempting to load jar through URL: " + urlPath); LOG.debug("Attempting to load jar through URL: " + urlPath);
LOG.debug("Previous classloader is " + prevClassLoader); LOG.debug("Previous classloader is " + prevClassLoader);
URL [] jarUrlArray = {new URL(urlPath)}; URL [] jarUrlArray = {new URL(urlPath)};
URLClassLoader cl = URLClassLoader.newInstance(jarUrlArray, prevClassLoader); URLClassLoader cl = URLClassLoader.newInstance(jarUrlArray,
prevClassLoader);
try { try {
if (null != testClassName) { if (null != testClassName) {
// try to load a class from the jar to force loading now. // try to load a class from the jar to force loading now.
@ -74,7 +76,8 @@ public static ClassLoader addJarFile(String jarFile, String testClassName)
} }
LOG.debug("Loaded jar into current JVM: " + urlPath); LOG.debug("Loaded jar into current JVM: " + urlPath);
} catch (ClassNotFoundException cnfe) { } catch (ClassNotFoundException cnfe) {
throw new IOException("Could not load jar " + jarFile + " into JVM. (Could not find class " throw new IOException("Could not load jar " + jarFile
+ " into JVM. (Could not find class "
+ testClassName + ".)", cnfe); + testClassName + ".)", cnfe);
} }
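The javadoc above spells out the contract: addJarFile() pushes a URLClassLoader for the given jar and hands back the previous context classloader so the caller can restore it later. A usage sketch under stated assumptions (placeholder jar path and class name, Sqoop itself on the classpath):

    import org.apache.hadoop.sqoop.util.ClassLoaderStack;

    public class ClassLoaderStackUsage {
      public static void main(String[] args) throws Exception {
        // Placeholder jar/class; in real use these come from the codegen step.
        ClassLoader prev = ClassLoaderStack.addJarFile(
            "/tmp/sqoop/compile/Employee.jar", "Employee");
        try {
          // ... work that needs classes from the pushed jar ...
        } finally {
          ClassLoaderStack.setCurrentClassLoader(prev);  // restore the old loader
        }
      }
    }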
View File
@ -18,8 +18,6 @@
package org.apache.hadoop.sqoop.util; package org.apache.hadoop.sqoop.util;
import java.io.InputStream;
/** /**
* Partial implementation of AsyncSink that relies on ErrorableThread to * Partial implementation of AsyncSink that relies on ErrorableThread to
* provide a status bit for the join() method. * provide a status bit for the join() method.
View File
@ -61,15 +61,16 @@ public static void main(String... aArgs) throws FileNotFoundException {
* *
* @param aStartingDir is a valid directory, which can be read. * @param aStartingDir is a valid directory, which can be read.
*/ */
public static List<File> getFileListing(File aStartingDir) throws FileNotFoundException { public static List<File> getFileListing(File aStartingDir)
throws FileNotFoundException {
validateDirectory(aStartingDir); validateDirectory(aStartingDir);
List<File> result = getFileListingNoSort(aStartingDir); List<File> result = getFileListingNoSort(aStartingDir);
Collections.sort(result); Collections.sort(result);
return result; return result;
} }
// PRIVATE // private static List<File> getFileListingNoSort(File aStartingDir)
private static List<File> getFileListingNoSort(File aStartingDir) throws FileNotFoundException { throws FileNotFoundException {
List<File> result = new ArrayList<File>(); List<File> result = new ArrayList<File>();
File[] filesAndDirs = aStartingDir.listFiles(); File[] filesAndDirs = aStartingDir.listFiles();
List<File> filesDirs = Arrays.asList(filesAndDirs); List<File> filesDirs = Arrays.asList(filesAndDirs);
@ -88,23 +89,26 @@ private static List<File> getFileListingNoSort(File aStartingDir) throws FileNot
/** /**
* Directory is valid if it exists, does not represent a file, and can be read. * Directory is valid if it exists, does not represent a file, and can be read.
*/ */
private static void validateDirectory(File aDirectory) throws FileNotFoundException { private static void validateDirectory(File aDirectory)
throws FileNotFoundException {
if (aDirectory == null) { if (aDirectory == null) {
throw new IllegalArgumentException("Directory should not be null."); throw new IllegalArgumentException("Directory should not be null.");
} }
if (!aDirectory.exists()) { if (!aDirectory.exists()) {
throw new FileNotFoundException("Directory does not exist: " + aDirectory); throw new FileNotFoundException("Directory does not exist: "
+ aDirectory);
} }
if (!aDirectory.isDirectory()) { if (!aDirectory.isDirectory()) {
throw new IllegalArgumentException("Is not a directory: " + aDirectory); throw new IllegalArgumentException("Is not a directory: " + aDirectory);
} }
if (!aDirectory.canRead()) { if (!aDirectory.canRead()) {
throw new IllegalArgumentException("Directory cannot be read: " + aDirectory); throw new IllegalArgumentException("Directory cannot be read: "
+ aDirectory);
} }
} }
/** /**
* Recursively delete a directory and all its children * Recursively delete a directory and all its children.
* @param dir is a valid directory. * @param dir is a valid directory.
*/ */
public static void recursiveDeleteDir(File dir) throws IOException { public static void recursiveDeleteDir(File dir) throws IOException {
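getFileListing() validates the starting directory, recurses through it, and returns a sorted list. A self-contained sketch of that validate/recurse/sort shape — a simplified pattern, not the class's exact body:

    import java.io.File;
    import java.io.FileNotFoundException;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class RecursiveListingSketch {
      public static List<File> listAll(File start) throws FileNotFoundException {
        if (start == null || !start.isDirectory() || !start.canRead()) {
          throw new FileNotFoundException("Not a readable directory: " + start);
        }
        List<File> result = new ArrayList<File>();
        for (File f : start.listFiles()) {
          result.add(f);                // files and subdirectories alike
          if (f.isDirectory()) {
            result.addAll(listAll(f));  // recurse
          }
        }
        Collections.sort(result);
        return result;
      }

      public static void main(String[] args) throws FileNotFoundException {
        for (File f : listAll(new File("."))) {
          System.out.println(f.getPath());
        }
      }
    }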
View File
@ -38,8 +38,8 @@ private JdbcUrl() {
} }
/** /**
* @return the database name from the connect string, which is typically the 'path' * @return the database name from the connect string, which is typically the
* component, or null if we can't. * 'path' component, or null if we can't.
*/ */
public static String getDatabaseName(String connectString) { public static String getDatabaseName(String connectString) {
try { try {
@ -48,7 +48,8 @@ public static String getDatabaseName(String connectString) {
if (-1 == schemeEndOffset) { if (-1 == schemeEndOffset) {
// couldn't find one? try our best here. // couldn't find one? try our best here.
sanitizedString = "http://" + connectString; sanitizedString = "http://" + connectString;
LOG.warn("Could not find database access scheme in connect string " + connectString); LOG.warn("Could not find database access scheme in connect string "
+ connectString);
} else { } else {
sanitizedString = "http" + connectString.substring(schemeEndOffset); sanitizedString = "http" + connectString.substring(schemeEndOffset);
} }
@ -81,7 +82,8 @@ public static String getHostName(String connectString) {
String sanitizedString = null; String sanitizedString = null;
int schemeEndOffset = connectString.indexOf("://"); int schemeEndOffset = connectString.indexOf("://");
if (-1 == schemeEndOffset) { if (-1 == schemeEndOffset) {
// couldn't find one? ok, then there's no problem, it should work as a URL. // Couldn't find one? ok, then there's no problem, it should work as a
// URL.
sanitizedString = connectString; sanitizedString = connectString;
} else { } else {
sanitizedString = "http" + connectString.substring(schemeEndOffset); sanitizedString = "http" + connectString.substring(schemeEndOffset);
@ -104,7 +106,8 @@ public static int getPort(String connectString) {
String sanitizedString = null; String sanitizedString = null;
int schemeEndOffset = connectString.indexOf("://"); int schemeEndOffset = connectString.indexOf("://");
if (-1 == schemeEndOffset) { if (-1 == schemeEndOffset) {
// couldn't find one? ok, then there's no problem, it should work as a URL. // Couldn't find one? ok, then there's no problem, it should work as a
// URL.
sanitizedString = connectString; sanitizedString = connectString;
} else { } else {
sanitizedString = "http" + connectString.substring(schemeEndOffset); sanitizedString = "http" + connectString.substring(schemeEndOffset);
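All three accessors in this file rely on the same trick: swap the JDBC scheme for "http" so that java.net.URL can do the parsing. A short sketch with a made-up MySQL connect string:

    import java.net.URL;

    public class JdbcUrlSketch {
      public static void main(String[] args) throws Exception {
        String connectString = "jdbc:mysql://db.example.com:3306/sales";  // made up
        int schemeEndOffset = connectString.indexOf("://");
        String sanitized = "http" + connectString.substring(schemeEndOffset);
        URL url = new URL(sanitized);
        System.out.println(url.getHost());  // db.example.com
        System.out.println(url.getPort());  // 3306
        System.out.println(url.getPath());  // /sales -> database name "sales"
      }
    }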
View File
@ -32,7 +32,8 @@
*/ */
public class LoggingAsyncSink extends AsyncSink { public class LoggingAsyncSink extends AsyncSink {
public static final Log LOG = LogFactory.getLog(LoggingAsyncSink.class.getName()); public static final Log LOG = LogFactory.getLog(
LoggingAsyncSink.class.getName());
private Log contextLog; private Log contextLog;
View File
@ -31,7 +31,8 @@
*/ */
public class NullAsyncSink extends AsyncSink { public class NullAsyncSink extends AsyncSink {
public static final Log LOG = LogFactory.getLog(NullAsyncSink.class.getName()); public static final Log LOG = LogFactory.getLog(
NullAsyncSink.class.getName());
private Thread child; private Thread child;
@ -69,7 +70,8 @@ public void run() {
} }
} }
} catch (IOException ioe) { } catch (IOException ioe) {
LOG.warn("IOException reading from (ignored) stream: " + ioe.toString()); LOG.warn("IOException reading from (ignored) stream: "
+ ioe.toString());
} }
try { try {
View File
@ -47,7 +47,7 @@ public void stopClock() {
private static final double ONE_BILLION = 1000.0 * 1000.0 * 1000.0; private static final double ONE_BILLION = 1000.0 * 1000.0 * 1000.0;
/** maximum number of digits after the decimal place */ /** Maximum number of digits after the decimal place. */
private static final int MAX_PLACES = 4; private static final int MAX_PLACES = 4;
/** /**
@ -63,8 +63,8 @@ private Double inSeconds(long nanos) {
/** /**
* @return a string of the form "xxxx bytes" or "xxxxx KB" or "xxxx GB", scaled * @return a string of the form "xxxx bytes" or "xxxxx KB" or "xxxx GB",
* as is appropriate for the current value. * scaled as is appropriate for the current value.
*/ */
private String formatBytes() { private String formatBytes() {
double val; double val;
@ -125,7 +125,8 @@ private String formatSpeed() {
} }
public String toString() { public String toString() {
return formatBytes() + " in " + formatTimeInSeconds() + " (" + formatSpeed() + ")"; return formatBytes() + " in " + formatTimeInSeconds() + " ("
+ formatSpeed() + ")";
} }
} }
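toString() above reports transfer volume, elapsed time, and speed. The nanosecond-to-second conversion (ONE_BILLION) is taken from the hunk; the 1024-based unit scaling in the sketch below is an assumption about formatBytes(), whose body is not shown here:

    public class CounterMathDemo {
      public static void main(String[] args) {
        long bytes = 512L * 1024 * 1024;   // hypothetical 512 MB moved
        long nanos = 64_000_000_000L;      // hypothetical 64 seconds on the clock
        double seconds = nanos / (1000.0 * 1000.0 * 1000.0);  // ONE_BILLION
        double mb = bytes / (1024.0 * 1024.0);                // assumed divisor
        System.out.printf("%.4f MB in %.4f seconds (%.4f MB/sec)%n",
            mb, seconds, mb / seconds);    // 512 MB in 64 s -> 8 MB/sec
      }
    }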
View File
@ -29,17 +29,18 @@
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
/** /**
* Utility methods to format and print ResultSet objects * Utility methods to format and print ResultSet objects.
*/ */
public class ResultSetPrinter { public class ResultSetPrinter {
public static final Log LOG = LogFactory.getLog(ResultSetPrinter.class.getName()); public static final Log LOG = LogFactory.getLog(
ResultSetPrinter.class.getName());
// max output width to allocate to any column of the printed results. // max output width to allocate to any column of the printed results.
private static final int MAX_COL_WIDTH = 20; private static final int MAX_COL_WIDTH = 20;
/** /**
* Print 'str' to the string builder, padded to 'width' chars * Print 'str' to the string builder, padded to 'width' chars.
*/ */
private static void printPadded(StringBuilder sb, String str, int width) { private static void printPadded(StringBuilder sb, String str, int width) {
int numPad; int numPad;
@ -72,7 +73,8 @@ public final void printResultSet(PrintWriter pw, ResultSet results)
ResultSetMetaData metadata = results.getMetaData(); ResultSetMetaData metadata = results.getMetaData();
for (int i = 1; i < cols + 1; i++) { for (int i = 1; i < cols + 1; i++) {
String colName = metadata.getColumnName(i); String colName = metadata.getColumnName(i);
colWidths[i - 1] = Math.min(metadata.getColumnDisplaySize(i), MAX_COL_WIDTH); colWidths[i - 1] = Math.min(metadata.getColumnDisplaySize(i),
MAX_COL_WIDTH);
if (colName == null || colName.equals("")) { if (colName == null || colName.equals("")) {
colName = metadata.getColumnLabel(i) + "*"; colName = metadata.getColumnLabel(i) + "*";
} }
View File
@ -30,19 +30,20 @@
/** /**
* Stress test export procedure by running a large-scale export to MySQL. * Stress test export procedure by running a large-scale export to MySQL.
* This requires MySQL be configured with a database that can be accessed * This requires MySQL be configured with a database that can be accessed by
* by the specified username without a password. The user must be able to * the specified username without a password. The user must be able to create
* create and drop tables in the database. * and drop tables in the database.
* *
* Run with: src/scripts/run-perftest.sh ExportStressTest (connect-str) (username) * Run with: src/scripts/run-perftest.sh ExportStressTest \
* (connect-str) (username)
*/ */
public class ExportStressTest extends Configured implements Tool { public class ExportStressTest extends Configured implements Tool {
// Export 10 GB of data. Each record is ~100 bytes. // Export 10 GB of data. Each record is ~100 bytes.
public final static int NUM_FILES = 10; public static final int NUM_FILES = 10;
public final static int RECORDS_PER_FILE = 10 * 1024 * 1024; public static final int RECORDS_PER_FILE = 10 * 1024 * 1024;
public final static String ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; public static final String ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
public ExportStressTest() { public ExportStressTest() {
} }
@ -88,7 +89,8 @@ public void createTable(String connectStr, String username) throws Exception {
Connection conn = DriverManager.getConnection(connectStr, username, null); Connection conn = DriverManager.getConnection(connectStr, username, null);
conn.setAutoCommit(false); conn.setAutoCommit(false);
PreparedStatement stmt = conn.prepareStatement( PreparedStatement stmt = conn.prepareStatement(
"DROP TABLE IF EXISTS ExportStressTestTable", ResultSet.TYPE_FORWARD_ONLY, "DROP TABLE IF EXISTS ExportStressTestTable",
ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_READ_ONLY); ResultSet.CONCUR_READ_ONLY);
stmt.executeUpdate(); stmt.executeUpdate();
stmt.close(); stmt.close();
@ -103,7 +105,9 @@ public void createTable(String connectStr, String username) throws Exception {
conn.close(); conn.close();
} }
/** Actually run the export of the generated data to the user-created table. */ /**
* Actually run the export of the generated data to the user-created table.
*/
public void runExport(String connectStr, String username) throws Exception { public void runExport(String connectStr, String username) throws Exception {
SqoopOptions options = new SqoopOptions(getConf()); SqoopOptions options = new SqoopOptions(getConf());
options.setConnectString(connectStr); options.setConnectString(connectStr);
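For reference, the sizing constants shown above multiply out to the advertised volume (record counts from the constants, the ~100-byte record size from the class comment):

    10 files x (10 x 1024 x 1024) records/file = 104,857,600 records
    104,857,600 records x ~100 bytes/record ~= 1.05 x 10^10 bytes ~= 10 GB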
View File
@ -25,16 +25,17 @@
* A simple benchmark to performance test LobFile reader/writer speed. * A simple benchmark to performance test LobFile reader/writer speed.
* Writes out 10 GB of data to the local disk and then reads it back. * Writes out 10 GB of data to the local disk and then reads it back.
* Run with: * Run with:
* HADOOP_OPTS=-agentlib:hprof=cpu=samples src/scripts/run-perftest.sh LobFilePerfTest * HADOOP_OPTS=-agentlib:hprof=cpu=samples \
* src/scripts/run-perftest.sh LobFilePerfTest
*/ */
public class LobFilePerfTest { public class LobFilePerfTest {
long recordLen = 20 * 1024 * 1024; // 20 MB records private long recordLen = 20 * 1024 * 1024; // 20 MB records
int numRecords = 500; private int numRecords = 500;
Configuration conf; private Configuration conf;
Path p; private Path p;
long startTime; private long startTime;
byte [] record; private byte [] record;
public LobFilePerfTest() { public LobFilePerfTest() {
conf = new Configuration(); conf = new Configuration();
@ -99,7 +100,7 @@ private void readFile() throws Exception {
System.out.println("Read " + recordSize + " bytes"); System.out.println("Read " + recordSize + " bytes");
} }
private void run() throws Exception { public void run() throws Exception {
makeRecordBody(); makeRecordBody();
writeFile(); writeFile();
readFile(); readFile();
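The same back-of-the-envelope check works for this benchmark's constants: 500 records x 20 MB/record = 10,000 MB, which is the roughly 10 GB of local-disk data the class comment above promises.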
View File
@ -30,14 +30,14 @@
public class LobFileStressTest { public class LobFileStressTest {
// Big records in testBigFile() are 5 GB each. // Big records in testBigFile() are 5 GB each.
public final static long LARGE_RECORD_LEN = 5L * 1024L * 1024L * 1024L; public static final long LARGE_RECORD_LEN = 5L * 1024L * 1024L * 1024L;
int numRandomTrials = 1000000; private int numRandomTrials = 1000000;
Configuration conf; private Configuration conf;
boolean allPassed; private boolean allPassed;
long lastCompressPos; // start offset of the last record in the file. private long lastCompressPos; // start offset of the last record in the file.
long lastRawPos; private long lastRawPos;
public LobFileStressTest() { public LobFileStressTest() {
conf = new Configuration(); conf = new Configuration();
@ -314,7 +314,7 @@ private void testBigFile(boolean compress) throws Exception {
+ compress + ". "); + compress + ". ");
Path p = getBigFilePath(compress); Path p = getBigFilePath(compress);
long startOffsets [] = new long[NUM_RECORDS]; long [] startOffsets = new long[NUM_RECORDS];
// Write the file. Five records, 5 GB a piece. // Write the file. Five records, 5 GB a piece.
System.out.print("Testing write. "); System.out.print("Testing write. ");
@ -364,8 +364,7 @@ private void testBigFile(boolean compress) throws Exception {
} }
} }
public void run() throws Exception {
private void run() throws Exception {
writeIntegerFile(true); writeIntegerFile(true);
writeIntegerFile(false); writeIntegerFile(false);
testSequentialScan(false); testSequentialScan(false);
View File
@ -33,7 +33,7 @@
/** /**
* Hadoop Shim for CDH3 (based on 0.20.2) * Hadoop Shim for CDH3 (based on 0.20.2).
*/ */
public class CDH3Shim extends CommonHadoopShim { public class CDH3Shim extends CommonHadoopShim {
@Override @Override
View File
@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.RecordReader;
@ -65,10 +64,10 @@ public CombineShimRecordReader(CombineFileSplit split,
} }
@Override @Override
public void initialize(InputSplit split, TaskAttemptContext context) public void initialize(InputSplit curSplit, TaskAttemptContext curContext)
throws IOException, InterruptedException { throws IOException, InterruptedException {
this.split = (CombineFileSplit) split; this.split = (CombineFileSplit) curSplit;
this.context = context; this.context = curContext;
if (null == rr) { if (null == rr) {
createChildReader(); createChildReader();
@ -77,7 +76,7 @@ public void initialize(InputSplit split, TaskAttemptContext context)
FileSplit fileSplit = new FileSplit(this.split.getPath(index), FileSplit fileSplit = new FileSplit(this.split.getPath(index),
this.split.getOffset(index), this.split.getLength(index), this.split.getOffset(index), this.split.getLength(index),
this.split.getLocations()); this.split.getLocations());
this.rr.initialize(fileSplit, context); this.rr.initialize(fileSplit, this.context);
} }
View File

@ -18,18 +18,13 @@
package org.apache.hadoop.sqoop.mapreduce; package org.apache.hadoop.sqoop.mapreduce;
import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobContext;
View File
@ -1,8 +1,8 @@
/** /**
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file * regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
@ -29,7 +29,6 @@
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.OutputFormat;
@ -52,7 +51,7 @@
* Uses DBOutputFormat/DBConfiguration for configuring the output. * Uses DBOutputFormat/DBConfiguration for configuring the output.
*/ */
public class ExportOutputFormat<K extends SqoopRecord, V> public class ExportOutputFormat<K extends SqoopRecord, V>
extends OutputFormat<K,V> { extends OutputFormat<K, V> {
/** conf key: number of rows to export per INSERT statement. */ /** conf key: number of rows to export per INSERT statement. */
public static final String RECORDS_PER_STATEMENT_KEY = public static final String RECORDS_PER_STATEMENT_KEY =
@ -72,6 +71,7 @@ public class ExportOutputFormat<K extends SqoopRecord, V>
private static final Log LOG = LogFactory.getLog(ExportOutputFormat.class); private static final Log LOG = LogFactory.getLog(ExportOutputFormat.class);
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public void checkOutputSpecs(JobContext context) public void checkOutputSpecs(JobContext context)
throws IOException, InterruptedException { throws IOException, InterruptedException {
@ -90,6 +90,7 @@ public void checkOutputSpecs(JobContext context)
} }
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public OutputCommitter getOutputCommitter(TaskAttemptContext context) public OutputCommitter getOutputCommitter(TaskAttemptContext context)
throws IOException, InterruptedException { throws IOException, InterruptedException {
@ -105,6 +106,7 @@ public void setupTask(TaskAttemptContext taskContext) { }
}; };
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
throws IOException { throws IOException {
@ -454,6 +456,7 @@ private void insertRows(boolean closeConn)
} }
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public void close(TaskAttemptContext context) public void close(TaskAttemptContext context)
throws IOException, InterruptedException { throws IOException, InterruptedException {
@ -474,6 +477,7 @@ public void close(TaskAttemptContext context)
} }
} }
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public void write(K key, V value) public void write(K key, V value)
throws InterruptedException, IOException { throws InterruptedException, IOException {
View File
@ -18,40 +18,14 @@
package org.apache.hadoop.sqoop.mapreduce; package org.apache.hadoop.sqoop.mapreduce;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.CharBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat; import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
import org.apache.hadoop.sqoop.lib.FieldFormatter;
import org.apache.hadoop.sqoop.lib.RecordParser;
import org.apache.hadoop.sqoop.util.AsyncSink;
import org.apache.hadoop.sqoop.util.DirectImportUtils;
import org.apache.hadoop.sqoop.util.ErrorableAsyncSink;
import org.apache.hadoop.sqoop.util.ErrorableThread;
import org.apache.hadoop.sqoop.util.ImportException;
import org.apache.hadoop.sqoop.util.JdbcUrl;
import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
import org.apache.hadoop.sqoop.util.PerfCounters;
/** /**
* InputFormat designed to take data-driven splits and feed them to a mysqldump * InputFormat designed to take data-driven splits and feed them to a mysqldump
View File
@ -1,8 +1,8 @@
/** /**
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file * regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
@ -30,8 +30,9 @@
* Oracle-specific SQL formatting overrides default ExportOutputFormat's. * Oracle-specific SQL formatting overrides default ExportOutputFormat's.
*/ */
public class OracleExportOutputFormat<K extends SqoopRecord, V> public class OracleExportOutputFormat<K extends SqoopRecord, V>
extends ExportOutputFormat<K,V> { extends ExportOutputFormat<K, V> {
@Override
/** {@inheritDoc} */ /** {@inheritDoc} */
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
throws IOException { throws IOException {
View File
@ -28,7 +28,6 @@
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptContext;
@ -40,7 +39,7 @@
public class RawKeyTextOutputFormat<K, V> extends FileOutputFormat<K, V> { public class RawKeyTextOutputFormat<K, V> extends FileOutputFormat<K, V> {
protected static class RawKeyRecordWriter<K, V> extends RecordWriter<K, V> { protected static class RawKeyRecordWriter<K, V> extends RecordWriter<K, V> {
private static final String utf8 = "UTF-8"; private static final String UTF8 = "UTF-8";
protected DataOutputStream out; protected DataOutputStream out;
@ -59,7 +58,7 @@ private void writeObject(Object o) throws IOException {
Text to = (Text) o; Text to = (Text) o;
out.write(to.getBytes(), 0, to.getLength()); out.write(to.getBytes(), 0, to.getLength());
} else { } else {
out.write(o.toString().getBytes(utf8)); out.write(o.toString().getBytes(UTF8));
} }
} }
@ -67,7 +66,8 @@ public synchronized void write(K key, V value) throws IOException {
writeObject(key); writeObject(key);
} }
public synchronized void close(TaskAttemptContext context) throws IOException { public synchronized void close(TaskAttemptContext context)
throws IOException {
out.close(); out.close();
} }
} }
View File
@ -0,0 +1,17 @@
/**
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
View File
@ -0,0 +1,196 @@
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<!--
Licensed to Cloudera, Inc. under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<xsl:output method="html" indent="yes"/>
<xsl:decimal-format decimal-separator="." grouping-separator="," />
<xsl:key name="files" match="file" use="@name" />
<!-- Checkstyle XML Style Sheet by Stephane Bailliez <sbailliez@apache.org> -->
<!-- Part of the Checkstyle distribution found at http://checkstyle.sourceforge.net -->
<!-- Usage (generates checkstyle_report.html): -->
<!-- <checkstyle failonviolation="false" config="${check.config}"> -->
<!-- <fileset dir="${src.dir}" includes="**/*.java"/> -->
<!-- <formatter type="xml" toFile="${doc.dir}/checkstyle_report.xml"/> -->
<!-- </checkstyle> -->
<!-- <style basedir="${doc.dir}" destdir="${doc.dir}" -->
<!-- includes="checkstyle_report.xml" -->
<!-- style="${doc.dir}/checkstyle-noframes-sorted.xsl"/> -->
<xsl:template match="checkstyle">
<html>
<head>
<style type="text/css">
.bannercell {
border: 0px;
padding: 0px;
}
body {
margin-left: 10;
margin-right: 10;
font:normal 80% arial,helvetica,sanserif;
background-color:#FFFFFF;
color:#000000;
}
.a td {
background: #efefef;
}
.b td {
background: #fff;
}
th, td {
text-align: left;
vertical-align: top;
}
th {
font-weight:bold;
background: #ccc;
color: black;
}
table, th, td {
font-size:100%;
border: none
}
table.log tr td, tr th {
}
h2 {
font-weight:bold;
font-size:140%;
margin-bottom: 5;
}
h3 {
font-size:100%;
font-weight:bold;
background: #525D76;
color: white;
text-decoration: none;
padding: 5px;
margin-right: 2px;
margin-left: 2px;
margin-bottom: 0;
}
</style>
</head>
<body>
<a name="top"></a>
<!-- jakarta logo -->
<table border="0" cellpadding="0" cellspacing="0" width="100%">
<tr>
<td class="bannercell" rowspan="2">
<!--a href="http://jakarta.apache.org/">
<img src="http://jakarta.apache.org/images/jakarta-logo.gif" alt="http://jakarta.apache.org" align="left" border="0"/>
</a-->
</td>
<td class="text-align:right"><h2>CheckStyle Audit</h2></td>
</tr>
<tr>
<td class="text-align:right">Designed for use with <a href='http://checkstyle.sourceforge.net/'>CheckStyle</a> and <a href='http://jakarta.apache.org'>Ant</a>.</td>
</tr>
</table>
<hr size="1"/>
<!-- Summary part -->
<xsl:apply-templates select="." mode="summary"/>
<hr size="1" width="100%" align="left"/>
<!-- Package List part -->
<xsl:apply-templates select="." mode="filelist"/>
<hr size="1" width="100%" align="left"/>
<!-- For each package create its part -->
<xsl:apply-templates select="file[@name and generate-id(.) = generate-id(key('files', @name))]" />
<hr size="1" width="100%" align="left"/>
</body>
</html>
</xsl:template>
<xsl:template match="checkstyle" mode="filelist">
<h3>Files</h3>
<table class="log" border="0" cellpadding="5" cellspacing="2" width="100%">
<tr>
<th>Name</th>
<th>Errors</th>
</tr>
<xsl:for-each select="file[@name and generate-id(.) = generate-id(key('files', @name))]">
<xsl:sort data-type="number" order="descending" select="count(key('files', @name)/error)"/>
<xsl:variable name="errorCount" select="count(error)"/>
<tr>
<xsl:call-template name="alternated-row"/>
<td><a href="#f-{@name}"><xsl:value-of select="@name"/></a></td>
<td><xsl:value-of select="$errorCount"/></td>
</tr>
</xsl:for-each>
</table>
</xsl:template>
<xsl:template match="file">
<a name="f-{@name}"></a>
<h3>File <xsl:value-of select="@name"/></h3>
<table class="log" border="0" cellpadding="5" cellspacing="2" width="100%">
<tr>
<th>Error Description</th>
<th>Line</th>
</tr>
<xsl:for-each select="key('files', @name)/error">
<xsl:sort data-type="number" order="ascending" select="@line"/>
<tr>
<xsl:call-template name="alternated-row"/>
<td><xsl:value-of select="@message"/></td>
<td><xsl:value-of select="@line"/></td>
</tr>
</xsl:for-each>
</table>
<a href="#top">Back to top</a>
</xsl:template>
<xsl:template match="checkstyle" mode="summary">
<h3>Summary</h3>
<xsl:variable name="fileCount" select="count(file[@name and generate-id(.) = generate-id(key('files', @name))])"/>
<xsl:variable name="errorCount" select="count(file/error)"/>
<table class="log" border="0" cellpadding="5" cellspacing="2" width="100%">
<tr>
<th>Files</th>
<th>Errors</th>
</tr>
<tr>
<xsl:call-template name="alternated-row"/>
<td><xsl:value-of select="$fileCount"/></td>
<td><xsl:value-of select="$errorCount"/></td>
</tr>
</table>
</xsl:template>
<xsl:template name="alternated-row">
<xsl:attribute name="class">
<xsl:if test="position() mod 2 = 1">a</xsl:if>
<xsl:if test="position() mod 2 = 0">b</xsl:if>
</xsl:attribute>
</xsl:template>
</xsl:stylesheet>
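The stylesheet above groups the report's error elements by file and counts them. The hypothetical Java sketch below performs the same tally directly over the checkstyle-errors.xml that the new checkstyle target writes, which can be handy when the HTML report is not wanted; it is not part of the patch.

import java.io.File;
import java.util.Map;
import java.util.TreeMap;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public final class CheckstyleReportSummary {
  private CheckstyleReportSummary() { }

  public static void main(String [] args) throws Exception {
    // args[0] is the report path, e.g. build/checkstyle-errors.xml.
    Document doc = DocumentBuilderFactory.newInstance()
        .newDocumentBuilder().parse(new File(args[0]));
    Map<String, Integer> errorsPerFile = new TreeMap<String, Integer>();
    NodeList files = doc.getElementsByTagName("file");
    for (int i = 0; i < files.getLength(); i++) {
      Element file = (Element) files.item(i);
      // Each <file name="..."> holds one <error> element per violation.
      int count = file.getElementsByTagName("error").getLength();
      errorsPerFile.put(file.getAttribute("name"), count);
    }
    for (Map.Entry<String, Integer> e : errorsPerFile.entrySet()) {
      System.out.println(e.getValue() + "\t" + e.getKey());
    }
  }
}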

src/test/checkstyle.xml (new file, 209 lines)
View File

@ -0,0 +1,209 @@
<?xml version="1.0"?>
<!--
Licensed to Cloudera, Inc. under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.2//EN"
"http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
<!--
Checkstyle configuration for Sqoop that is based on the sun_checks.xml file
that is bundled with Checkstyle and includes checks for:
- the Java Language Specification at
http://java.sun.com/docs/books/jls/second_edition/html/index.html
- the Sun Code Conventions at http://java.sun.com/docs/codeconv/
- the Javadoc guidelines at
http://java.sun.com/j2se/javadoc/writingdoccomments/index.html
- the JDK Api documentation http://java.sun.com/j2se/docs/api/index.html
- some best practices
Checkstyle is very configurable. Be sure to read the documentation at
http://checkstyle.sf.net (or in your downloaded distribution).
Most Checks are configurable, be sure to consult the documentation.
To completely disable a check, just comment it out or delete it from the file.
Finally, it is worth reading the documentation.
-->
<module name="Checker">
<!-- Checks that a package.html file exists for each package. -->
<!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
<!-- module name="PackageHtml"/ -->
<!-- Checks whether files end with a new line. -->
<!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
<module name="NewlineAtEndOfFile"/>
<!-- Checks for Headers -->
<!-- See http://checkstyle.sf.net/config_header.html -->
<module name="Header">
<property name="headerFile"
value="${test.dir}/checkstyle-java-header.txt" />
</module>
<module name="FileLength"/>
<module name="FileTabCharacter"/>
<module name="TreeWalker">
<!-- Checks for Javadoc comments. -->
<!-- See http://checkstyle.sf.net/config_javadoc.html -->
<module name="JavadocType">
<property name="scope" value="public"/>
<property name="allowMissingParamTags" value="true"/>
</module>
<module name="JavadocStyle"/>
<module name="SuperClone"/>
<module name="SuperFinalize"/>
<!-- Checks for Naming Conventions. -->
<!-- See http://checkstyle.sf.net/config_naming.html -->
<module name="ConstantName"/>
<module name="ClassTypeParameterName">
<property name="format" value="^[A-Z]+$"/>
</module>
<module name="LocalFinalVariableName">
<property name="format" value="^[A-Z][_A-Z0-9]*$" />
</module>
<module name="LocalVariableName"/>
<module name="MemberName"/>
<module name="MethodName"/>
<module name="MethodTypeParameterName">
<property name="format" value="^[A-Z]+$"/>
</module>
<module name="PackageName"/>
<module name="ParameterName"/>
<module name="StaticVariableName"/>
<module name="TypeName"/>
<!-- Checks for imports -->
<!-- See http://checkstyle.sf.net/config_import.html -->
<module name="IllegalImport"/> <!-- defaults to sun.* packages -->
<module name="RedundantImport"/>
<module name="UnusedImports"/>
<!-- Checks for Size Violations. -->
<!-- See http://checkstyle.sf.net/config_sizes.html -->
<module name="LineLength"/>
<module name="MethodLength"/>
<module name="ParameterNumber"/>
<module name="OuterTypeNumber"/>
<!-- Checks for whitespace -->
<!-- See http://checkstyle.sf.net/config_whitespace.html -->
<module name="GenericWhitespace"/>
<module name="EmptyForIteratorPad"/>
<module name="MethodParamPad"/>
<module name="NoWhitespaceAfter">
<property name="tokens"
value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS, UNARY_PLUS" />
</module>
<module name="NoWhitespaceBefore"/>
<module name="OperatorWrap"/>
<module name="ParenPad"/>
<module name="TypecastParenPad"/>
<module name="WhitespaceAfter">
<property name="tokens" value="COMMA, SEMI"/>
</module>
<!-- Modifier Checks -->
<!-- See http://checkstyle.sf.net/config_modifiers.html -->
<module name="ModifierOrder"/>
<module name="RedundantModifier"/>
<!-- Checks for blocks. You know, those {}'s -->
<!-- See http://checkstyle.sf.net/config_blocks.html -->
<module name="AvoidNestedBlocks"/>
<module name="EmptyBlock">
<!-- catch blocks need a statement or a comment. -->
<property name="option" value="text" />
<property name="tokens" value="LITERAL_CATCH" />
</module>
<module name="EmptyBlock">
<!-- all other blocks need a real statement. -->
<property name="option" value="stmt" />
<property name="tokens" value="LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY,
LITERAL_IF, LITERAL_FOR, LITERAL_TRY, LITERAL_WHILE, INSTANCE_INIT,
STATIC_INIT" />
</module>
<module name="LeftCurly"/>
<module name="NeedBraces"/>
<module name="RightCurly"/>
<!-- Checks for common coding problems -->
<!-- See http://checkstyle.sf.net/config_coding.html -->
<!-- module name="AvoidInlineConditionals"/-->
<module name="DoubleCheckedLocking"/>
<module name="EmptyStatement"/>
<module name="EqualsHashCode"/>
<module name="StringLiteralEquality" />
<module name="HiddenField">
<property name="ignoreConstructorParameter" value="true"/>
</module>
<module name="IllegalInstantiation"/>
<module name="InnerAssignment"/>
<module name="MissingSwitchDefault"/>
<module name="RedundantThrows"/>
<module name="SimplifyBooleanExpression"/>
<module name="SimplifyBooleanReturn"/>
<module name="DefaultComesLast" />
<!-- Checks for class design -->
<!-- See http://checkstyle.sf.net/config_design.html -->
<module name="FinalClass"/>
<module name="HideUtilityClassConstructor"/>
<module name="InterfaceIsType"/>
<module name="VisibilityModifier">
<property name="protectedAllowed" value="true" />
</module>
<module name="MissingOverride" />
<!-- Miscellaneous other checks. -->
<!-- See http://checkstyle.sf.net/config_misc.html -->
<module name="ArrayTypeStyle"/>
<module name="ArrayTrailingComma"/>
<!--
This generates too many false-positives on wrapped 'throws' clauses
to be really useful. Disabled for now.
Sqoop style is:
* Spaces, not tabs.
* Indent by two spaces.
* Indent by four spaces when wrapping a line.
<module name="Indentation">
<property name="basicOffset" value="2" />
<property name="caseIndent" value="0" />
</module>
-->
<!-- module name="TodoComment"/ -->
<module name="UpperEll"/>
</module>
</module>
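Taken together, the modules above (and the indentation notes in the disabled Indentation block) describe the house style: two-space indent, four-space continuation indent, UPPER_CASE constants, braces everywhere, trailing commas in multi-line array initializers, and a period-terminated first Javadoc sentence. The hypothetical class below is written to those conventions; the license header required by the Header module is elided here.

/** Hypothetical example of code conforming to the checks configured above. */
public final class StyleConformingExample {

  /** ConstantName: static final fields are UPPER_CASE. */
  public static final int DEFAULT_RETRIES = 3;

  /** ArrayTrailingComma: multi-line initializers end with a comma. */
  private static final String [] KNOWN_FLAGS = {
    "--fields-terminated-by",
    "--lines-terminated-by",
  };

  private StyleConformingExample() {
    // HideUtilityClassConstructor: utility classes hide their constructor.
  }

  /**
   * Sums the positive entries of values onto an initial total.
   * The wrapped parameter list illustrates the four-space continuation indent.
   */
  public static int sumOfPositives(int [] values,
      int initial) {
    int sum = initial;
    for (int value : values) {
      if (value > 0) {
        sum += value; // NeedBraces: even one-statement bodies get braces.
      }
    }
    return sum;
  }
}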

View File

@ -22,7 +22,7 @@
import junit.framework.TestSuite; import junit.framework.TestSuite;
/** /**
* All tests for Sqoop (org.apache.hadoop.sqoop) * All tests for Sqoop (org.apache.hadoop.sqoop).
*/ */
public final class AllTests { public final class AllTests {

View File

@ -38,7 +38,7 @@
import junit.framework.TestSuite; import junit.framework.TestSuite;
/** /**
* Smoke tests for Sqoop (org.apache.hadoop.sqoop) * Smoke tests for Sqoop (org.apache.hadoop.sqoop).
*/ */
public final class SmokeTests { public final class SmokeTests {

View File

@ -42,7 +42,7 @@
public class TestAllTables extends ImportJobTestCase { public class TestAllTables extends ImportJobTestCase {
/** /**
* Create the argv to pass to Sqoop * Create the argv to pass to Sqoop.
* @return the argv as an array of strings. * @return the argv as an array of strings.
*/ */
private String [] getArgv(boolean includeHadoopFlags) { private String [] getArgv(boolean includeHadoopFlags) {
@ -67,7 +67,7 @@ public class TestAllTables extends ImportJobTestCase {
/** the names of the tables we're creating. */ /** the names of the tables we're creating. */
private List<String> tableNames; private List<String> tableNames;
/** The strings to inject in the (ordered) tables */ /** The strings to inject in the (ordered) tables. */
private List<String> expectedStrings; private List<String> expectedStrings;
@Before @Before
@ -123,7 +123,8 @@ public void testMultiTableImport() throws IOException {
this.expectedStrings.remove(0); this.expectedStrings.remove(0);
BufferedReader reader = new BufferedReader( BufferedReader reader = new BufferedReader(
new InputStreamReader(new FileInputStream(new File(filePath.toString())))); new InputStreamReader(new FileInputStream(
new File(filePath.toString()))));
try { try {
String line = reader.readLine(); String line = reader.readLine();
assertEquals("Table " + tableName + " expected a different string", assertEquals("Table " + tableName + " expected a different string",

View File

@ -40,7 +40,8 @@
*/ */
public class TestColumnTypes extends ManagerCompatTestCase { public class TestColumnTypes extends ManagerCompatTestCase {
public static final Log LOG = LogFactory.getLog(TestColumnTypes.class.getName()); public static final Log LOG = LogFactory.getLog(
TestColumnTypes.class.getName());
@Override @Override
protected Log getLogger() { protected Log getLogger() {

View File

@ -38,7 +38,8 @@ public class TestConnFactory extends TestCase {
public void testCustomFactory() throws IOException { public void testCustomFactory() throws IOException {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, AlwaysDummyFactory.class.getName()); conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY,
AlwaysDummyFactory.class.getName());
ConnFactory factory = new ConnFactory(conf); ConnFactory factory = new ConnFactory(conf);
ConnManager manager = factory.getManager(new SqoopOptions()); ConnManager manager = factory.getManager(new SqoopOptions());
@ -76,6 +77,10 @@ public void testMultipleManagers() throws IOException {
////// mock classes used for test cases above ////// ////// mock classes used for test cases above //////
/**
* Factory that always returns a DummyManager, regardless of the
* configuration.
*/
public static class AlwaysDummyFactory extends ManagerFactory { public static class AlwaysDummyFactory extends ManagerFactory {
public ConnManager accept(SqoopOptions opts) { public ConnManager accept(SqoopOptions opts) {
// Always return a new DummyManager // Always return a new DummyManager
@ -83,6 +88,9 @@ public ConnManager accept(SqoopOptions opts) {
} }
} }
/**
* ManagerFactory that accepts no configurations.
*/
public static class EmptyFactory extends ManagerFactory { public static class EmptyFactory extends ManagerFactory {
public ConnManager accept(SqoopOptions opts) { public ConnManager accept(SqoopOptions opts) {
// Never instantiate a proper ConnManager; // Never instantiate a proper ConnManager;
@ -114,7 +122,7 @@ public String getPrimaryKey(String tableName) {
} }
/** /**
* Default implementation * Default implementation.
* @param sqlType sql data type * @param sqlType sql data type
* @return java data type * @return java data type
*/ */
@ -123,7 +131,7 @@ public String toJavaType(int sqlType) {
} }
/** /**
* Default implementation * Default implementation.
* @param sqlType sql data type * @param sqlType sql data type
* @return hive data type * @return hive data type
*/ */
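The registration mechanism that testCustomFactory exercises above comes down to setting one configuration key. A minimal sketch follows; the factory class name is made up and the import packages are assumed from the surrounding Sqoop sources.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.ConnFactory;         // package assumed
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager; // package assumed

public final class CustomFactorySketch {
  private CustomFactorySketch() { }

  public static ConnManager pickManager() throws java.io.IOException {
    Configuration conf = new Configuration();
    // Hypothetical factory class registered via FACTORY_CLASS_NAMES_KEY;
    // the plural key name suggests a list of factories may be supplied.
    conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY,
        "com.example.MyManagerFactory");
    ConnFactory factory = new ConnFactory(conf);
    return factory.getManager(new SqoopOptions());
  }
}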

View File

@ -120,22 +120,22 @@ private String getRecordLine(int recordNum, ColumnGenerator... extraCols) {
the database should look like. the database should look like.
*/ */
public interface ColumnGenerator { public interface ColumnGenerator {
/** for a row with id rowNum, what should we write into that /** For a row with id rowNum, what should we write into that
line of the text file to export? line of the text file to export?
*/ */
public String getExportText(int rowNum); String getExportText(int rowNum);
/** for a row with id rowNum, what should the database return /** For a row with id rowNum, what should the database return
for the given column's value? for the given column's value?
*/ */
public String getVerifyText(int rowNum); String getVerifyText(int rowNum);
/** Return the column type to put in the CREATE TABLE statement */ /** Return the column type to put in the CREATE TABLE statement. */
public String getType(); String getType();
} }
/** /**
* Create a data file that gets exported to the db * Create a data file that gets exported to the db.
* @param fileNum the number of the file (for multi-file export) * @param fileNum the number of the file (for multi-file export)
* @param numRecords how many records to write to the file. * @param numRecords how many records to write to the file.
* @param gzip is true if the file should be gzipped. * @param gzip is true if the file should be gzipped.
@ -173,7 +173,8 @@ private void createTextFile(int fileNum, int numRecords, boolean gzip,
} }
} }
private void verifyCompressedFile(Path f, int expectedNumLines) throws IOException { private void verifyCompressedFile(Path f, int expectedNumLines)
throws IOException {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///"); conf.set("fs.default.name", "file:///");
FileSystem fs = FileSystem.get(conf); FileSystem fs = FileSystem.get(conf);
@ -185,7 +186,8 @@ private void verifyCompressedFile(Path f, int expectedNumLines) throws IOExcepti
if (null == decompressor) { if (null == decompressor) {
LOG.info("Verifying gzip sanity with null decompressor"); LOG.info("Verifying gzip sanity with null decompressor");
} else { } else {
LOG.info("Verifying gzip sanity with decompressor: " + decompressor.toString()); LOG.info("Verifying gzip sanity with decompressor: "
+ decompressor.toString());
} }
is = codec.createInputStream(is, decompressor); is = codec.createInputStream(is, decompressor);
BufferedReader r = new BufferedReader(new InputStreamReader(is)); BufferedReader r = new BufferedReader(new InputStreamReader(is));
@ -205,7 +207,7 @@ private void verifyCompressedFile(Path f, int expectedNumLines) throws IOExcepti
} }
/** /**
* Create a data file in SequenceFile format that gets exported to the db * Create a data file in SequenceFile format that gets exported to the db.
* @param fileNum the number of the file (for multi-file export). * @param fileNum the number of the file (for multi-file export).
* @param numRecords how many records to write to the file. * @param numRecords how many records to write to the file.
* @param className the table class name to instantiate and populate * @param className the table class name to instantiate and populate
@ -303,7 +305,7 @@ public void createTable(ColumnGenerator... extraColumns) throws SQLException {
} }
} }
/** Removing an existing table directory from the filesystem */ /** Removing an existing table directory from the filesystem. */
private void removeTablePath() throws IOException { private void removeTablePath() throws IOException {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///"); conf.set("fs.default.name", "file:///");
@ -346,7 +348,8 @@ private void assertColMinAndMax(String colName, ColumnGenerator generator)
int minId = getMinRowId(); int minId = getMinRowId();
int maxId = getMaxRowId(); int maxId = getMaxRowId();
LOG.info("Checking min/max for column " + colName + " with type " + generator.getType()); LOG.info("Checking min/max for column " + colName + " with type "
+ generator.getType());
String expectedMin = generator.getVerifyText(minId); String expectedMin = generator.getVerifyText(minId);
String expectedMax = generator.getVerifyText(maxId); String expectedMax = generator.getVerifyText(maxId);
@ -414,7 +417,7 @@ public void testEmptyExport() throws IOException, SQLException {
multiFileTest(1, 0, 1); multiFileTest(1, 0, 1);
} }
/** Export 10 rows, make sure they load in correctly */ /** Export 10 rows, make sure they load in correctly. */
public void testTextExport() throws IOException, SQLException { public void testTextExport() throws IOException, SQLException {
multiFileTest(1, 10, 1); multiFileTest(1, 10, 1);
} }
@ -427,7 +430,7 @@ public void testMultiFilesOneMapper() throws IOException, SQLException {
} }
/** Make sure we can use CombineFileInputFormat to handle multiple /** Make sure we can use CombineFileInputFormat to handle multiple
* files and multiple maps * files and multiple maps.
*/ */
public void testMultiFilesMultiMaps() throws IOException, SQLException { public void testMultiFilesMultiMaps() throws IOException, SQLException {
multiFileTest(2, 10, 2); multiFileTest(2, 10, 2);
@ -481,7 +484,7 @@ public void testUnlimitedTransactionSize() throws IOException, SQLException {
verifyExport(TOTAL_RECORDS); verifyExport(TOTAL_RECORDS);
} }
/** Run 2 mappers, make sure all records load in correctly */ /** Run 2 mappers, make sure all records load in correctly. */
public void testMultiMapTextExport() throws IOException, SQLException { public void testMultiMapTextExport() throws IOException, SQLException {
final int RECORDS_PER_MAP = 10; final int RECORDS_PER_MAP = 10;
@ -496,12 +499,13 @@ public void testMultiMapTextExport() throws IOException, SQLException {
verifyExport(RECORDS_PER_MAP * NUM_FILES); verifyExport(RECORDS_PER_MAP * NUM_FILES);
} }
/** Export some rows from a SequenceFile, make sure they import correctly */ /** Export some rows from a SequenceFile, make sure they import correctly. */
public void testSequenceFileExport() throws Exception { public void testSequenceFileExport() throws Exception {
final int TOTAL_RECORDS = 10; final int TOTAL_RECORDS = 10;
// First, generate class and jar files that represent the table we're exporting to. // First, generate class and jar files that represent the table
// we're exporting to.
LOG.info("Creating initial schema for SeqFile test"); LOG.info("Creating initial schema for SeqFile test");
createTable(); createTable();
LOG.info("Generating code..."); LOG.info("Generating code...");
@ -531,7 +535,8 @@ public void testSequenceFileExport() throws Exception {
String jarBaseName = jarPath.getName(); String jarBaseName = jarPath.getName();
assertTrue(jarBaseName.endsWith(".jar")); assertTrue(jarBaseName.endsWith(".jar"));
assertTrue(jarBaseName.length() > ".jar".length()); assertTrue(jarBaseName.length() > ".jar".length());
String className = jarBaseName.substring(0, jarBaseName.length() - ".jar".length()); String className = jarBaseName.substring(0, jarBaseName.length()
- ".jar".length());
LOG.info("Using jar filename: " + jarFileName); LOG.info("Using jar filename: " + jarFileName);
LOG.info("Using class name: " + className); LOG.info("Using class name: " + className);
@ -621,7 +626,7 @@ protected String pad(int n) {
} }
/** /**
* Get a column generator for DATE columns * Get a column generator for DATE columns.
*/ */
protected ColumnGenerator getDateColumnGenerator() { protected ColumnGenerator getDateColumnGenerator() {
return new ColumnGenerator() { return new ColumnGenerator() {

View File

@ -31,10 +31,11 @@
*/ */
public class TestMultiCols extends ImportJobTestCase { public class TestMultiCols extends ImportJobTestCase {
public static final Log LOG = LogFactory.getLog(TestMultiCols.class.getName()); public static final Log LOG = LogFactory.getLog(
TestMultiCols.class.getName());
/** /**
* Do a full import verification test on a table containing one row * Do a full import verification test on a table containing one row.
* @param types the types of the columns to insert * @param types the types of the columns to insert
* @param insertVals the SQL text to use to insert each value * @param insertVals the SQL text to use to insert each value
* @param validateVals the text to expect when retrieving each value from * @param validateVals the text to expect when retrieving each value from
@ -44,12 +45,12 @@ public class TestMultiCols extends ImportJobTestCase {
* @param importColumns The list of columns to import * @param importColumns The list of columns to import
*/ */
private void verifyTypes(String [] types , String [] insertVals, private void verifyTypes(String [] types , String [] insertVals,
String validateVals [], String validateLine) { String [] validateVals, String validateLine) {
verifyTypes(types, insertVals, validateVals, validateLine, null); verifyTypes(types, insertVals, validateVals, validateLine, null);
} }
private void verifyTypes(String [] types , String [] insertVals, private void verifyTypes(String [] types , String [] insertVals,
String validateVals [], String validateLine, String [] importColumns) { String [] validateVals, String validateLine, String [] importColumns) {
createTableWithColTypes(types, insertVals); createTableWithColTypes(types, insertVals);

View File

@ -47,7 +47,7 @@
public class TestMultiMaps extends ImportJobTestCase { public class TestMultiMaps extends ImportJobTestCase {
/** /**
* Create the argv to pass to Sqoop * Create the argv to pass to Sqoop.
* @return the argv as an array of strings. * @return the argv as an array of strings.
*/ */
protected String [] getArgv(boolean includeHadoopFlags, String [] colNames, protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
@ -103,7 +103,7 @@ protected List<Path> getDataFilePaths() throws IOException {
} }
/** /**
* Given a comma-delimited list of integers, grab and parse the first int * Given a comma-delimited list of integers, grab and parse the first int.
* @param str a comma-delimited list of values, the first of which is an int. * @param str a comma-delimited list of values, the first of which is an int.
* @return the first field in the string, cast to int * @return the first field in the string, cast to int
*/ */
@ -130,13 +130,15 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
CompilationManager compileMgr = new CompilationManager(opts); CompilationManager compileMgr = new CompilationManager(opts);
String jarFileName = compileMgr.getJarFilename(); String jarFileName = compileMgr.getJarFilename();
prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName()); prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
getTableName());
List<Path> paths = getDataFilePaths(); List<Path> paths = getDataFilePaths();
Configuration conf = new Configuration(); Configuration conf = new Configuration();
int curSum = 0; int curSum = 0;
assertTrue("Found only " + paths.size() + " path(s); expected > 1.", paths.size() > 1); assertTrue("Found only " + paths.size() + " path(s); expected > 1.",
paths.size() > 1);
// We expect multiple files. We need to open all the files and sum up the // We expect multiple files. We need to open all the files and sum up the
// first column across all of them. // first column across all of them.
@ -147,11 +149,12 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf); Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf); Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
// We know that these values are two ints separated by a ',' character. // We know that these values are two ints separated by a ','
// Since this is all dynamic, though, we don't want to actually link against // character. Since this is all dynamic, though, we don't want to
// the class and use its methods. So we just parse this back into int fields manually. // actually link against the class and use its methods. So we just
// Sum them up and ensure that we get the expected total for the first column, to // parse this back into int fields manually. Sum them up and ensure
// verify that we got all the results from the db into the file. // that we get the expected total for the first column, to verify that
// we got all the results from the db into the file.
// now sum up everything in the file. // now sum up everything in the file.
while (reader.next(key) != null) { while (reader.next(key) != null) {
@ -163,7 +166,8 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
reader = null; reader = null;
} }
assertEquals("Total sum of first db column mismatch", expectedSum, curSum); assertEquals("Total sum of first db column mismatch", expectedSum,
curSum);
} catch (InvalidOptionsException ioe) { } catch (InvalidOptionsException ioe) {
fail(ioe.toString()); fail(ioe.toString());
} catch (ParseException pe) { } catch (ParseException pe) {

View File

@ -37,12 +37,12 @@
import org.apache.hadoop.sqoop.util.ClassLoaderStack; import org.apache.hadoop.sqoop.util.ClassLoaderStack;
/** /**
* Test that --split-by works * Test that --split-by works.
*/ */
public class TestSplitBy extends ImportJobTestCase { public class TestSplitBy extends ImportJobTestCase {
/** /**
* Create the argv to pass to Sqoop * Create the argv to pass to Sqoop.
* @return the argv as an array of strings. * @return the argv as an array of strings.
*/ */
protected String [] getArgv(boolean includeHadoopFlags, String [] colNames, protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
@ -82,7 +82,7 @@ protected String getTableName() {
/** /**
* Given a comma-delimited list of integers, grab and parse the first int * Given a comma-delimited list of integers, grab and parse the first int.
* @param str a comma-delimited list of values, the first of which is an int. * @param str a comma-delimited list of values, the first of which is an int.
* @return the first field in the string, cast to int * @return the first field in the string, cast to int
*/ */
@ -109,7 +109,8 @@ public void runSplitByTest(String splitByCol, int expectedSum)
String jarFileName = compileMgr.getJarFilename(); String jarFileName = compileMgr.getJarFilename();
LOG.debug("Got jar from import job: " + jarFileName); LOG.debug("Got jar from import job: " + jarFileName);
prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName()); prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
getTableName());
reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString()); reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
@ -119,10 +120,11 @@ public void runSplitByTest(String splitByCol, int expectedSum)
Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf); Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
// We know that these values are two ints separated by a ',' character. // We know that these values are two ints separated by a ',' character.
// Since this is all dynamic, though, we don't want to actually link against // Since this is all dynamic, though, we don't want to actually link
// the class and use its methods. So we just parse this back into int fields manually. // against the class and use its methods. So we just parse this back
// Sum them up and ensure that we get the expected total for the first column, to // into int fields manually. Sum them up and ensure that we get the
// verify that we got all the results from the db into the file. // expected total for the first column, to verify that we got all the
// results from the db into the file.
// Sum up everything in the file. // Sum up everything in the file.
int curSum = 0; int curSum = 0;
@ -131,7 +133,8 @@ public void runSplitByTest(String splitByCol, int expectedSum)
curSum += getFirstInt(val.toString()); curSum += getFirstInt(val.toString());
} }
assertEquals("Total sum of first db column mismatch", expectedSum, curSum); assertEquals("Total sum of first db column mismatch", expectedSum,
curSum);
} catch (InvalidOptionsException ioe) { } catch (InvalidOptionsException ioe) {
fail(ioe.toString()); fail(ioe.toString());
} catch (ParseException pe) { } catch (ParseException pe) {
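The "parse this back into int fields manually" step that TestSplitBy, TestMultiMaps, and TestWhere all describe reduces to the loop below. This is a standalone sketch with made-up records; the real tests read the same values back out of SequenceFiles.

public final class FirstColumnSum {
  private FirstColumnSum() { }

  /** Given a comma-delimited record, parse and return its first int field. */
  static int getFirstInt(String str) {
    String [] parts = str.split(",");
    return Integer.parseInt(parts[0]);
  }

  public static void main(String [] args) {
    String [] records = { "1,8", "3,8", "5,8", "7,8", }; // hypothetical rows
    int curSum = 0;
    for (String record : records) {
      curSum += getFirstInt(record);
    }
    System.out.println("Sum of first column: " + curSum); // prints 16
  }
}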

View File

@ -24,7 +24,7 @@
/** /**
* Test aspects of the SqoopOptions class * Test aspects of the SqoopOptions class.
*/ */
public class TestSqoopOptions extends TestCase { public class TestSqoopOptions extends TestCase {
@ -158,8 +158,8 @@ private SqoopOptions parse(String [] argv) throws Exception {
// test that setting output delimiters also sets input delimiters // test that setting output delimiters also sets input delimiters
public void testDelimitersInherit() throws Exception { public void testDelimitersInherit() throws Exception {
String [] args = { String [] args = {
"--fields-terminated-by", "--fields-terminated-by",
"|" "|",
}; };
SqoopOptions opts = parse(args); SqoopOptions opts = parse(args);
@ -167,13 +167,14 @@ public void testDelimitersInherit() throws Exception {
assertEquals('|', opts.getOutputFieldDelim()); assertEquals('|', opts.getOutputFieldDelim());
} }
// test that setting output delimiters and setting input delims separately works // Test that setting output delimiters and setting input delims
// separately works.
public void testDelimOverride1() throws Exception { public void testDelimOverride1() throws Exception {
String [] args = { String [] args = {
"--fields-terminated-by", "--fields-terminated-by",
"|", "|",
"--input-fields-terminated-by", "--input-fields-terminated-by",
"*" "*",
}; };
SqoopOptions opts = parse(args); SqoopOptions opts = parse(args);
@ -184,10 +185,10 @@ public void testDelimOverride1() throws Exception {
// test that the order in which delims are specified doesn't matter // test that the order in which delims are specified doesn't matter
public void testDelimOverride2() throws Exception { public void testDelimOverride2() throws Exception {
String [] args = { String [] args = {
"--input-fields-terminated-by", "--input-fields-terminated-by",
"*", "*",
"--fields-terminated-by", "--fields-terminated-by",
"|" "|",
}; };
SqoopOptions opts = parse(args); SqoopOptions opts = parse(args);
@ -198,7 +199,7 @@ public void testDelimOverride2() throws Exception {
public void testBadNumMappers1() throws Exception { public void testBadNumMappers1() throws Exception {
String [] args = { String [] args = {
"--num-mappers", "--num-mappers",
"x" "x",
}; };
try { try {
@ -212,7 +213,7 @@ public void testBadNumMappers1() throws Exception {
public void testBadNumMappers2() throws Exception { public void testBadNumMappers2() throws Exception {
String [] args = { String [] args = {
"-m", "-m",
"x" "x",
}; };
try { try {
@ -226,7 +227,7 @@ public void testBadNumMappers2() throws Exception {
public void testGoodNumMappers() throws Exception { public void testGoodNumMappers() throws Exception {
String [] args = { String [] args = {
"-m", "-m",
"4" "4",
}; };
SqoopOptions opts = parse(args); SqoopOptions opts = parse(args);

View File

@ -45,7 +45,7 @@
public class TestWhere extends ImportJobTestCase { public class TestWhere extends ImportJobTestCase {
/** /**
* Create the argv to pass to Sqoop * Create the argv to pass to Sqoop.
* @return the argv as an array of strings. * @return the argv as an array of strings.
*/ */
protected String [] getArgv(boolean includeHadoopFlags, String [] colNames, protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
@ -87,7 +87,7 @@ protected String getTableName() {
/** /**
* Given a comma-delimited list of integers, grab and parse the first int * Given a comma-delimited list of integers, grab and parse the first int.
* @param str a comma-delimited list of values, the first of which is an int. * @param str a comma-delimited list of values, the first of which is an int.
* @return the first field in the string, cast to int * @return the first field in the string, cast to int
*/ */
@ -96,8 +96,8 @@ private int getFirstInt(String str) {
return Integer.parseInt(parts[0]); return Integer.parseInt(parts[0]);
} }
public void runWhereTest(String whereClause, String firstValStr, int numExpectedResults, public void runWhereTest(String whereClause, String firstValStr,
int expectedSum) throws IOException { int numExpectedResults, int expectedSum) throws IOException {
String [] columns = HsqldbTestServer.getFieldNames(); String [] columns = HsqldbTestServer.getFieldNames();
ClassLoader prevClassLoader = null; ClassLoader prevClassLoader = null;
@ -113,7 +113,8 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
CompilationManager compileMgr = new CompilationManager(opts); CompilationManager compileMgr = new CompilationManager(opts);
String jarFileName = compileMgr.getJarFilename(); String jarFileName = compileMgr.getJarFilename();
prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName()); prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
getTableName());
reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString()); reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
@ -128,13 +129,15 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
// make sure that the value we think should be at the top, is. // make sure that the value we think should be at the top, is.
reader.getCurrentValue(val); reader.getCurrentValue(val);
assertEquals("Invalid ordering within sorted SeqFile", firstValStr, val.toString()); assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
val.toString());
// We know that these values are two ints separated by a ',' character. // We know that these values are two ints separated by a ',' character.
// Since this is all dynamic, though, we don't want to actually link against // Since this is all dynamic, though, we don't want to actually link
// the class and use its methods. So we just parse this back into int fields manually. // against the class and use its methods. So we just parse this back
// Sum them up and ensure that we get the expected total for the first column, to // into int fields manually. Sum them up and ensure that we get the
// verify that we got all the results from the db into the file. // expected total for the first column, to verify that we got all the
// results from the db into the file.
int curSum = getFirstInt(val.toString()); int curSum = getFirstInt(val.toString());
int totalResults = 1; int totalResults = 1;
@ -145,8 +148,10 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
totalResults++; totalResults++;
} }
assertEquals("Total sum of first db column mismatch", expectedSum, curSum); assertEquals("Total sum of first db column mismatch", expectedSum,
assertEquals("Incorrect number of results for query", numExpectedResults, totalResults); curSum);
assertEquals("Incorrect number of results for query", numExpectedResults,
totalResults);
} catch (InvalidOptionsException ioe) { } catch (InvalidOptionsException ioe) {
fail(ioe.toString()); fail(ioe.toString());
} catch (ParseException pe) { } catch (ParseException pe) {

View File

@ -33,9 +33,10 @@
import org.apache.hadoop.sqoop.manager.PostgresqlTest; import org.apache.hadoop.sqoop.manager.PostgresqlTest;
/** /**
* Test battery including all tests of vendor-specific ConnManager implementations. * Test battery including all tests of vendor-specific ConnManager
* These tests likely aren't run by Apache Hudson, because they require configuring * implementations. These tests likely aren't run by Apache Hudson, because
* and using Oracle, MySQL, etc., which may have incompatible licenses with Apache. * they require configuring and using Oracle, MySQL, etc., which may have
* incompatible licenses with Apache.
*/ */
public final class ThirdPartyTests extends TestCase { public final class ThirdPartyTests extends TestCase {

View File

@ -42,7 +42,8 @@
*/ */
public class TestHiveImport extends ImportJobTestCase { public class TestHiveImport extends ImportJobTestCase {
public static final Log LOG = LogFactory.getLog(TestHiveImport.class.getName()); public static final Log LOG = LogFactory.getLog(
TestHiveImport.class.getName());
/** /**
* Sets the expected number of columns in the table being manipulated * Sets the expected number of columns in the table being manipulated
@ -60,7 +61,7 @@ private void setNumCols(int numCols) {
} }
/** /**
* Create the argv to pass to Sqoop * Create the argv to pass to Sqoop.
* @return the argv as an array of strings. * @return the argv as an array of strings.
*/ */
protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) { protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
@ -147,8 +148,9 @@ private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
return opts; return opts;
} }
private void runImportTest(String tableName, String [] types, String [] values, private void runImportTest(String tableName, String [] types,
String verificationScript, String [] args, SqoopTool tool) throws IOException { String [] values, String verificationScript, String [] args,
SqoopTool tool) throws IOException {
// create a table and populate it with a row... // create a table and populate it with a row...
createTableWithColTypes(types, values); createTableWithColTypes(types, values);
@ -158,7 +160,8 @@ private void runImportTest(String tableName, String [] types, String [] values,
SqoopOptions options = getSqoopOptions(args, tool); SqoopOptions options = getSqoopOptions(args, tool);
String hiveHome = options.getHiveHome(); String hiveHome = options.getHiveHome();
assertNotNull("hive.home was not set", hiveHome); assertNotNull("hive.home was not set", hiveHome);
Path testDataPath = new Path(new Path(hiveHome), "scripts/" + verificationScript); Path testDataPath = new Path(new Path(hiveHome),
"scripts/" + verificationScript);
System.setProperty("expected.script", testDataPath.toString()); System.setProperty("expected.script", testDataPath.toString());
// verify that we can import it correctly into hive. // verify that we can import it correctly into hive.
@ -202,7 +205,7 @@ public void testGenerateOnly() throws IOException {
} }
/** Test that strings and ints are handled in the normal fashion */ /** Test that strings and ints are handled in the normal fashion. */
@Test @Test
public void testNormalHiveImport() throws IOException { public void testNormalHiveImport() throws IOException {
final String TABLE_NAME = "NORMAL_HIVE_IMPORT"; final String TABLE_NAME = "NORMAL_HIVE_IMPORT";
@ -214,7 +217,7 @@ public void testNormalHiveImport() throws IOException {
getArgv(false, null), new ImportTool()); getArgv(false, null), new ImportTool());
} }
/** Test that table is created in hive with no data import */ /** Test that table is created in hive with no data import. */
@Test @Test
public void testCreateOnlyHiveImport() throws IOException { public void testCreateOnlyHiveImport() throws IOException {
final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT"; final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT";
@ -227,7 +230,10 @@ public void testCreateOnlyHiveImport() throws IOException {
new CreateHiveTableTool()); new CreateHiveTableTool());
} }
/** Test that table is created in hive and replaces the existing table if any */ /**
* Test that table is created in hive and replaces the existing table if
* any.
*/
@Test @Test
public void testCreateOverwriteHiveImport() throws IOException { public void testCreateOverwriteHiveImport() throws IOException {
final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT"; final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT";
@ -241,7 +247,7 @@ public void testCreateOverwriteHiveImport() throws IOException {
new CreateHiveTableTool()); new CreateHiveTableTool());
} }
/** Test that dates are coerced properly to strings */ /** Test that dates are coerced properly to strings. */
@Test @Test
public void testDate() throws IOException { public void testDate() throws IOException {
final String TABLE_NAME = "DATE_HIVE_IMPORT"; final String TABLE_NAME = "DATE_HIVE_IMPORT";
@ -253,7 +259,7 @@ public void testDate() throws IOException {
getArgv(false, null), new ImportTool()); getArgv(false, null), new ImportTool());
} }
/** Test that NUMERICs are coerced to doubles */ /** Test that NUMERICs are coerced to doubles. */
@Test @Test
public void testNumeric() throws IOException { public void testNumeric() throws IOException {
final String TABLE_NAME = "NUMERIC_HIVE_IMPORT"; final String TABLE_NAME = "NUMERIC_HIVE_IMPORT";
@ -265,7 +271,7 @@ public void testNumeric() throws IOException {
getArgv(false, null), new ImportTool()); getArgv(false, null), new ImportTool());
} }
/** If bin/hive returns an error exit status, we should get an IOException */ /** If bin/hive returns an error exit status, we should get an IOException. */
@Test @Test
public void testHiveExitFails() { public void testHiveExitFails() {
// The expected script is different than the one which would be generated // The expected script is different than the one which would be generated
@ -285,7 +291,7 @@ public void testHiveExitFails() {
} }
} }
/** Test that we can set delimiters how we want them */ /** Test that we can set delimiters how we want them. */
@Test @Test
public void testCustomDelimiters() throws IOException { public void testCustomDelimiters() throws IOException {
final String TABLE_NAME = "CUSTOM_DELIM_IMPORT"; final String TABLE_NAME = "CUSTOM_DELIM_IMPORT";
@ -293,8 +299,10 @@ public void testCustomDelimiters() throws IOException {
setNumCols(3); setNumCols(3);
String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" }; String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
String [] vals = { "'test'", "42", "'somestring'" }; String [] vals = { "'test'", "42", "'somestring'" };
String [] extraArgs = { "--fields-terminated-by", ",", String [] extraArgs = {
"--lines-terminated-by", "|" }; "--fields-terminated-by", ",",
"--lines-terminated-by", "|",
};
runImportTest(TABLE_NAME, types, vals, "customDelimImport.q", runImportTest(TABLE_NAME, types, vals, "customDelimImport.q",
getArgv(false, extraArgs), new ImportTool()); getArgv(false, extraArgs), new ImportTool());
} }

View File

@ -25,7 +25,6 @@
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.SqoopOptions; import org.apache.hadoop.sqoop.SqoopOptions;
import org.junit.Test;
import junit.framework.TestCase; import junit.framework.TestCase;
@ -34,7 +33,8 @@
*/ */
public class TestTableDefWriter extends TestCase { public class TestTableDefWriter extends TestCase {
public static final Log LOG = LogFactory.getLog(TestTableDefWriter.class.getName()); public static final Log LOG = LogFactory.getLog(
TestTableDefWriter.class.getName());
// Test getHiveOctalCharCode and expect an IllegalArgumentException. // Test getHiveOctalCharCode and expect an IllegalArgumentException.
private void expectExceptionInCharCode(int charCode) { private void expectExceptionInCharCode(int charCode) {
@ -73,7 +73,8 @@ public void testDifferentTableNames() throws Exception {
LOG.debug("Load data stmt: " + loadData); LOG.debug("Load data stmt: " + loadData);
// Assert that the statements generated have the form we expect. // Assert that the statements generated have the form we expect.
assertTrue(createTable.indexOf("CREATE TABLE IF NOT EXISTS outputTable") != -1); assertTrue(createTable.indexOf(
"CREATE TABLE IF NOT EXISTS outputTable") != -1);
assertTrue(loadData.indexOf("INTO TABLE outputTable") != -1); assertTrue(loadData.indexOf("INTO TABLE outputTable") != -1);
assertTrue(loadData.indexOf("/inputTable'") != -1); assertTrue(loadData.indexOf("/inputTable'") != -1);
} }

Some files were not shown because too many files have changed in this diff.