diff --git a/build.xml b/build.xml
index 0adfe00c..ae068a22 100644
--- a/build.xml
+++ b/build.xml
@@ -88,6 +88,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ivy.xml b/ivy.xml
index a10d1f1e..94ad6860 100644
--- a/ivy.xml
+++ b/ivy.xml
@@ -52,6 +52,9 @@
Things we actually need to ship are marked with a 'redist'
configuration. -->
+
+
+
@@ -92,5 +95,9 @@
conf="common->default"/>
+
+
+
diff --git a/ivy/libraries.properties b/ivy/libraries.properties
index 564013cc..d8b830a1 100644
--- a/ivy/libraries.properties
+++ b/ivy/libraries.properties
@@ -16,6 +16,8 @@
# This properties file lists the versions of the various artifacts we use.
# It drives ivy and the generation of a maven POM
+checkstyle.version=5.0
+
commons-cli.version=1.2
commons-io.version=1.4
commons-logging.version=1.0.4
diff --git a/src/java/org/apache/hadoop/sqoop/ConnFactory.java b/src/java/org/apache/hadoop/sqoop/ConnFactory.java
index 42cd8c08..05b438fa 100644
--- a/src/java/org/apache/hadoop/sqoop/ConnFactory.java
+++ b/src/java/org/apache/hadoop/sqoop/ConnFactory.java
@@ -37,7 +37,8 @@
*
* This class delegates the actual responsibility for instantiating
* ConnManagers to one or more instances of ManagerFactory. ManagerFactories
- * are consulted in the order specified in sqoop-site.xml (sqoop.connection.factories).
+ * are consulted in the order specified in sqoop-site.xml
+ * (sqoop.connection.factories).
*/
public class ConnFactory {
@@ -51,10 +52,13 @@ public ConnFactory(Configuration conf) {
/** The sqoop-site.xml configuration property used to set the list of
* available ManagerFactories.
*/
- public final static String FACTORY_CLASS_NAMES_KEY = "sqoop.connection.factories";
+ public static final String FACTORY_CLASS_NAMES_KEY =
+ "sqoop.connection.factories";
- // The default value for sqoop.connection.factories is the name of the DefaultManagerFactory.
- final static String DEFAULT_FACTORY_CLASS_NAMES = DefaultManagerFactory.class.getName();
+ // The default value for sqoop.connection.factories is the
+ // name of the DefaultManagerFactory.
+ static final String DEFAULT_FACTORY_CLASS_NAMES =
+ DefaultManagerFactory.class.getName();
/** The list of ManagerFactory instances consulted by getManager().
*/
@@ -76,7 +80,8 @@ private void instantiateFactories(Configuration conf) {
LOG.debug("Loaded manager factory: " + className);
factories.add(factory);
} catch (ClassNotFoundException cnfe) {
- LOG.error("Could not load ManagerFactory " + className + " (not found)");
+ LOG.error("Could not load ManagerFactory " + className
+ + " (not found)");
}
}
}
@@ -98,7 +103,8 @@ public ConnManager getManager(SqoopOptions opts) throws IOException {
}
}
- throw new IOException("No manager for connect string: " + opts.getConnectString());
+ throw new IOException("No manager for connect string: "
+ + opts.getConnectString());
}
}
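
The factory consultation described in the ConnFactory javadoc boils down to walking a comma-separated list of class names from the configuration, first match wins. A minimal standalone sketch of that ordering (the class names are hypothetical, and this is not the patched code):

import java.util.Arrays;
import java.util.List;

public class FactoryOrderSketch {
  public static void main(String[] args) {
    // Hypothetical value of sqoop.connection.factories from sqoop-site.xml.
    String factories = "com.example.CustomManagerFactory,"
        + "org.apache.hadoop.sqoop.manager.DefaultManagerFactory";

    // Factories are consulted left to right; the first ManagerFactory that
    // accepts the connect string supplies the ConnManager.
    List<String> order = Arrays.asList(factories.split(","));
    for (String className : order) {
      System.out.println("Would consult: " + className);
    }
  }
}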
diff --git a/src/java/org/apache/hadoop/sqoop/Sqoop.java b/src/java/org/apache/hadoop/sqoop/Sqoop.java
index d9a4a8a3..653a253d 100644
--- a/src/java/org/apache/hadoop/sqoop/Sqoop.java
+++ b/src/java/org/apache/hadoop/sqoop/Sqoop.java
@@ -18,11 +18,7 @@
package org.apache.hadoop.sqoop;
-import java.io.IOException;
-import java.sql.SQLException;
import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.commons.cli.ParseException;
import org.apache.commons.logging.Log;
@@ -32,18 +28,8 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.sqoop.cli.ToolOptions;
-import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ConnManager;
-import org.apache.hadoop.sqoop.manager.ExportJobContext;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.tool.HelpTool;
import org.apache.hadoop.sqoop.tool.SqoopTool;
-import org.apache.hadoop.sqoop.util.ExportException;
-import org.apache.hadoop.sqoop.util.ImportException;
/**
* Main entry-point for Sqoop
@@ -201,7 +187,8 @@ public static int runTool(String [] args) {
String toolName = args[0];
SqoopTool tool = SqoopTool.getTool(toolName);
if (null == tool) {
- System.err.println("No such sqoop tool: " + toolName + ". See 'sqoop help'.");
+ System.err.println("No such sqoop tool: " + toolName
+ + ". See 'sqoop help'.");
return 1;
}
@@ -219,3 +206,4 @@ public static void main(String [] args) {
System.exit(ret);
}
}
+
diff --git a/src/java/org/apache/hadoop/sqoop/SqoopOptions.java b/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
index 4441a9f7..1aaf7b2b 100644
--- a/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
+++ b/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
@@ -37,14 +37,14 @@
import org.apache.log4j.Logger;
/**
- * Command-line arguments used by Sqoop
+ * Command-line arguments used by Sqoop.
*/
public class SqoopOptions {
public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
/**
- * Thrown when invalid cmdline options are given
+ * Thrown when invalid cmdline options are given.
*/
@SuppressWarnings("serial")
public static class InvalidOptionsException extends Exception {
@@ -64,7 +64,7 @@ public String toString() {
}
}
- // selects in-HDFS destination file format
+ /** Selects in-HDFS destination file format. */
public enum FileLayout {
TextFile,
SequenceFile
@@ -98,18 +98,27 @@ public enum FileLayout {
private boolean overwriteHiveTable;
private String hiveTableName;
private String packageName; // package to prepend to auto-named classes.
- private String className; // package+class to apply to individual table import.
- // also used as an *input* class with existingJarFile.
- private String existingJarFile; // Name of a jar containing existing table definition
- // class to use.
+
+ // package+class to apply to individual table import.
+ // also used as an *input* class with existingJarFile.
+ private String className;
+
+ // Name of a jar containing existing table definition
+ // class to use.
+ private String existingJarFile;
+
private int numMappers;
private boolean useCompression;
- private long directSplitSize; // In direct mode, open a new stream every X bytes.
- private long maxInlineLobSize; // Max size of an inline LOB; larger LOBs are written
- // to external files on disk.
+ // In direct mode, open a new stream every X bytes.
+ private long directSplitSize;
- private String exportDir; // HDFS path to read from when performing an export
+ // Max size of an inline LOB; larger LOBs are written
+ // to external files on disk.
+ private long maxInlineLobSize;
+
+ // HDFS path to read from when performing an export
+ private String exportDir;
private char inputFieldDelim;
private char inputRecordDelim;
@@ -142,7 +151,7 @@ public SqoopOptions(Configuration conf) {
}
/**
- * Alternate SqoopOptions interface used mostly for unit testing
+ * Alternate SqoopOptions interface used mostly for unit testing.
* @param connect JDBC connect string to use
* @param table Table to read
*/
@@ -153,19 +162,22 @@ public SqoopOptions(final String connect, final String table) {
this.tableName = table;
}
- private boolean getBooleanProperty(Properties props, String propName, boolean defaultValue) {
+ private boolean getBooleanProperty(Properties props, String propName,
+ boolean defaultValue) {
String str = props.getProperty(propName,
Boolean.toString(defaultValue)).toLowerCase();
return "true".equals(str) || "yes".equals(str) || "1".equals(str);
}
- private long getLongProperty(Properties props, String propName, long defaultValue) {
+ private long getLongProperty(Properties props, String propName,
+ long defaultValue) {
String str = props.getProperty(propName,
Long.toString(defaultValue)).toLowerCase();
try {
return Long.parseLong(str);
} catch (NumberFormatException nfe) {
- LOG.warn("Could not parse integer value for config parameter " + propName);
+ LOG.warn("Could not parse integer value for config parameter "
+ + propName);
return defaultValue;
}
}
@@ -189,31 +201,40 @@ private void loadFromProperties() {
this.username = props.getProperty("db.username", this.username);
this.password = props.getProperty("db.password", this.password);
this.tableName = props.getProperty("db.table", this.tableName);
- this.connectString = props.getProperty("db.connect.url", this.connectString);
+ this.connectString = props.getProperty("db.connect.url",
+ this.connectString);
this.splitByCol = props.getProperty("db.split.column", this.splitByCol);
this.whereClause = props.getProperty("db.where.clause", this.whereClause);
- this.driverClassName = props.getProperty("jdbc.driver", this.driverClassName);
- this.warehouseDir = props.getProperty("hdfs.warehouse.dir", this.warehouseDir);
+ this.driverClassName = props.getProperty("jdbc.driver",
+ this.driverClassName);
+ this.warehouseDir = props.getProperty("hdfs.warehouse.dir",
+ this.warehouseDir);
this.hiveHome = props.getProperty("hive.home", this.hiveHome);
this.className = props.getProperty("java.classname", this.className);
- this.packageName = props.getProperty("java.packagename", this.packageName);
- this.existingJarFile = props.getProperty("java.jar.file", this.existingJarFile);
+ this.packageName = props.getProperty("java.packagename",
+ this.packageName);
+ this.existingJarFile = props.getProperty("java.jar.file",
+ this.existingJarFile);
this.exportDir = props.getProperty("export.dir", this.exportDir);
this.direct = getBooleanProperty(props, "direct.import", this.direct);
- this.hiveImport = getBooleanProperty(props, "hive.import", this.hiveImport);
- this.overwriteHiveTable = getBooleanProperty(props, "hive.overwrite.table", this.overwriteHiveTable);
- this.useCompression = getBooleanProperty(props, "compression", this.useCompression);
+ this.hiveImport = getBooleanProperty(props, "hive.import",
+ this.hiveImport);
+ this.overwriteHiveTable = getBooleanProperty(props,
+ "hive.overwrite.table", this.overwriteHiveTable);
+ this.useCompression = getBooleanProperty(props, "compression",
+ this.useCompression);
this.directSplitSize = getLongProperty(props, "direct.split.size",
this.directSplitSize);
} catch (IOException ioe) {
- LOG.error("Could not read properties file " + DEFAULT_CONFIG_FILE + ": " + ioe.toString());
+ LOG.error("Could not read properties file " + DEFAULT_CONFIG_FILE + ": "
+ + ioe.toString());
} finally {
if (null != istream) {
try {
istream.close();
} catch (IOException ioe) {
- // ignore this; we're closing.
+ // Ignore this; we're closing.
}
}
}
@@ -221,7 +242,7 @@ private void loadFromProperties() {
/**
* @return the temp directory to use; this is guaranteed to end with
- * the file separator character (e.g., '/')
+ * the file separator character (e.g., '/').
*/
public String getTempDir() {
return this.tmpDir;
@@ -280,17 +301,19 @@ private void initDefaults(Configuration baseConfiguration) {
}
/**
- * Given a string containing a single character or an escape sequence representing
- * a char, return that char itself.
+ * Given a string containing a single character or an escape sequence
+ * representing a char, return that char itself.
*
* Normal literal characters return themselves: "x" -> 'x', etc.
- * Strings containing a '\' followed by one of t, r, n, or b escape to the usual
- * character as seen in Java: "\n" -> (newline), etc.
+ * Strings containing a '\' followed by one of t, r, n, or b escape to the
+ * usual character as seen in Java: "\n" -> (newline), etc.
*
- * Strings like "\0ooo" return the character specified by the octal sequence 'ooo'
- * Strings like "\0xhhh" or "\0Xhhh" return the character specified by the hex sequence 'hhh'
+ * Strings like "\0ooo" return the character specified by the octal sequence
+ * 'ooo'. Strings like "\0xhhh" or "\0Xhhh" return the character specified by
+ * the hex sequence 'hhh'.
*
- * If the input string contains leading or trailing spaces, these are ignored.
+ * If the input string contains leading or trailing spaces, these are
+ * ignored.
*/
public static char toChar(String charish) throws InvalidOptionsException {
if (null == charish || charish.length() == 0) {
@@ -300,8 +323,9 @@ public static char toChar(String charish) throws InvalidOptionsException {
if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
if (charish.length() == 3) {
- throw new InvalidOptionsException("Base-16 value expected for character argument."
- + "\nTry --help for usage instructions.");
+ throw new InvalidOptionsException(
+ "Base-16 value expected for character argument."
+ + "\nTry --help for usage instructions.");
} else {
String valStr = charish.substring(3);
int val = Integer.parseInt(valStr, 16);
@@ -323,7 +347,8 @@ public static char toChar(String charish) throws InvalidOptionsException {
return '\\';
} else if (charish.length() > 2) {
// we don't have any 3+ char escape strings.
- throw new InvalidOptionsException("Cannot understand character argument: " + charish
+ throw new InvalidOptionsException(
+ "Cannot understand character argument: " + charish
+ "\nTry --help for usage instructions.");
} else {
// this is some sort of normal 1-character escape sequence.
@@ -344,7 +369,8 @@ public static char toChar(String charish) throws InvalidOptionsException {
case '\\':
return '\\';
default:
- throw new InvalidOptionsException("Cannot understand character argument: " + charish
+ throw new InvalidOptionsException(
+ "Cannot understand character argument: " + charish
+ "\nTry --help for usage instructions.");
}
}
@@ -359,8 +385,9 @@ public static char toChar(String charish) throws InvalidOptionsException {
}
}
- /** get the temporary directory; guaranteed to end in File.separator
- * (e.g., '/')
+ /**
+ * Get the temporary directory; guaranteed to end in File.separator
+ * (e.g., '/').
*/
public String getTmpDir() {
return tmpDir;
@@ -447,7 +474,8 @@ public String getPassword() {
}
/**
- * Allow the user to enter his password on the console without printing characters.
+ * Allow the user to enter his password on the console without printing
+ * characters.
* @return the password as a string
*/
private String securePasswordEntry() {
@@ -475,7 +503,7 @@ public void setDirectMode(boolean isDirect) {
}
/**
- * @return the number of map tasks to use for import
+ * @return the number of map tasks to use for import.
*/
public int getNumMappers() {
return this.numMappers;
@@ -486,7 +514,7 @@ public void setNumMappers(int numMappers) {
}
/**
- * @return the user-specified absolute class name for the table
+ * @return the user-specified absolute class name for the table.
*/
public String getClassName() {
return className;
@@ -497,7 +525,8 @@ public void setClassName(String className) {
}
/**
- * @return the user-specified package to prepend to table names via --package-name.
+ * @return the user-specified package to prepend to table names via
+ * --package-name.
*/
public String getPackageName() {
return packageName;
@@ -515,7 +544,7 @@ public void setHiveHome(String hiveHome) {
this.hiveHome = hiveHome;
}
- /** @return true if we should import the table into Hive */
+ /** @return true if we should import the table into Hive. */
public boolean doHiveImport() {
return hiveImport;
}
@@ -525,7 +554,7 @@ public void setHiveImport(boolean hiveImport) {
}
/**
- * @return the user-specified option to overwrite existing table in hive
+ * @return the user-specified option to overwrite existing table in hive.
*/
public boolean doOverwriteHiveTable() {
return overwriteHiveTable;
@@ -536,7 +565,7 @@ public void setOverwriteHiveTable(boolean overwrite) {
}
/**
- * @return location where .java files go; guaranteed to end with '/'
+ * @return location where .java files go; guaranteed to end with '/'.
*/
public String getCodeOutputDir() {
if (codeOutputDir.endsWith(File.separator)) {
@@ -551,7 +580,8 @@ public void setCodeOutputDir(String outputDir) {
}
/**
- * @return location where .jar and .class files go; guaranteed to end with '/'
+ * @return location where .jar and .class files go; guaranteed to end with
+ * '/'.
*/
public String getJarOutputDir() {
if (jarOutputDir.endsWith(File.separator)) {
@@ -566,7 +596,7 @@ public void setJarOutputDir(String outDir) {
}
/**
- * Return the value of $HADOOP_HOME
+ * Return the value of $HADOOP_HOME.
* @return $HADOOP_HOME, or null if it's not set.
*/
public String getHadoopHome() {
@@ -589,7 +619,7 @@ public void setDebugSqlCmd(String sqlStatement) {
}
/**
- * @return The JDBC driver class name specified with --driver
+ * @return The JDBC driver class name specified with --driver.
*/
public String getDriverClassName() {
return driverClassName;
@@ -622,8 +652,8 @@ public void setFileLayout(FileLayout layout) {
}
/**
- * @return the field delimiter to use when parsing lines. Defaults to the field delim
- * to use when printing lines
+ * @return the field delimiter to use when parsing lines. Defaults to the
+ * field delim to use when printing lines.
*/
public char getInputFieldDelim() {
if (inputFieldDelim == '\000') {
@@ -638,8 +668,8 @@ public void setInputFieldsTerminatedBy(char c) {
}
/**
- * @return the record delimiter to use when parsing lines. Defaults to the record delim
- * to use when printing lines.
+ * @return the record delimiter to use when parsing lines. Defaults to the
+ * record delim to use when printing lines.
*/
public char getInputRecordDelim() {
if (inputRecordDelim == '\000') {
@@ -654,8 +684,8 @@ public void setInputLinesTerminatedBy(char c) {
}
/**
- * @return the character that may enclose fields when parsing lines. Defaults to the
- * enclosing-char to use when printing lines.
+ * @return the character that may enclose fields when parsing lines.
+ * Defaults to the enclosing-char to use when printing lines.
*/
public char getInputEnclosedBy() {
if (inputEnclosedBy == '\000') {
@@ -670,8 +700,8 @@ public void setInputEnclosedBy(char c) {
}
/**
- * @return the escape character to use when parsing lines. Defaults to the escape
- * character used when printing lines.
+ * @return the escape character to use when parsing lines. Defaults to the
+ * escape character used when printing lines.
*/
public char getInputEscapedBy() {
if (inputEscapedBy == '\000') {
@@ -686,8 +716,9 @@ public void setInputEscapedBy(char c) {
}
/**
- * @return true if fields must be enclosed by the --enclosed-by character when parsing.
- * Defaults to false. Set true when --input-enclosed-by is used.
+ * @return true if fields must be enclosed by the --enclosed-by character
+ * when parsing. Defaults to false. Set true when --input-enclosed-by is
+ * used.
*/
public boolean isInputEncloseRequired() {
if (inputEnclosedBy == '\000') {
@@ -702,7 +733,8 @@ public void setInputEncloseRequired(boolean required) {
}
/**
- * @return the character to print between fields when importing them to text.
+ * @return the character to print between fields when importing them to
+ * text.
*/
public char getOutputFieldDelim() {
return this.outputFieldDelim;
@@ -714,7 +746,8 @@ public void setFieldsTerminatedBy(char c) {
/**
- * @return the character to print between records when importing them to text.
+ * @return the character to print between records when importing them to
+ * text.
*/
public char getOutputRecordDelim() {
return this.outputRecordDelim;
@@ -725,7 +758,8 @@ public void setLinesTerminatedBy(char c) {
}
/**
- * @return a character which may enclose the contents of fields when imported to text.
+ * @return a character which may enclose the contents of fields when
+ * imported to text.
*/
public char getOutputEnclosedBy() {
return this.outputEnclosedBy;
@@ -736,7 +770,8 @@ public void setEnclosedBy(char c) {
}
/**
- * @return a character which signifies an escape sequence when importing to text.
+ * @return a character which signifies an escape sequence when importing to
+ * text.
*/
public char getOutputEscapedBy() {
return this.outputEscapedBy;
@@ -747,8 +782,9 @@ public void setEscapedBy(char c) {
}
/**
- * @return true if fields imported to text must be enclosed by the EnclosedBy char.
- * default is false; set to true if --enclosed-by is used instead of --optionally-enclosed-by.
+ * @return true if fields imported to text must be enclosed by the
+ * EnclosedBy char. default is false; set to true if --enclosed-by is used
+ * instead of --optionally-enclosed-by.
*/
public boolean isOutputEncloseRequired() {
return this.outputMustBeEnclosed;
@@ -770,7 +806,7 @@ public void setUseCompression(boolean useCompression) {
}
/**
- * @return the name of the destination table when importing to Hive
+ * @return the name of the destination table when importing to Hive.
*/
public String getHiveTableName() {
if (null != this.hiveTableName) {
@@ -829,7 +865,7 @@ public void setConf(Configuration config) {
}
/**
- * @return command-line arguments after a '-'
+ * @return command-line arguments after a '-'.
*/
public String [] getExtraArgs() {
if (extraArgs == null) {
@@ -855,3 +891,4 @@ public void setExtraArgs(String [] args) {
}
}
}
+
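
The toChar() javadoc reworded above describes three input forms: a literal character, a Java-style backslash escape, and "\0ooo"/"\0xhhh" octal or hex codes. A minimal sketch of that interpretation, covering only a few of the cases and assuming trimmed input (not the patched implementation):

public class ToCharSketch {
  static char toChar(String charish) {
    String s = charish.trim();
    if (s.startsWith("\\0x") || s.startsWith("\\0X")) {
      return (char) Integer.parseInt(s.substring(3), 16); // "\0x41" -> 'A'
    } else if (s.startsWith("\\0") && s.length() > 2) {
      return (char) Integer.parseInt(s.substring(2), 8);  // "\0101" -> 'A'
    } else if (s.equals("\\n")) {
      return '\n';                                        // Java-style escape
    } else {
      return s.charAt(0);                                 // literal character
    }
  }

  public static void main(String[] args) {
    System.out.println((int) toChar("\\0x41")); // 65
    System.out.println((int) toChar("\\n"));    // 10
    System.out.println(toChar("x"));            // x
  }
}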
diff --git a/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java b/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
index 1810476f..670b48ed 100644
--- a/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
+++ b/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
@@ -51,7 +51,7 @@ public class SqoopParser extends GnuParser {
// this Sqoop class, we cannot see their package-specific methods.
// So we just call it by reflection. As long as we're at it, this
// allows us to also put SqoopParser in its own package.
- static java.lang.reflect.Method addValForProcessing;
+ private static java.lang.reflect.Method addValForProcessing;
static {
try {
diff --git a/src/java/org/apache/hadoop/sqoop/cli/ToolOptions.java b/src/java/org/apache/hadoop/sqoop/cli/ToolOptions.java
index ee558a65..d09bf82b 100644
--- a/src/java/org/apache/hadoop/sqoop/cli/ToolOptions.java
+++ b/src/java/org/apache/hadoop/sqoop/cli/ToolOptions.java
@@ -114,7 +114,7 @@ public void printHelp() {
}
/**
- * Print the help to the console using the specified help formatter
+ * Print the help to the console using the specified help formatter.
* @param formatter the HelpFormatter to use.
*/
public void printHelp(HelpFormatter formatter) {
diff --git a/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java b/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java
index 2d52ce93..068f45b1 100644
--- a/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java
+++ b/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java
@@ -86,7 +86,7 @@ private String getHiveBinPath() {
/**
* If we used a MapReduce-based upload of the data, remove the _logs dir
- * from where we put it, before running Hive LOAD DATA INPATH
+ * from where we put it, before running Hive LOAD DATA INPATH.
*/
private void removeTempLogs(String tableName) throws IOException {
FileSystem fs = FileSystem.get(configuration);
@@ -102,7 +102,8 @@ private void removeTempLogs(String tableName) throws IOException {
if (fs.exists(logsPath)) {
LOG.info("Removing temporary files from import process: " + logsPath);
if (!fs.delete(logsPath, true)) {
- LOG.warn("Could not delete temporary files; continuing with import, but it may fail.");
+ LOG.warn("Could not delete temporary files; "
+ + "continuing with import, but it may fail.");
}
}
}
@@ -124,7 +125,7 @@ private boolean isGenerateOnly() {
*/
private File getScriptFile(String outputTableName) throws IOException {
if (!isGenerateOnly()) {
- return File.createTempFile("hive-script-",".txt",
+ return File.createTempFile("hive-script-", ".txt",
new File(options.getTempDir()));
} else {
return new File(new File(options.getCodeOutputDir()),
@@ -194,7 +195,8 @@ public void importTable(String inputTableName, String outputTableName,
try {
w.close();
} catch (IOException ioe) {
- LOG.warn("IOException closing stream to Hive script: " + ioe.toString());
+ LOG.warn("IOException closing stream to Hive script: "
+ + ioe.toString());
}
}
}
diff --git a/src/java/org/apache/hadoop/sqoop/hive/HiveTypes.java b/src/java/org/apache/hadoop/sqoop/hive/HiveTypes.java
index 3cbe8a03..3a58f150 100644
--- a/src/java/org/apache/hadoop/sqoop/hive/HiveTypes.java
+++ b/src/java/org/apache/hadoop/sqoop/hive/HiveTypes.java
@@ -26,10 +26,13 @@
/**
* Defines conversion between SQL types and Hive types.
*/
-public class HiveTypes {
+public final class HiveTypes {
public static final Log LOG = LogFactory.getLog(HiveTypes.class.getName());
+ private HiveTypes() {
+ }
+
/**
* Given JDBC SQL types coming from another database, what is the best
* mapping to a Hive-specific type?
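
The HiveTypes question above (the best Hive type for a given JDBC SQL type) is essentially a lookup table, with isHiveTypeImprovised() flagging the lossy entries. An illustrative table using common mappings, not necessarily the exact ones in HiveTypes.toHiveType():

import java.sql.Types;
import java.util.HashMap;
import java.util.Map;

public class HiveTypeMapSketch {
  public static void main(String[] args) {
    Map<Integer, String> hiveTypes = new HashMap<Integer, String>();
    hiveTypes.put(Types.INTEGER, "INT");
    hiveTypes.put(Types.VARCHAR, "STRING");
    hiveTypes.put(Types.DOUBLE, "DOUBLE");
    // Mappings like TIMESTAMP -> STRING are the "improvised" (lossy) ones.
    hiveTypes.put(Types.TIMESTAMP, "STRING");
    System.out.println(hiveTypes.get(Types.VARCHAR)); // STRING
  }
}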
diff --git a/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java b/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
index dff51a90..b330797c 100644
--- a/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
+++ b/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
@@ -45,7 +45,8 @@
*/
public class TableDefWriter {
- public static final Log LOG = LogFactory.getLog(TableDefWriter.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TableDefWriter.class.getName());
private SqoopOptions options;
private ConnManager connManager;
@@ -139,13 +140,15 @@ public String getCreateTableStmt() throws IOException {
Integer colType = columnTypes.get(col);
String hiveColType = connManager.toHiveType(colType);
if (null == hiveColType) {
- throw new IOException("Hive does not support the SQL type for column " + col);
+ throw new IOException("Hive does not support the SQL type for column "
+ + col);
}
sb.append(col + " " + hiveColType);
if (HiveTypes.isHiveTypeImprovised(colType)) {
- LOG.warn("Column " + col + " had to be cast to a less precise type in Hive");
+ LOG.warn(
+ "Column " + col + " had to be cast to a less precise type in Hive");
}
}
@@ -171,7 +174,7 @@ public String getCreateTableStmt() throws IOException {
org.apache.hadoop.hdfs.server.namenode.NameNode.DEFAULT_PORT;
/**
- * @return the LOAD DATA statement to import the data in HDFS into hive
+ * @return the LOAD DATA statement to import the data in HDFS into hive.
*/
public String getLoadDataStmt() throws IOException {
String warehouseDir = options.getWarehouseDir();
@@ -207,10 +210,9 @@ public String getLoadDataStmt() throws IOException {
* @param charNum the character to use as a delimiter
* @return a string of the form "\ooo" where ooo is an octal number
* in [000, 177].
- * @throws IllegalArgumentException if charNum >> 0177.
+ * @throws IllegalArgumentException if charNum > 0177.
*/
- static String getHiveOctalCharCode(int charNum)
- throws IllegalArgumentException {
+ static String getHiveOctalCharCode(int charNum) {
if (charNum > 0177) {
throw new IllegalArgumentException(
"Character " + charNum + " is an out-of-range delimiter");
diff --git a/src/java/org/apache/hadoop/sqoop/io/FixedLengthInputStream.java b/src/java/org/apache/hadoop/sqoop/io/FixedLengthInputStream.java
index d3a9828c..a4bee680 100644
--- a/src/java/org/apache/hadoop/sqoop/io/FixedLengthInputStream.java
+++ b/src/java/org/apache/hadoop/sqoop/io/FixedLengthInputStream.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.sqoop.io;
-import java.io.FilterInputStream;
import java.io.InputStream;
import java.io.IOException;
diff --git a/src/java/org/apache/hadoop/sqoop/io/LobFile.java b/src/java/org/apache/hadoop/sqoop/io/LobFile.java
index dd163b12..6804f145 100644
--- a/src/java/org/apache/hadoop/sqoop/io/LobFile.java
+++ b/src/java/org/apache/hadoop/sqoop/io/LobFile.java
@@ -76,7 +76,10 @@
* The LobFile format is specified at:
* http://wiki.github.com/cloudera/sqoop/sip-3
*/
-public class LobFile {
+public final class LobFile {
+
+ private LobFile() {
+ }
public static final Log LOG = LogFactory.getLog(LobFile.class.getName());
@@ -716,7 +719,7 @@ public Iterator iterator() {
/**
* Class that writes out a LobFile. Instantiate via LobFile.create().
*/
- public static abstract class Writer implements Closeable {
+ public abstract static class Writer implements Closeable {
/**
* If this Writer is writing to a physical LobFile, then this returns
@@ -733,6 +736,7 @@ public static abstract class Writer implements Closeable {
@Override
protected synchronized void finalize() throws Throwable {
close();
+ super.finalize();
}
/**
@@ -759,7 +763,7 @@ public abstract java.io.Writer writeClobRecord(long len)
throws IOException;
/**
- * Report the current position in the output file
+ * Report the current position in the output file.
* @return the number of bytes written through this Writer.
*/
public abstract long tell() throws IOException;
@@ -795,7 +799,8 @@ private static class V0Writer extends Writer {
// The LobIndex we are constructing.
private LinkedList indexSegments;
- private int entriesInSegment; // number of entries in the current IndexSegment.
+ // Number of entries in the current IndexSegment.
+ private int entriesInSegment;
private IndexTable indexTable;
// Number of entries that can be written to a single IndexSegment.
@@ -1078,7 +1083,7 @@ public java.io.Writer writeClobRecord(long len) throws IOException {
/**
* Class that can read a LobFile. Create with LobFile.open().
*/
- public static abstract class Reader implements Closeable {
+ public abstract static class Reader implements Closeable {
/**
* If this Reader is reading from a physical LobFile, then this returns
* the file path it is reading from. Otherwise it returns null.
@@ -1087,7 +1092,7 @@ public static abstract class Reader implements Closeable {
public abstract Path getPath();
/**
- * Report the current position in the file
+ * Report the current position in the file.
* @return the current offset from the start of the file in bytes.
*/
public abstract long tell() throws IOException;
@@ -1179,6 +1184,7 @@ protected void checkForNull(InputStream in) throws IOException {
@Override
protected synchronized void finalize() throws Throwable {
close();
+ super.finalize();
}
}
@@ -1449,17 +1455,20 @@ private int findRecordStartMark(byte [] buf) {
return -1; // couldn't find it.
}
+ @Override
/** {@inheritDoc} */
public Path getPath() {
return this.path;
}
+ @Override
/** {@inheritDoc} */
public long tell() throws IOException {
checkForNull(this.underlyingInput);
return this.underlyingInput.getPos();
}
+ @Override
/** {@inheritDoc} */
public void seek(long pos) throws IOException {
closeUserStream();
@@ -1576,6 +1585,7 @@ private void closeUserStream() throws IOException {
}
}
+ @Override
/** {@inheritDoc} */
public boolean next() throws IOException {
LOG.debug("Checking for next record");
@@ -1646,26 +1656,31 @@ public boolean next() throws IOException {
return true;
}
+ @Override
/** {@inheritDoc} */
public boolean isRecordAvailable() {
return this.isAligned;
}
+ @Override
/** {@inheritDoc} */
public long getRecordLen() {
return this.claimedRecordLen;
}
+ @Override
/** {@inheritDoc} */
public long getRecordId() {
return this.curEntryId;
}
+ @Override
/** {@inheritDoc} */
public long getRecordOffset() {
return this.curRecordOffset;
}
+ @Override
/** {@inheritDoc} */
public InputStream readBlobRecord() throws IOException {
if (!isRecordAvailable()) {
@@ -1700,6 +1715,7 @@ public InputStream readBlobRecord() throws IOException {
return this.userInputStream;
}
+ @Override
/** {@inheritDoc} */
public java.io.Reader readClobRecord() throws IOException {
// Get a handle to the binary reader and then wrap it.
@@ -1707,6 +1723,7 @@ public java.io.Reader readClobRecord() throws IOException {
return new InputStreamReader(is);
}
+ @Override
/** {@inheritDoc} */
public void close() throws IOException {
closeUserStream();
@@ -1724,6 +1741,7 @@ public void close() throws IOException {
this.isAligned = false;
}
+ @Override
/** {@inheritDoc} */
public boolean isClosed() {
return this.underlyingInput == null;
diff --git a/src/java/org/apache/hadoop/sqoop/io/SplittableBufferedWriter.java b/src/java/org/apache/hadoop/sqoop/io/SplittableBufferedWriter.java
index bf55357e..7926e3eb 100644
--- a/src/java/org/apache/hadoop/sqoop/io/SplittableBufferedWriter.java
+++ b/src/java/org/apache/hadoop/sqoop/io/SplittableBufferedWriter.java
@@ -19,10 +19,8 @@
package org.apache.hadoop.sqoop.io;
import java.io.BufferedWriter;
-import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.IOException;
-import java.util.Formatter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -48,7 +46,7 @@ public SplittableBufferedWriter(
this.alwaysFlush = false;
}
- /** For testing */
+ /** For testing. */
SplittableBufferedWriter(final SplittingOutputStream splitOutputStream,
final boolean alwaysFlush) {
super(new OutputStreamWriter(splitOutputStream));
diff --git a/src/java/org/apache/hadoop/sqoop/lib/BigDecimalSerializer.java b/src/java/org/apache/hadoop/sqoop/lib/BigDecimalSerializer.java
index e75f235b..7c2b9337 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/BigDecimalSerializer.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/BigDecimalSerializer.java
@@ -35,7 +35,7 @@
* We serialize in one of two formats:
*
* First, check whether the BigInt can fit in a long:
- * boolean b = BigIntegerPart > LONG_MAX || BigIntegerPart < LONG_MIN
+ * boolean b = BigIntegerPart &gt; LONG_MAX || BigIntegerPart &lt; LONG_MIN
*
* [int: scale][boolean: b == false][long: BigInt-part]
* [int: scale][boolean: b == true][string: BigInt-part.toString()]
@@ -46,8 +46,10 @@ public final class BigDecimalSerializer {
private BigDecimalSerializer() { }
- static final BigInteger LONG_MAX_AS_BIGINT = BigInteger.valueOf(Long.MAX_VALUE);
- static final BigInteger LONG_MIN_AS_BIGINT = BigInteger.valueOf(Long.MIN_VALUE);
+ static final BigInteger LONG_MAX_AS_BIGINT =
+ BigInteger.valueOf(Long.MAX_VALUE);
+ static final BigInteger LONG_MIN_AS_BIGINT =
+ BigInteger.valueOf(Long.MIN_VALUE);
public static void write(BigDecimal d, DataOutput out) throws IOException {
int scale = d.scale();
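
The reflowed constants above support the fits-in-a-long test from the class comment: the scale is always written, and the unscaled BigInteger goes out either as a long or as a String depending on that test. A standalone restatement of the test (not the serializer itself):

import java.math.BigDecimal;
import java.math.BigInteger;

public class BigDecimalFitSketch {
  static final BigInteger LONG_MAX_AS_BIGINT = BigInteger.valueOf(Long.MAX_VALUE);
  static final BigInteger LONG_MIN_AS_BIGINT = BigInteger.valueOf(Long.MIN_VALUE);

  public static void main(String[] args) {
    BigDecimal d = new BigDecimal("12345.6789");
    BigInteger unscaled = d.unscaledValue(); // 123456789
    int scale = d.scale();                   // 4

    // b == true means the unscaled part cannot fit in a long, so it would be
    // serialized via toString() rather than as a long.
    boolean b = unscaled.compareTo(LONG_MAX_AS_BIGINT) > 0
        || unscaled.compareTo(LONG_MIN_AS_BIGINT) < 0;

    System.out.println("scale=" + scale + " writeAsString=" + b);
  }
}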
diff --git a/src/java/org/apache/hadoop/sqoop/lib/BlobRef.java b/src/java/org/apache/hadoop/sqoop/lib/BlobRef.java
index 43a250d1..92aef56b 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/BlobRef.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/BlobRef.java
@@ -25,19 +25,9 @@
import java.io.InputStream;
import java.util.Arrays;
import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.sqoop.io.LobFile;
-import org.apache.hadoop.sqoop.io.LobReaderCache;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -85,7 +75,7 @@ protected InputStream getInternalSource(BytesWritable data) {
}
@Override
- protected BytesWritable deepCopyData() {
+ protected BytesWritable deepCopyData(BytesWritable data) {
return new BytesWritable(Arrays.copyOf(data.getBytes(), data.getLength()));
}
@@ -94,15 +84,18 @@ public void readFieldsInternal(DataInput in) throws IOException {
// For internally-stored BLOBs, the data is a BytesWritable
// containing the actual data.
- if (null == this.data) {
- this.data = new BytesWritable();
+ BytesWritable data = getDataObj();
+
+ if (null == data) {
+ data = new BytesWritable();
}
- this.data.readFields(in);
+ data.readFields(in);
+ setDataObj(data);
}
@Override
public void writeInternal(DataOutput out) throws IOException {
- data.write(out);
+ getDataObj().write(out);
}
/**
diff --git a/src/java/org/apache/hadoop/sqoop/lib/ClobRef.java b/src/java/org/apache/hadoop/sqoop/lib/ClobRef.java
index 165c20c1..d60fd563 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/ClobRef.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/ClobRef.java
@@ -21,20 +21,11 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.sqoop.io.LobFile;
/**
@@ -73,7 +64,7 @@ protected Reader getInternalSource(String data) {
}
@Override
- protected String deepCopyData() {
+ protected String deepCopyData(String data) {
return data;
}
@@ -85,12 +76,12 @@ protected String getInternalData(String data) {
@Override
public void readFieldsInternal(DataInput in) throws IOException {
// For internally-stored clobs, the data is written as UTF8 Text.
- this.data = Text.readString(in);
+ setDataObj(Text.readString(in));
}
@Override
public void writeInternal(DataOutput out) throws IOException {
- Text.writeString(out, data);
+ Text.writeString(out, getDataObj());
}
/**
diff --git a/src/java/org/apache/hadoop/sqoop/lib/FieldFormatter.java b/src/java/org/apache/hadoop/sqoop/lib/FieldFormatter.java
index bfd14f4c..b5d7f770 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/FieldFormatter.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/FieldFormatter.java
@@ -28,31 +28,35 @@ private FieldFormatter() { }
/**
* Takes an input string representing the value of a field, encloses it in
- * enclosing chars, and escapes any occurrences of such characters in the middle.
- * The escape character itself is also escaped if it appears in the text of the
- * field.
+ * enclosing chars, and escapes any occurrences of such characters in the
+ * middle. The escape character itself is also escaped if it appears in the
+ * text of the field.
*
* The field is enclosed only if:
* enclose != '\000', and:
* encloseRequired is true, or
- * one of the characters in the mustEscapeFor list is present in the string.
+ * one of the characters in the mustEscapeFor list is present
+ * in the string.
*
* Escaping is not performed if the escape char is '\000'.
*
* @param str - The user's string to escape and enclose
- * @param escape - What string to use as the escape sequence. If "" or null, then don't escape.
- * @param enclose - The string to use to enclose str e.g. "quoted". If "" or null, then don't
- * enclose.
- * @param mustEncloseFor - A list of characters; if one is present in 'str', then str must be
- * enclosed
- * @param encloseRequired - If true, then always enclose, regardless of mustEscapeFor
- * @return the escaped, enclosed version of 'str'
+ * @param escape - What string to use as the escape sequence. If "" or null,
+ * then don't escape.
+ * @param enclose - The string to use to enclose str e.g. "quoted". If "" or
+ * null, then don't enclose.
+ * @param mustEncloseFor - A list of characters; if one is present in 'str',
+ * then str must be enclosed.
+ * @param encloseRequired - If true, then always enclose, regardless of
+ * mustEscapeFor.
+ * @return the escaped, enclosed version of 'str'.
*/
- public static final String escapeAndEnclose(String str, String escape, String enclose,
- char [] mustEncloseFor, boolean encloseRequired) {
+ public static String escapeAndEnclose(String str, String escape,
+ String enclose, char [] mustEncloseFor, boolean encloseRequired) {
// true if we can use an escape character.
- boolean escapingLegal = (null != escape && escape.length() > 0 && !escape.equals("\000"));
+ boolean escapingLegal = (null != escape
+ && escape.length() > 0 && !escape.equals("\000"));
String withEscapes;
if (null == str) {
@@ -60,7 +64,7 @@ public static final String escapeAndEnclose(String str, String escape, String en
}
if (escapingLegal) {
- // escaping is legal. Escape any instances of the escape char itself
+ // escaping is legal. Escape any instances of the escape char itself.
withEscapes = str.replace(escape, escape + escape);
} else {
// no need to double-escape
@@ -68,12 +72,13 @@ public static final String escapeAndEnclose(String str, String escape, String en
}
if (null == enclose || enclose.length() == 0 || enclose.equals("\000")) {
- // The enclose-with character was left unset, so we can't enclose items. We're done.
+ // The enclose-with character was left unset, so we can't enclose items.
+ // We're done.
return withEscapes;
}
- // if we have an enclosing character, and escaping is legal, then the encloser must
- // always be escaped.
+ // if we have an enclosing character, and escaping is legal, then the
+ // encloser must always be escaped.
if (escapingLegal) {
withEscapes = withEscapes.replace(enclose, escape + enclose);
}
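
A usage example for the escapeAndEnclose() contract documented above; the expected output noted in the comment is my reading of those rules, not something taken from the tests:

import org.apache.hadoop.sqoop.lib.FieldFormatter;

public class FieldFormatterExample {
  public static void main(String[] args) {
    char [] mustEncloseFor = { ',', '\n' };

    // The field contains a ',', so it gets enclosed in quotes, and the
    // embedded quotes are escaped: roughly "He said \"hi\", bye".
    String out = FieldFormatter.escapeAndEnclose(
        "He said \"hi\", bye", "\\", "\"", mustEncloseFor, false);
    System.out.println(out);
  }
}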
diff --git a/src/java/org/apache/hadoop/sqoop/lib/JdbcWritableBridge.java b/src/java/org/apache/hadoop/sqoop/lib/JdbcWritableBridge.java
index 8d94d59d..42441015 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/JdbcWritableBridge.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/JdbcWritableBridge.java
@@ -20,8 +20,6 @@
import org.apache.hadoop.io.BytesWritable;
import java.math.BigDecimal;
-import java.sql.Blob;
-import java.sql.Clob;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -37,14 +35,16 @@
*/
public final class JdbcWritableBridge {
- // Currently, cap BLOB/CLOB objects at 16 MB until we can use external storage.
- public final static long MAX_BLOB_LENGTH = 16 * 1024 * 1024;
- public final static long MAX_CLOB_LENGTH = 16 * 1024 * 1024;
+ // Currently, cap BLOB/CLOB objects at 16 MB until we can use external
+ // storage.
+ public static final long MAX_BLOB_LENGTH = 16 * 1024 * 1024;
+ public static final long MAX_CLOB_LENGTH = 16 * 1024 * 1024;
private JdbcWritableBridge() {
}
- public static Integer readInteger(int colNum, ResultSet r) throws SQLException {
+ public static Integer readInteger(int colNum, ResultSet r)
+ throws SQLException {
int val;
val = r.getInt(colNum);
if (r.wasNull()) {
@@ -88,7 +88,8 @@ public static Double readDouble(int colNum, ResultSet r) throws SQLException {
}
}
- public static Boolean readBoolean(int colNum, ResultSet r) throws SQLException {
+ public static Boolean readBoolean(int colNum, ResultSet r)
+ throws SQLException {
boolean val;
val = r.getBoolean(colNum);
if (r.wasNull()) {
@@ -102,7 +103,8 @@ public static Time readTime(int colNum, ResultSet r) throws SQLException {
return r.getTime(colNum);
}
- public static Timestamp readTimestamp(int colNum, ResultSet r) throws SQLException {
+ public static Timestamp readTimestamp(int colNum, ResultSet r)
+ throws SQLException {
return r.getTimestamp(colNum);
}
@@ -116,7 +118,8 @@ public static BytesWritable readBytesWritable(int colNum, ResultSet r)
return new BytesWritable(bytes);
}
- public static BigDecimal readBigDecimal(int colNum, ResultSet r) throws SQLException {
+ public static BigDecimal readBigDecimal(int colNum, ResultSet r)
+ throws SQLException {
return r.getBigDecimal(colNum);
}
@@ -132,8 +135,8 @@ public static ClobRef readClobRef(int colNum, ResultSet r)
return null;
}
- public static void writeInteger(Integer val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeInteger(Integer val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -141,8 +144,8 @@ public static void writeInteger(Integer val, int paramIdx, int sqlType, Prepared
}
}
- public static void writeLong(Long val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeLong(Long val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -150,8 +153,8 @@ public static void writeLong(Long val, int paramIdx, int sqlType, PreparedStatem
}
}
- public static void writeDouble(Double val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeDouble(Double val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -159,8 +162,8 @@ public static void writeDouble(Double val, int paramIdx, int sqlType, PreparedSt
}
}
- public static void writeBoolean(Boolean val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeBoolean(Boolean val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -168,8 +171,8 @@ public static void writeBoolean(Boolean val, int paramIdx, int sqlType, Prepared
}
}
- public static void writeFloat(Float val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeFloat(Float val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -177,8 +180,8 @@ public static void writeFloat(Float val, int paramIdx, int sqlType, PreparedStat
}
}
- public static void writeString(String val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeString(String val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -186,8 +189,8 @@ public static void writeString(String val, int paramIdx, int sqlType, PreparedSt
}
}
- public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -195,8 +198,8 @@ public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType, Prep
}
}
- public static void writeTime(Time val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeTime(Time val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -204,8 +207,8 @@ public static void writeTime(Time val, int paramIdx, int sqlType, PreparedStatem
}
}
- public static void writeDate(Date val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeDate(Date val, int paramIdx, int sqlType,
+ PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
@@ -228,8 +231,8 @@ public static void writeBytesWritable(BytesWritable val, int paramIdx,
}
- public static void writeBigDecimal(BigDecimal val, int paramIdx, int sqlType, PreparedStatement s)
- throws SQLException {
+ public static void writeBigDecimal(BigDecimal val, int paramIdx,
+ int sqlType, PreparedStatement s) throws SQLException {
if (null == val) {
s.setNull(paramIdx, sqlType);
} else {
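
All of the write* signatures rewrapped above share one null-handling shape, shown here as a standalone restatement for a single Integer column (a sketch, not the patched class):

import java.sql.PreparedStatement;
import java.sql.SQLException;

public class NullWriteSketch {
  public static void writeInteger(Integer val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      // Null Java fields become typed SQL NULLs in the statement.
      s.setNull(paramIdx, sqlType);
    } else {
      s.setInt(paramIdx, val);
    }
  }
}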
diff --git a/src/java/org/apache/hadoop/sqoop/lib/LargeObjectLoader.java b/src/java/org/apache/hadoop/sqoop/lib/LargeObjectLoader.java
index a31fb0b3..6769d33b 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/LargeObjectLoader.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/LargeObjectLoader.java
@@ -18,30 +18,21 @@
package org.apache.hadoop.sqoop.lib;
-import java.io.BufferedOutputStream;
-import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
-import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.Clob;
-import java.sql.Date;
-import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.sqoop.io.LobFile;
/**
@@ -57,9 +48,9 @@
public class LargeObjectLoader implements Closeable {
// Spill to external storage for BLOB/CLOB objects > 16 MB.
- public final static long DEFAULT_MAX_LOB_LENGTH = 16 * 1024 * 1024;
+ public static final long DEFAULT_MAX_LOB_LENGTH = 16 * 1024 * 1024;
- public final static String MAX_INLINE_LOB_LEN_KEY =
+ public static final String MAX_INLINE_LOB_LEN_KEY =
"sqoop.inline.lob.length.max";
private Configuration conf;
@@ -75,7 +66,7 @@ public class LargeObjectLoader implements Closeable {
private long nextLobFileId = 0;
/**
- * Create a new LargeObjectLoader
+ * Create a new LargeObjectLoader.
* @param conf the Configuration to use
* @param workPath the HDFS working directory for this task.
*/
@@ -91,6 +82,7 @@ public LargeObjectLoader(Configuration conf, Path workPath)
@Override
protected synchronized void finalize() throws Throwable {
close();
+ super.finalize();
}
@Override
diff --git a/src/java/org/apache/hadoop/sqoop/lib/LobRef.java b/src/java/org/apache/hadoop/sqoop/lib/LobRef.java
index 51f7b7bf..23b05d75 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/LobRef.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/LobRef.java
@@ -22,12 +22,10 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.io.InputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -57,7 +55,7 @@ protected LobRef() {
this.offset = 0;
this.length = 0;
- this.data = null;
+ this.realData = null;
}
protected LobRef(CONTAINERTYPE container) {
@@ -65,7 +63,7 @@ protected LobRef(CONTAINERTYPE container) {
this.offset = 0;
this.length = 0;
- this.data = container;
+ this.realData = container;
}
protected LobRef(String file, long offset, long length) {
@@ -73,11 +71,21 @@ protected LobRef(String file, long offset, long length) {
this.offset = offset;
this.length = length;
- this.data = null;
+ this.realData = null;
}
// If the data is 'small', it's held directly, here.
- protected CONTAINERTYPE data;
+ private CONTAINERTYPE realData;
+
+ /** Internal API to retrieve the data object. */
+ protected CONTAINERTYPE getDataObj() {
+ return realData;
+ }
+
+ /** Internal API to set the data object. */
+ protected void setDataObj(CONTAINERTYPE data) {
+ this.realData = data;
+ }
// If there data is too large to materialize fully, it's written into a file
// whose path (relative to the rest of the dataset) is recorded here. This
@@ -90,7 +98,7 @@ protected LobRef(String file, long offset, long length) {
private long length;
// If we've opened a LobFile object, track our reference to it here.
- protected LobFile.Reader reader;
+ private LobFile.Reader lobReader;
@Override
@SuppressWarnings("unchecked")
@@ -102,9 +110,9 @@ public Object clone() throws CloneNotSupportedException {
LobRef r =
(LobRef) super.clone();
- r.reader = null; // Reference to opened reader is not duplicated.
- if (null != data) {
- r.data = deepCopyData();
+ r.lobReader = null; // Reference to opened reader is not duplicated.
+ if (null != realData) {
+ r.realData = deepCopyData(realData);
}
return r;
@@ -113,12 +121,13 @@ public Object clone() throws CloneNotSupportedException {
@Override
protected synchronized void finalize() throws Throwable {
close();
+ super.finalize();
}
public void close() throws IOException {
// Discard any open LobReader.
- if (null != this.reader) {
- LobReaderCache.getCache().recycle(this.reader);
+ if (null != this.lobReader) {
+ LobReaderCache.getCache().recycle(this.lobReader);
}
}
@@ -142,7 +151,7 @@ public boolean isExternal() {
* @throws IOException if it could not read the LOB from external storage.
*/
public ACCESSORTYPE getDataStream(Mapper.Context mapContext)
- throws IllegalArgumentException, IOException {
+ throws IOException {
InputSplit split = mapContext.getInputSplit();
if (split instanceof FileSplit) {
Path basePath = ((FileSplit) split).getPath().getParent();
@@ -171,35 +180,35 @@ public ACCESSORTYPE getDataStream(Configuration conf, Path basePath)
Path pathToRead = LobReaderCache.qualify(
new Path(basePath, fileName), conf);
LOG.debug("Retreving data stream from external path: " + pathToRead);
- if (reader != null) {
+ if (lobReader != null) {
// We already have a reader open to a LobFile. Is it the correct file?
- if (!pathToRead.equals(reader.getPath())) {
- // No. Close this reader and get the correct one.
+ if (!pathToRead.equals(lobReader.getPath())) {
+ // No. Close this.lobReader and get the correct one.
LOG.debug("Releasing previous external reader for "
- + reader.getPath());
- LobReaderCache.getCache().recycle(reader);
- reader = LobReaderCache.getCache().get(pathToRead, conf);
+ + lobReader.getPath());
+ LobReaderCache.getCache().recycle(lobReader);
+ lobReader = LobReaderCache.getCache().get(pathToRead, conf);
}
} else {
- reader = LobReaderCache.getCache().get(pathToRead, conf);
+ lobReader = LobReaderCache.getCache().get(pathToRead, conf);
}
// We now have a LobFile.Reader associated with the correct file. Get to
// the correct offset and return an InputStream/Reader to the user.
- if (reader.tell() != offset) {
+ if (lobReader.tell() != offset) {
LOG.debug("Seeking to record start offset " + offset);
- reader.seek(offset);
+ lobReader.seek(offset);
}
- if (!reader.next()) {
+ if (!lobReader.next()) {
throw new IOException("Could not locate record at " + pathToRead
+ ":" + offset);
}
- return getExternalSource(reader);
+ return getExternalSource(lobReader);
} else {
// This data is already materialized in memory; wrap it and return.
- return getInternalSource(data);
+ return getInternalSource(realData);
}
}
@@ -223,7 +232,7 @@ protected abstract ACCESSORTYPE getExternalSource(LobFile.Reader reader)
/**
* Make a copy of the materialized data.
*/
- protected abstract CONTAINERTYPE deepCopyData();
+ protected abstract CONTAINERTYPE deepCopyData(CONTAINERTYPE data);
public DATATYPE getData() {
if (isExternal()) {
@@ -231,7 +240,7 @@ public DATATYPE getData() {
"External LOBs must be read via getDataStream()");
}
- return getInternalData(data);
+ return getInternalData(realData);
}
@Override
@@ -240,7 +249,7 @@ public String toString() {
return "externalLob(lf," + fileName + "," + Long.toString(offset)
+ "," + Long.toString(length) + ")";
} else {
- return data.toString();
+ return realData.toString();
}
}
@@ -260,7 +269,7 @@ public void readFields(DataInput in) throws IOException {
boolean isExternal = in.readBoolean();
if (isExternal) {
- this.data = null;
+ this.realData = null;
String storageType = Text.readString(in);
if (!storageType.equals("lf")) {
diff --git a/src/java/org/apache/hadoop/sqoop/lib/LobSerializer.java b/src/java/org/apache/hadoop/sqoop/lib/LobSerializer.java
index 527c5954..d38ff70d 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/LobSerializer.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/LobSerializer.java
@@ -29,11 +29,13 @@ public final class LobSerializer {
private LobSerializer() { }
- public static void writeClob(ClobRef clob, DataOutput out) throws IOException {
+ public static void writeClob(ClobRef clob, DataOutput out)
+ throws IOException {
clob.write(out);
}
- public static void writeBlob(BlobRef blob, DataOutput out) throws IOException {
+ public static void writeBlob(BlobRef blob, DataOutput out)
+ throws IOException {
blob.write(out);
}
diff --git a/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java b/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java
index d7baa6c6..fce0579b 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java
@@ -65,6 +65,9 @@ private enum ParseState {
UNENCLOSED_ESCAPE
}
+ /**
+ * An error thrown when parsing fails.
+ */
public static class ParseError extends Exception {
public ParseError() {
super("ParseError");
@@ -246,7 +249,8 @@ record sep halts processing.
sb.append(curChar);
if (this.enclosingRequired) {
- throw new ParseError("Opening field-encloser expected at position " + pos);
+ throw new ParseError(
+ "Opening field-encloser expected at position " + pos);
}
}
@@ -285,15 +289,15 @@ record sep halts processing.
break;
case ENCLOSED_ESCAPE:
- // Treat this character literally, whatever it is, and return to enclosed
- // field processing.
+ // Treat this character literally, whatever it is, and return to
+ // enclosed field processing.
sb.append(curChar);
state = ParseState.ENCLOSED_FIELD;
break;
case ENCLOSED_EXPECT_DELIMITER:
- // We were in an enclosed field, but got the final encloser. Now we expect
- // either an end-of-field or an end-of-record.
+ // We were in an enclosed field, but got the final encloser. Now we
+ // expect either an end-of-field or an end-of-record.
if (this.fieldDelim == curChar) {
// end of one field is the beginning of the next.
state = ParseState.FIELD_START;
@@ -308,8 +312,8 @@ record sep halts processing.
break;
case UNENCLOSED_ESCAPE:
- // Treat this character literally, whatever it is, and return to non-enclosed
- // field processing.
+ // Treat this character literally, whatever it is, and return to
+ // non-enclosed field processing.
sb.append(curChar);
state = ParseState.UNENCLOSED_FIELD;
break;
@@ -342,8 +346,8 @@ public boolean isEnclosingRequired() {
@Override
public String toString() {
- return "RecordParser[" + fieldDelim + ',' + recordDelim + ',' + enclosingChar + ','
- + escapeChar + ',' + enclosingRequired + "]";
+ return "RecordParser[" + fieldDelim + ',' + recordDelim + ','
+ + enclosingChar + ',' + escapeChar + ',' + enclosingRequired + "]";
}
@Override
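As a rough illustration of the escape and encloser handling touched by the hunks above, here is a standalone sketch (not Sqoop's RecordParser API; the delimiter choices in main are made up) that splits one record on a field delimiter while honoring an escape character and enclosing quotes.

import java.util.ArrayList;
import java.util.List;

public final class DemoFieldSplitter {

  /**
   * Splits one record on fieldDelim, treating any character after escapeChar
   * literally and keeping text between encloser characters intact.
   */
  public static List<String> split(String record, char fieldDelim,
      char encloser, char escapeChar) {
    List<String> fields = new ArrayList<String>();
    StringBuilder sb = new StringBuilder();
    boolean enclosed = false;
    boolean escaped = false;
    for (int i = 0; i < record.length(); i++) {
      char c = record.charAt(i);
      if (escaped) {
        sb.append(c);         // Treat this character literally.
        escaped = false;
      } else if (c == escapeChar) {
        escaped = true;
      } else if (c == encloser) {
        enclosed = !enclosed; // Enter or leave an enclosed field.
      } else if (c == fieldDelim && !enclosed) {
        fields.add(sb.toString());
        sb.setLength(0);      // End of one field is the start of the next.
      } else {
        sb.append(c);
      }
    }
    fields.add(sb.toString());
    return fields;
  }

  public static void main(String[] args) {
    // Prints: [1, O'Brien, quoted, comma] (encloser and escape are stripped).
    System.out.println(split("1,O\\'Brien,\"quoted, comma\"", ',', '"', '\\'));
  }
}

The real parser tracks these cases with an explicit state enum (FIELD_START, ENCLOSED_FIELD, ENCLOSED_ESCAPE, ENCLOSED_EXPECT_DELIMITER, UNENCLOSED_ESCAPE, and so on) rather than two booleans, which is what the reflowed comments above describe.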
diff --git a/src/java/org/apache/hadoop/sqoop/lib/SqoopRecord.java b/src/java/org/apache/hadoop/sqoop/lib/SqoopRecord.java
index a4cdf881..7611883c 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/SqoopRecord.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/SqoopRecord.java
@@ -32,21 +32,21 @@
* Interface implemented by the classes generated by sqoop's orm.ClassWriter.
*/
public interface SqoopRecord extends Cloneable, DBWritable, Writable {
- public void parse(CharSequence s) throws RecordParser.ParseError;
- public void parse(Text s) throws RecordParser.ParseError;
- public void parse(byte [] s) throws RecordParser.ParseError;
- public void parse(char [] s) throws RecordParser.ParseError;
- public void parse(ByteBuffer s) throws RecordParser.ParseError;
- public void parse(CharBuffer s) throws RecordParser.ParseError;
- public void loadLargeObjects(LargeObjectLoader objLoader)
+ void parse(CharSequence s) throws RecordParser.ParseError;
+ void parse(Text s) throws RecordParser.ParseError;
+ void parse(byte [] s) throws RecordParser.ParseError;
+ void parse(char [] s) throws RecordParser.ParseError;
+ void parse(ByteBuffer s) throws RecordParser.ParseError;
+ void parse(CharBuffer s) throws RecordParser.ParseError;
+ void loadLargeObjects(LargeObjectLoader objLoader)
throws SQLException, IOException, InterruptedException;
- public Object clone() throws CloneNotSupportedException;
+ Object clone() throws CloneNotSupportedException;
/**
* Inserts the data in this object into the PreparedStatement, starting
* at parameter 'offset'.
* @return the number of fields written to the statement.
*/
- public int write(PreparedStatement stmt, int offset) throws SQLException;
+ int write(PreparedStatement stmt, int offset) throws SQLException;
}
diff --git a/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java b/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
index 467c0f2a..e8c20651 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
@@ -37,12 +37,12 @@
public abstract class ConnManager {
/**
- * Return a list of all databases on a server
+ * Return a list of all databases on a server.
*/
public abstract String [] listDatabases();
/**
- * Return a list of all tables in a database
+ * Return a list of all tables in a database.
*/
public abstract String [] listTables();
@@ -57,14 +57,14 @@ public abstract class ConnManager {
public abstract String getPrimaryKey(String tableName);
/**
- * Return java type for SQL type
+ * Return java type for SQL type.
* @param sqlType sql type
* @return java type
*/
public abstract String toJavaType(int sqlType);
/**
- * Return hive type for SQL type
+ * Return hive type for SQL type.
* @param sqlType sql type
* @return hive type
*/
@@ -86,25 +86,27 @@ public abstract class ConnManager {
* returned ResultSet object, and for calling release() after that to free
* internal state.
*/
- public abstract ResultSet readTable(String tableName, String [] columns) throws SQLException;
+ public abstract ResultSet readTable(String tableName, String [] columns)
+ throws SQLException;
/**
- * @return the actual database connection
+ * @return the actual database connection.
*/
public abstract Connection getConnection() throws SQLException;
/**
- * @return a string identifying the driver class to load for this JDBC connection type.
+ * @return a string identifying the driver class to load for this
+ * JDBC connection type.
*/
public abstract String getDriverClass();
/**
- * Execute a SQL statement 's' and print its results to stdout
+ * Execute a SQL statement 's' and print its results to stdout.
*/
public abstract void execAndPrint(String s);
/**
- * Perform an import of a table from the database into HDFS
+ * Perform an import of a table from the database into HDFS.
*/
public abstract void importTable(ImportJobContext context)
throws IOException, ImportException;
@@ -139,7 +141,7 @@ public String escapeTableName(String tableName) {
public abstract void close() throws SQLException;
/**
- * Export data stored in HDFS into a table in a database
+ * Export data stored in HDFS into a table in a database.
*/
public void exportTable(ExportJobContext context)
throws IOException, ExportException {
diff --git a/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java b/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java
index 297c19a8..c6decdc6 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java
@@ -29,7 +29,8 @@
*/
public final class DefaultManagerFactory extends ManagerFactory {
- public static final Log LOG = LogFactory.getLog(DefaultManagerFactory.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ DefaultManagerFactory.class.getName());
public ConnManager accept(SqoopOptions options) {
String manualDriver = options.getDriverClassName();
diff --git a/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java b/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java
index 60cfea9a..b5b97df6 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java
@@ -46,7 +46,8 @@
*/
public class DirectMySQLManager extends MySQLManager {
- public static final Log LOG = LogFactory.getLog(DirectMySQLManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ DirectMySQLManager.class.getName());
public DirectMySQLManager(final SqoopOptions options) {
super(options, false);
diff --git a/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java b/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
index 9dc7afde..88adf3da 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
@@ -51,10 +51,12 @@
* commands.
*/
public class DirectPostgresqlManager extends PostgresqlManager {
- public static final Log LOG = LogFactory.getLog(DirectPostgresqlManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ DirectPostgresqlManager.class.getName());
public DirectPostgresqlManager(final SqoopOptions opts) {
- // Inform superclass that we're overriding import method via alt. constructor.
+ // Inform superclass that we're overriding import method via alt.
+ // constructor.
super(opts, true);
}
@@ -68,8 +70,8 @@ static class PostgresqlAsyncSink extends ErrorableAsyncSink {
private final PerfCounters counters;
private final SqoopOptions options;
- PostgresqlAsyncSink(final SplittableBufferedWriter w, final SqoopOptions opts,
- final PerfCounters ctrs) {
+ PostgresqlAsyncSink(final SplittableBufferedWriter w,
+ final SqoopOptions opts, final PerfCounters ctrs) {
this.writer = w;
this.options = opts;
this.counters = ctrs;
@@ -81,14 +83,16 @@ public void processStream(InputStream is) {
}
private static class PostgresqlStreamThread extends ErrorableThread {
- public static final Log LOG = LogFactory.getLog(PostgresqlStreamThread.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ PostgresqlStreamThread.class.getName());
private final SplittableBufferedWriter writer;
private final InputStream stream;
private final SqoopOptions options;
private final PerfCounters counters;
- PostgresqlStreamThread(final InputStream is, final SplittableBufferedWriter w,
+ PostgresqlStreamThread(final InputStream is,
+ final SplittableBufferedWriter w,
final SqoopOptions opts, final PerfCounters ctrs) {
this.stream = is;
this.writer = w;
@@ -143,7 +147,8 @@ public void run() {
}
/**
- Takes a list of columns and turns them into a string like "col1, col2, col3..."
+ * Takes a list of columns and turns them into a string like
+ * "col1, col2, col3...".
*/
private String getColumnListStr(String [] cols) {
if (null == cols) {
@@ -164,20 +169,20 @@ private String getColumnListStr(String [] cols) {
}
/**
- * @return the Postgresql-specific SQL command to copy the table ("COPY .... TO STDOUT")
+ * @return the Postgresql-specific SQL command to copy the
+ * table ("COPY .... TO STDOUT").
*/
private String getCopyCommand(String tableName) {
- /*
- Format of this command is:
- COPY table(col, col....) TO STDOUT
- or COPY ( query ) TO STDOUT
- WITH DELIMITER 'fieldsep'
- CSV
- QUOTE 'quotechar'
- ESCAPE 'escapechar'
- FORCE QUOTE col, col, col....
- */
+ // Format of this command is:
+ //
+ // COPY table(col, col....) TO STDOUT
+ // or COPY ( query ) TO STDOUT
+ // WITH DELIMITER 'fieldsep'
+ // CSV
+ // QUOTE 'quotechar'
+ // ESCAPE 'escapechar'
+ // FORCE QUOTE col, col, col....
StringBuilder sb = new StringBuilder();
String [] cols = getColumnNames(tableName);
@@ -238,12 +243,12 @@ or COPY ( query ) TO STDOUT
return copyCmd;
}
- /** Write the COPY command to a temp file
+ /** Write the COPY command to a temp file.
* @return the filename we wrote to.
*/
private String writeCopyCommand(String command) throws IOException {
String tmpDir = options.getTempDir();
- File tempFile = File.createTempFile("tmp-",".sql", new File(tmpDir));
+ File tempFile = File.createTempFile("tmp-", ".sql", new File(tmpDir));
BufferedWriter w = new BufferedWriter(
new OutputStreamWriter(new FileOutputStream(tempFile)));
w.write(command);
@@ -258,7 +263,7 @@ private String writeCopyCommand(String command) throws IOException {
private String writePasswordFile(String password) throws IOException {
String tmpDir = options.getTempDir();
- File tempFile = File.createTempFile("pgpass",".pgpass", new File(tmpDir));
+ File tempFile = File.createTempFile("pgpass", ".pgpass", new File(tmpDir));
LOG.debug("Writing password to tempfile: " + tempFile);
// Make sure it's only readable by the current user.
@@ -300,9 +305,9 @@ public void importTable(ImportJobContext context)
PerfCounters counters = new PerfCounters();
try {
- // Get the COPY TABLE command to issue, write this to a file, and pass it
- // in to psql with -f filename.
- // Then make sure we delete this file in our finally block.
+ // Get the COPY TABLE command to issue, write this to a file, and pass
+ // it in to psql with -f filename. Then make sure we delete this file
+ // in our finally block.
String copyCmd = getCopyCommand(tableName);
commandFilename = writeCopyCommand(copyCmd);
@@ -312,10 +317,10 @@ public void importTable(ImportJobContext context)
// Environment to pass to psql.
List<String> envp = Executor.getCurEnvpStrings();
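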
- // We need to parse the connect string URI to determine the database name
- // and the host and port. If the host is localhost and the port is not specified,
- // we don't want to pass this to psql, because we want to force the use of a
- // UNIX domain socket, not a TCP/IP socket.
+ // We need to parse the connect string URI to determine the database
+ // name and the host and port. If the host is localhost and the port is
+ // not specified, we don't want to pass this to psql, because we want to
+ // force the use of a UNIX domain socket, not a TCP/IP socket.
String connectString = options.getConnectString();
String databaseName = JdbcUrl.getDatabaseName(connectString);
String hostname = JdbcUrl.getHostName(connectString);
@@ -325,7 +330,8 @@ public void importTable(ImportJobContext context)
throw new ImportException("Could not determine database name");
}
- LOG.info("Performing import of table " + tableName + " from database " + databaseName);
+ LOG.info("Performing import of table " + tableName + " from database "
+ + databaseName);
args.add(PSQL_CMD); // requires that this is on the path.
args.add("--tuples-only");
args.add("--quiet");
@@ -401,7 +407,8 @@ public void importTable(ImportJobContext context)
// Remove any password file we wrote
if (null != passwordFilename) {
if (!new File(passwordFilename).delete()) {
- LOG.error("Could not remove postgresql password file " + passwordFilename);
+ LOG.error("Could not remove postgresql password file "
+ + passwordFilename);
LOG.error("You should remove this file to protect your credentials.");
}
}
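The comment block rewritten above spells out the shape of the generated statement. As a simplified sketch (hard-coded delimiter, quote, and escape characters rather than the user's configured output options), building such a command might look like:

// Simplified sketch: assemble a PostgreSQL "COPY ... TO STDOUT" command of
// the shape described above. Real code derives the delimiter, quote, and
// escape characters from the user's output options.
public final class DemoCopyCommand {

  static String getCopyCommand(String tableName, String[] cols) {
    StringBuilder sb = new StringBuilder();
    sb.append("COPY ").append(tableName).append("(");
    for (int i = 0; i < cols.length; i++) {
      if (i > 0) {
        sb.append(", ");
      }
      sb.append(cols[i]);
    }
    sb.append(") TO STDOUT WITH DELIMITER ',' CSV QUOTE '\"' ESCAPE '\\'");
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(getCopyCommand("employees",
        new String[] { "id", "name", "salary" }));
  }
}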
diff --git a/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java b/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java
index d206f108..b6325450 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java
@@ -30,7 +30,8 @@ public class ExportJobContext {
private String jarFile;
private SqoopOptions options;
- public ExportJobContext(final String table, final String jar, final SqoopOptions opts) {
+ public ExportJobContext(final String table, final String jar,
+ final SqoopOptions opts) {
this.tableName = table;
this.jarFile = jar;
this.options = opts;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java b/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java
index 0c46a17e..236b3e7a 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java
@@ -33,7 +33,8 @@
*/
public class GenericJdbcManager extends SqlManager {
- public static final Log LOG = LogFactory.getLog(GenericJdbcManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ GenericJdbcManager.class.getName());
private String jdbcDriverClass;
private Connection connection;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java b/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java
index bd70441e..f4aa814f 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java
@@ -29,13 +29,14 @@
*/
public class HsqldbManager extends GenericJdbcManager {
- public static final Log LOG = LogFactory.getLog(HsqldbManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ HsqldbManager.class.getName());
// driver class to ensure is loaded when making db connection.
private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver";
- // HsqlDb doesn't have a notion of multiple "databases"; the user's database is always called
- // "PUBLIC";
+ // HsqlDb doesn't have a notion of multiple "databases"; the user's database
+ // is always called "PUBLIC".
private static final String HSQL_SCHEMA_NAME = "PUBLIC";
public HsqldbManager(final SqoopOptions opts) {
@@ -43,7 +44,8 @@ public HsqldbManager(final SqoopOptions opts) {
}
/**
- * Note: HSqldb only supports a single schema named "PUBLIC"
+ * Return list of databases hosted by the server.
+ * HSQLDB only supports a single schema named "PUBLIC".
*/
@Override
public String[] listDatabases() {
diff --git a/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java b/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java
index a5633fb0..a5e0dba0 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java
@@ -33,7 +33,8 @@ public class ImportJobContext {
private SqoopOptions options;
private Class<? extends InputFormat> inputFormatClass;
- public ImportJobContext(final String table, final String jar, final SqoopOptions opts) {
+ public ImportJobContext(final String table, final String jar,
+ final SqoopOptions opts) {
this.tableName = table;
this.jarFile = jar;
this.options = opts;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java b/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
index 95b2ec8c..a1c819ae 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
@@ -37,7 +37,7 @@
import org.apache.hadoop.sqoop.util.ImportException;
/**
- * Manages connections to MySQL databases
+ * Manages connections to MySQL databases.
*/
public class MySQLManager extends GenericJdbcManager {
@@ -103,7 +103,7 @@ public String[] listDatabases() {
@Override
public void importTable(ImportJobContext context)
- throws IOException, ImportException {
+ throws IOException, ImportException {
// Check that we're not doing a MapReduce from localhost. If we are, point
// out that we could use mysqldump.
diff --git a/src/java/org/apache/hadoop/sqoop/manager/MySQLUtils.java b/src/java/org/apache/hadoop/sqoop/manager/MySQLUtils.java
index 6865376e..5e4a541f 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/MySQLUtils.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/MySQLUtils.java
@@ -18,28 +18,20 @@
package org.apache.hadoop.sqoop.manager;
-import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
import java.io.OutputStreamWriter;
-import java.nio.CharBuffer;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.util.DirectImportUtils;
/**
- * Helper methods and constants for MySQL imports/exports
+ * Helper methods and constants for MySQL imports/exports.
*/
public final class MySQLUtils {
@@ -100,7 +92,7 @@ public static String writePasswordFile(Configuration conf)
// Create the temp file to hold the user's password.
String tmpDir = conf.get(
HadoopShim.get().getJobLocalDirProperty(), "/tmp/");
- File tempFile = File.createTempFile("mysql-cnf",".cnf", new File(tmpDir));
+ File tempFile = File.createTempFile("mysql-cnf", ".cnf", new File(tmpDir));
// Make the password file only private readable.
DirectImportUtils.setFilePermissions(tempFile, "0600");
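The "only private readable" step above relies on Sqoop's DirectImportUtils helper; a rough standalone equivalent using only the standard java.nio.file API (POSIX filesystems assumed) is:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.attribute.PosixFilePermissions;

public final class DemoPrivateTempFile {

  public static void main(String[] args) throws IOException {
    // Create a temp file and restrict it to owner read/write (0600),
    // as is done for the generated .cnf password file above.
    File tempFile = File.createTempFile("mysql-cnf", ".cnf");
    Files.setPosixFilePermissions(tempFile.toPath(),
        PosixFilePermissions.fromString("rw-------"));
    System.out.println("Wrote " + tempFile + " (owner read/write only)");
    tempFile.deleteOnExit();
  }
}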
diff --git a/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java b/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
index 26b65f75..19afe059 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
@@ -47,7 +47,8 @@
*/
public class OracleManager extends GenericJdbcManager {
- public static final Log LOG = LogFactory.getLog(OracleManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ OracleManager.class.getName());
// driver class to ensure is loaded when making db connection.
private static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver";
@@ -65,8 +66,8 @@ private static class ConnCache {
public static final Log LOG = LogFactory.getLog(ConnCache.class.getName());
private static class CacheKey {
- public final String connectString;
- public final String username;
+ private final String connectString;
+ private final String username;
public CacheKey(String connect, String user) {
this.connectString = connect;
@@ -212,7 +213,8 @@ protected Connection makeConnection() throws SQLException {
try {
Class.forName(driverClass);
} catch (ClassNotFoundException cnfe) {
- throw new RuntimeException("Could not load db driver class: " + driverClass);
+ throw new RuntimeException("Could not load db driver class: "
+ + driverClass);
}
String username = options.getUsername();
@@ -242,34 +244,37 @@ protected Connection makeConnection() throws SQLException {
}
/**
- * Set session time zone
+ * Set session time zone.
* @param conn Connection object
* @throws SQLException instance
*/
private void setSessionTimeZone(Connection conn) throws SQLException {
- // need to use reflection to call the method setSessionTimeZone on the OracleConnection class
- // because oracle specific java libraries are not accessible in this context
+ // Need to use reflection to call the method setSessionTimeZone on the
+ // OracleConnection class because oracle specific java libraries are not
+ // accessible in this context.
Method method;
try {
method = conn.getClass().getMethod(
"setSessionTimeZone", new Class [] {String.class});
} catch (Exception ex) {
- LOG.error("Could not find method setSessionTimeZone in " + conn.getClass().getName(), ex);
+ LOG.error("Could not find method setSessionTimeZone in "
+ + conn.getClass().getName(), ex);
// rethrow SQLException
throw new SQLException(ex);
}
- // Need to set the time zone in order for Java
- // to correctly access the column "TIMESTAMP WITH LOCAL TIME ZONE".
- // The user may have set this in the configuration as 'oracle.sessionTimeZone'.
- String clientTimeZoneStr = options.getConf().get(ORACLE_TIMEZONE_KEY, "GMT");
+ // Need to set the time zone in order for Java to correctly access the
+ // column "TIMESTAMP WITH LOCAL TIME ZONE". The user may have set this in
+ // the configuration as 'oracle.sessionTimeZone'.
+ String clientTimeZoneStr = options.getConf().get(ORACLE_TIMEZONE_KEY,
+ "GMT");
try {
method.setAccessible(true);
method.invoke(conn, clientTimeZoneStr);
LOG.info("Time zone has been set to " + clientTimeZoneStr);
} catch (Exception ex) {
- LOG.warn("Time zone " + clientTimeZoneStr +
- " could not be set on Oracle database.");
+ LOG.warn("Time zone " + clientTimeZoneStr
+ + " could not be set on Oracle database.");
LOG.info("Setting default time zone: GMT");
try {
// Per the documentation at:
@@ -310,7 +315,8 @@ public void exportTable(ExportJobContext context)
}
@Override
- public ResultSet readTable(String tableName, String[] columns) throws SQLException {
+ public ResultSet readTable(String tableName, String[] columns)
+ throws SQLException {
if (columns == null) {
columns = getColumnNames(tableName);
}
@@ -408,14 +414,14 @@ private String dbToHiveType(int sqlType) {
}
/**
- * Get database type
+ * Get database type.
* @param clazz oracle class representing sql types
* @param fieldName field name
* @return value of database type constant
*/
private int getDatabaseType(Class clazz, String fieldName) {
- // need to use reflection to extract constant values
- // because the database specific java libraries are not accessible in this context
+ // Need to use reflection to extract constant values because the database
+ // specific java libraries are not accessible in this context.
int value = -1;
try {
java.lang.reflect.Field field = clazz.getDeclaredField(fieldName);
@@ -429,13 +435,13 @@ private int getDatabaseType(Class clazz, String fieldName) {
}
/**
- * Load class by name
+ * Load class by name.
* @param className class name
* @return class instance
*/
private Class getTypeClass(String className) {
- // need to use reflection to load class
- // because the database specific java libraries are not accessible in this context
+ // Need to use reflection to load class because the database specific java
+ // libraries are not accessible in this context.
Class typeClass = null;
try {
typeClass = Class.forName(className);
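The reflection pattern described in these comments, calling a vendor-specific method without compiling against the Oracle driver, looks roughly like the sketch below; the method name is the one quoted in the diff, but the code is illustrative rather than a drop-in replacement.

import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.SQLException;

public final class DemoSessionTimeZone {

  /**
   * Invoke conn.setSessionTimeZone(tz) reflectively so this code compiles
   * without the vendor-specific driver on the classpath.
   */
  static void setSessionTimeZone(Connection conn, String tz)
      throws SQLException {
    try {
      Method method = conn.getClass().getMethod(
          "setSessionTimeZone", String.class);
      method.setAccessible(true);
      method.invoke(conn, tz);
    } catch (Exception ex) {
      // Surface any reflection failure as a SQLException, as the manager does.
      throw new SQLException(ex);
    }
  }
}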
diff --git a/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java b/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
index 57c842fb..2b120bb6 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
@@ -33,16 +33,18 @@
import org.apache.hadoop.sqoop.util.ImportException;
/**
- * Manages connections to Postgresql databases
+ * Manages connections to Postgresql databases.
*/
public class PostgresqlManager extends GenericJdbcManager {
- public static final Log LOG = LogFactory.getLog(PostgresqlManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ PostgresqlManager.class.getName());
// driver class to ensure is loaded when making db connection.
private static final String DRIVER_CLASS = "org.postgresql.Driver";
- private static final int POSTGRESQL_FETCH_SIZE = 50; // Fetch 50 rows at a time.
+ // Fetch 50 rows at a time.
+ private static final int POSTGRESQL_FETCH_SIZE = 50;
// set to true after we warn the user that we can use direct fastpath.
private static boolean warningPrinted = false;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java b/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
index a605da37..4ce0f1e0 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
@@ -62,7 +62,7 @@ public abstract class SqlManager extends ConnManager {
private Statement lastStatement;
/**
- * Constructs the SqlManager
+ * Constructs the SqlManager.
* @param opts the SqoopOptions describing the user's requested action.
*/
public SqlManager(final SqoopOptions opts) {
@@ -171,7 +171,8 @@ public Map<String, Integer> getColumnTypes(String tableName) {
}
@Override
- public ResultSet readTable(String tableName, String[] columns) throws SQLException {
+ public ResultSet readTable(String tableName, String[] columns)
+ throws SQLException {
if (columns == null) {
columns = getColumnNames(tableName);
}
@@ -212,7 +213,8 @@ public String[] listTables() {
DatabaseMetaData metaData = this.getConnection().getMetaData();
results = metaData.getTables(null, null, null, tableTypes);
} catch (SQLException sqlException) {
- LOG.error("Error reading database metadata: " + sqlException.toString());
+ LOG.error("Error reading database metadata: "
+ + sqlException.toString());
return null;
}
@@ -264,25 +266,26 @@ public String getPrimaryKey(String tableName) {
getConnection().commit();
}
} catch (SQLException sqlException) {
- LOG.error("Error reading primary key metadata: " + sqlException.toString());
+ LOG.error("Error reading primary key metadata: "
+ + sqlException.toString());
return null;
}
}
/**
- * Retrieve the actual connection from the outer ConnManager
+ * Retrieve the actual connection from the outer ConnManager.
*/
public abstract Connection getConnection() throws SQLException;
/**
* Determine what column to use to split the table.
- * @param options the SqoopOptions controlling this import.
+ * @param opts the SqoopOptions controlling this import.
* @param tableName the table to import.
* @return the splitting column, if one is set or inferrable, or null
* otherwise.
*/
- protected String getSplitColumn(SqoopOptions options, String tableName) {
- String splitCol = options.getSplitByCol();
+ protected String getSplitColumn(SqoopOptions opts, String tableName) {
+ String splitCol = opts.getSplitByCol();
if (null == splitCol) {
// If the user didn't specify a splitting column, try to infer one.
splitCol = getPrimaryKey(tableName);
@@ -344,7 +347,8 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
* @return the name of a Java type to hold the sql datatype, or null if none.
*/
public String toJavaType(int sqlType) {
- // mappings from http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html
+ // Mappings taken from:
+ // http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html
if (sqlType == Types.INTEGER) {
return "Integer";
} else if (sqlType == Types.VARCHAR) {
@@ -396,7 +400,7 @@ public String toJavaType(int sqlType) {
}
/**
- * Resolve a database-specific type to Hive data type
+ * Resolve a database-specific type to Hive data type.
* @param sqlType sql type
* @return hive type
*/
@@ -489,7 +493,8 @@ protected Connection makeConnection() throws SQLException {
try {
Class.forName(driverClass);
} catch (ClassNotFoundException cnfe) {
- throw new RuntimeException("Could not load db driver class: " + driverClass);
+ throw new RuntimeException("Could not load db driver class: "
+ + driverClass);
}
String username = options.getUsername();
@@ -497,7 +502,8 @@ protected Connection makeConnection() throws SQLException {
if (null == username) {
connection = DriverManager.getConnection(options.getConnectString());
} else {
- connection = DriverManager.getConnection(options.getConnectString(), username, password);
+ connection = DriverManager.getConnection(options.getConnectString(),
+ username, password);
}
// We only use this for metadata queries. Loosest semantics are okay.
@@ -508,7 +514,7 @@ protected Connection makeConnection() throws SQLException {
}
/**
- * Export data stored in HDFS into a table in a database
+ * Export data stored in HDFS into a table in a database.
*/
public void exportTable(ExportJobContext context)
throws IOException, ExportException {
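For reference, the driver-loading and connection code reflowed in the hunks above follows the standard JDBC pattern sketched here; the driver class, connect string, and credentials are placeholders supplied by the caller.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public final class DemoJdbcConnect {

  static Connection connect(String driverClass, String connectString,
      String username, String password) throws SQLException {
    try {
      Class.forName(driverClass); // Ensure the driver registers itself.
    } catch (ClassNotFoundException cnfe) {
      throw new RuntimeException("Could not load db driver class: "
          + driverClass);
    }
    if (null == username) {
      return DriverManager.getConnection(connectString);
    }
    return DriverManager.getConnection(connectString, username, password);
  }
}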
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/AutoProgressMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/AutoProgressMapper.java
index 0a3904af..20d639b6 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/AutoProgressMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/AutoProgressMapper.java
@@ -34,27 +34,34 @@
public class AutoProgressMapper
extends Mapper {
- public static final Log LOG = LogFactory.getLog(AutoProgressMapper.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ AutoProgressMapper.class.getName());
- /** Total number of millis for which progress will be reported
- by the auto-progress thread. If this is zero, then the auto-progress
- thread will never voluntarily exit.
- */
+ /**
+ * Total number of millis for which progress will be reported by the
+ * auto-progress thread. If this is zero, then the auto-progress thread will
+ * never voluntarily exit.
+ */
private int maxProgressPeriod;
- /** Number of milliseconds to sleep for between loop iterations. Must be less
- than report interval.
- */
+ /**
+ * Number of milliseconds to sleep for between loop iterations. Must be less
+ * than report interval.
+ */
private int sleepInterval;
- /** Number of milliseconds between calls to Reporter.progress(). Should be a multiple
- of the sleepInterval.
- */
+ /**
+ * Number of milliseconds between calls to Reporter.progress().
+ * Should be a multiple of the sleepInterval.
+ */
private int reportInterval;
- public static final String MAX_PROGRESS_PERIOD_KEY = "sqoop.mapred.auto.progress.max";
- public static final String SLEEP_INTERVAL_KEY = "sqoop.mapred.auto.progress.sleep";
- public static final String REPORT_INTERVAL_KEY = "sqoop.mapred.auto.progress.report";
+ public static final String MAX_PROGRESS_PERIOD_KEY =
+ "sqoop.mapred.auto.progress.max";
+ public static final String SLEEP_INTERVAL_KEY =
+ "sqoop.mapred.auto.progress.sleep";
+ public static final String REPORT_INTERVAL_KEY =
+ "sqoop.mapred.auto.progress.report";
// Sleep for 10 seconds at a time.
static final int DEFAULT_SLEEP_INTERVAL = 10000;
@@ -67,7 +74,7 @@ public class AutoProgressMapper
private class ProgressThread extends Thread {
- private volatile boolean keepGoing; // while this is true, thread runs.
+ private volatile boolean keepGoing; // While this is true, thread runs.
private Context context;
private long startTimeMillis;
@@ -91,17 +98,20 @@ public void run() {
final long REPORT_INTERVAL = AutoProgressMapper.this.reportInterval;
final long SLEEP_INTERVAL = AutoProgressMapper.this.sleepInterval;
- // in a loop:
- // * Check that we haven't run for too long (maxProgressPeriod)
- // * If it's been a report interval since we last made progress, make more.
+ // In a loop:
+ // * Check that we haven't run for too long (maxProgressPeriod).
+ // * If it's been a report interval since we last made progress,
+ // make more.
// * Sleep for a bit.
// * If the parent thread has signaled for exit, do so.
while (this.keepGoing) {
long curTimeMillis = System.currentTimeMillis();
- if (MAX_PROGRESS != 0 && curTimeMillis - this.startTimeMillis > MAX_PROGRESS) {
+ if (MAX_PROGRESS != 0
+ && curTimeMillis - this.startTimeMillis > MAX_PROGRESS) {
this.keepGoing = false;
- LOG.info("Auto-progress thread exiting after " + MAX_PROGRESS + " ms.");
+ LOG.info("Auto-progress thread exiting after " + MAX_PROGRESS
+ + " ms.");
break;
}
@@ -130,23 +140,29 @@ public void run() {
/**
* Set configuration parameters for the auto-progress thread.
*/
- private final void configureAutoProgress(Configuration job) {
- this.maxProgressPeriod = job.getInt(MAX_PROGRESS_PERIOD_KEY, DEFAULT_MAX_PROGRESS);
- this.sleepInterval = job.getInt(SLEEP_INTERVAL_KEY, DEFAULT_SLEEP_INTERVAL);
- this.reportInterval = job.getInt(REPORT_INTERVAL_KEY, DEFAULT_REPORT_INTERVAL);
+ private void configureAutoProgress(Configuration job) {
+ this.maxProgressPeriod = job.getInt(MAX_PROGRESS_PERIOD_KEY,
+ DEFAULT_MAX_PROGRESS);
+ this.sleepInterval = job.getInt(SLEEP_INTERVAL_KEY,
+ DEFAULT_SLEEP_INTERVAL);
+ this.reportInterval = job.getInt(REPORT_INTERVAL_KEY,
+ DEFAULT_REPORT_INTERVAL);
if (this.reportInterval < 1) {
- LOG.warn("Invalid " + REPORT_INTERVAL_KEY + "; setting to " + DEFAULT_REPORT_INTERVAL);
+ LOG.warn("Invalid " + REPORT_INTERVAL_KEY + "; setting to "
+ + DEFAULT_REPORT_INTERVAL);
this.reportInterval = DEFAULT_REPORT_INTERVAL;
}
if (this.sleepInterval > this.reportInterval || this.sleepInterval < 1) {
- LOG.warn("Invalid " + SLEEP_INTERVAL_KEY + "; setting to " + DEFAULT_SLEEP_INTERVAL);
+ LOG.warn("Invalid " + SLEEP_INTERVAL_KEY + "; setting to "
+ + DEFAULT_SLEEP_INTERVAL);
this.sleepInterval = DEFAULT_SLEEP_INTERVAL;
}
if (this.maxProgressPeriod < 0) {
- LOG.warn("Invalid " + MAX_PROGRESS_PERIOD_KEY + "; setting to " + DEFAULT_MAX_PROGRESS);
+ LOG.warn("Invalid " + MAX_PROGRESS_PERIOD_KEY + "; setting to "
+ + DEFAULT_MAX_PROGRESS);
this.maxProgressPeriod = DEFAULT_MAX_PROGRESS;
}
}
@@ -179,7 +195,8 @@ public void run(Context context) throws IOException, InterruptedException {
thread.join();
LOG.debug("Progress thread shutdown detected.");
} catch (InterruptedException ie) {
- LOG.warn("Interrupted when waiting on auto-progress thread: " + ie.toString());
+ LOG.warn("Interrupted when waiting on auto-progress thread: "
+ + ie.toString());
}
}
}
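The auto-progress behaviour configured above can be summarized by this standalone sketch of a background thread that reports at a fixed interval until a maximum period elapses or it is told to stop; the Runnable stands in for the mapper context's progress call, and the interval values in main are arbitrary.

// Standalone sketch of the auto-progress loop: report every reportInterval
// ms, sleep in smaller sleepInterval steps, and exit voluntarily after
// maxProgressPeriod ms (0 means never).
public class DemoProgressThread extends Thread {

  private volatile boolean keepGoing = true; // While true, thread runs.
  private final long maxProgressPeriod;
  private final long sleepInterval;
  private final long reportInterval;
  private final Runnable reporter; // Stands in for context.progress().

  public DemoProgressThread(long maxMs, long sleepMs, long reportMs,
      Runnable reporter) {
    this.maxProgressPeriod = maxMs;
    this.sleepInterval = sleepMs;
    this.reportInterval = reportMs;
    this.reporter = reporter;
    setDaemon(true);
  }

  public void signalShutdown() {
    this.keepGoing = false;
    this.interrupt();
  }

  @Override
  public void run() {
    long start = System.currentTimeMillis();
    long lastReport = start;
    while (this.keepGoing) {
      long now = System.currentTimeMillis();
      if (maxProgressPeriod != 0 && now - start > maxProgressPeriod) {
        break; // Ran for too long; exit voluntarily.
      }
      if (now - lastReport > reportInterval) {
        reporter.run(); // Make progress.
        lastReport = now;
      }
      try {
        Thread.sleep(sleepInterval); // Sleep for a bit.
      } catch (InterruptedException ie) {
        // signalShutdown() interrupts the sleep; the loop re-checks keepGoing.
      }
    }
  }

  public static void main(String[] args) throws InterruptedException {
    DemoProgressThread t = new DemoProgressThread(0, 100, 500, new Runnable() {
      public void run() {
        System.out.println("progress");
      }
    });
    t.start();
    Thread.sleep(1200); // Let it report a couple of times.
    t.signalShutdown();
    t.join();
  }
}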
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java b/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
index 5781297a..b67f2497 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
@@ -24,19 +24,12 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
@@ -46,12 +39,8 @@
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.lib.LargeObjectLoader;
-import org.apache.hadoop.sqoop.orm.TableClassName;
import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.shims.ShimLoader;
-import org.apache.hadoop.sqoop.util.ClassLoaderStack;
-import org.apache.hadoop.sqoop.util.ImportException;
-import org.apache.hadoop.sqoop.util.PerfCounters;
/**
* Actually runs a jdbc import job using the ORM files generated by the
@@ -59,7 +48,8 @@
*/
public class DataDrivenImportJob extends ImportJobBase {
- public static final Log LOG = LogFactory.getLog(DataDrivenImportJob.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ DataDrivenImportJob.class.getName());
@SuppressWarnings("unchecked")
public DataDrivenImportJob(final SqoopOptions opts) {
@@ -115,11 +105,12 @@ protected void configureInputFormat(Job job, String tableName,
try {
String username = options.getUsername();
if (null == username || username.length() == 0) {
- DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
- options.getConnectString());
+ DBConfiguration.configureDB(job.getConfiguration(),
+ mgr.getDriverClass(), options.getConnectString());
} else {
- DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
- options.getConnectString(), username, options.getPassword());
+ DBConfiguration.configureDB(job.getConfiguration(),
+ mgr.getDriverClass(), options.getConnectString(),
+ username, options.getPassword());
}
String [] colNames = options.getColumns();
@@ -139,8 +130,8 @@ protected void configureInputFormat(Job job, String tableName,
String whereClause = options.getWhereClause();
// We can't set the class properly in here, because we may not have the
- // jar loaded in this JVM. So we start by calling setInput() with DBWritable
- // and then overriding the string manually.
+ // jar loaded in this JVM. So we start by calling setInput() with
+ // DBWritable and then overriding the string manually.
DataDrivenDBInputFormat.setInput(job, DBWritable.class,
mgr.escapeTableName(tableName), whereClause,
mgr.escapeColName(splitByCol), sqlColNames);
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java b/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java
index e716a889..e58b8bc8 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java
@@ -112,7 +112,8 @@ public static boolean isSequenceFiles(Configuration conf, Path p)
}
if (null == stat) {
- LOG.warn("null FileStatus object in isSequenceFiles(); assuming false.");
+ LOG.warn("null FileStatus object in isSequenceFiles(); "
+ + "assuming false.");
return false;
}
@@ -239,7 +240,7 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
/**
- * Run an export job to dump a table from HDFS to a database
+ * Run an export job to dump a table from HDFS to a database.
* @throws IOException if the export job encounters an IO error
* @throws ExportException if the job fails unexpectedly or is misconfigured.
*/
@@ -248,7 +249,8 @@ public void runExport() throws ExportException, IOException {
SqoopOptions options = context.getOptions();
Configuration conf = options.getConf();
String tableName = context.getTableName();
- String tableClassName = new TableClassName(options).getClassForTable(tableName);
+ String tableClassName =
+ new TableClassName(options).getClassForTable(tableName);
String ormJarFile = context.getJarFile();
LOG.info("Beginning export of " + tableName);
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java b/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
index 634f5b54..ea54296a 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
@@ -57,7 +57,8 @@
*/
public class ImportJobBase extends JobBase {
- public static final Log LOG = LogFactory.getLog(ImportJobBase.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ ImportJobBase.class.getName());
public ImportJobBase() {
this(null);
@@ -129,20 +130,24 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
/**
- * Run an import job to read a table in to HDFS
+ * Run an import job to read a table in to HDFS.
*
* @param tableName the database table to read
- * @param ormJarFile the Jar file to insert into the dcache classpath. (may be null)
- * @param splitByCol the column of the database table to use to split the import
+ * @param ormJarFile the Jar file to insert into the dcache classpath.
+ * (may be null)
+ * @param splitByCol the column of the database table to use to split
+ * the import
* @param conf A fresh Hadoop Configuration to use to build an MR job.
* @throws IOException if the job encountered an IO problem
- * @throws ImportException if the job failed unexpectedly or was misconfigured.
+ * @throws ImportException if the job failed unexpectedly or was
+ * misconfigured.
*/
public void runImport(String tableName, String ormJarFile, String splitByCol,
Configuration conf) throws IOException, ImportException {
LOG.info("Beginning import of " + tableName);
- String tableClassName = new TableClassName(options).getClassForTable(tableName);
+ String tableClassName =
+ new TableClassName(options).getClassForTable(tableName);
loadJars(conf, ormJarFile, tableClassName);
try {
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/JdbcExportJob.java b/src/java/org/apache/hadoop/sqoop/mapreduce/JdbcExportJob.java
index 3f75d947..0d5edd6e 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/JdbcExportJob.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/JdbcExportJob.java
@@ -35,10 +35,9 @@
import org.apache.hadoop.sqoop.ConnFactory;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.ExportJobContext;
-import org.apache.hadoop.sqoop.shims.ShimLoader;
/**
- * Run an export using JDBC (JDBC-based ExportOutputFormat)
+ * Run an export using JDBC (JDBC-based ExportOutputFormat).
*/
public class JdbcExportJob extends ExportJobBase {
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/JobBase.java b/src/java/org/apache/hadoop/sqoop/mapreduce/JobBase.java
index 8c777b98..fb926623 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/JobBase.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/JobBase.java
@@ -26,7 +26,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
@@ -92,23 +91,23 @@ protected Class<? extends OutputFormat> getOutputFormatClass()
return this.outputFormatClass;
}
- /** Set the OutputFormat class to use for this job */
+ /** Set the OutputFormat class to use for this job. */
public void setOutputFormatClass(Class<? extends OutputFormat> cls) {
this.outputFormatClass = cls;
}
- /** Set the InputFormat class to use for this job */
+ /** Set the InputFormat class to use for this job. */
public void setInputFormatClass(Class<? extends InputFormat> cls) {
this.inputFormatClass = cls;
}
- /** Set the Mapper class to use for this job */
+ /** Set the Mapper class to use for this job. */
public void setMapperClass(Class<? extends Mapper> cls) {
this.mapperClass = cls;
}
/**
- * Set the SqoopOptions configuring this job
+ * Set the SqoopOptions configuring this job.
*/
public void setOptions(SqoopOptions opts) {
this.options = opts;
@@ -122,9 +121,10 @@ protected void loadJars(Configuration conf, String ormJarFile,
boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address"))
|| "local".equals(conf.get("mapred.job.tracker"));
if (isLocal) {
- // If we're using the LocalJobRunner, then instead of using the compiled jar file
- // as the job source, we're running in the current thread. Push on another classloader
- // that loads from that jar in addition to everything currently on the classpath.
+ // If we're using the LocalJobRunner, then instead of using the compiled
+ // jar file as the job source, we're running in the current thread. Push
+ // on another classloader that loads from that jar in addition to
+ // everything currently on the classpath.
this.prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile,
tableClassName);
}
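The LocalJobRunner case described in that comment amounts to pushing an extra classloader that can also see the generated jar. A minimal sketch with the standard URLClassLoader (not Sqoop's ClassLoaderStack helper) follows.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public final class DemoJarClassLoader {

  /**
   * Make classes in jarPath visible to the current thread in addition to
   * everything already on the classpath, and return the previous loader so
   * the caller can restore it afterwards.
   */
  static ClassLoader pushJar(String jarPath) throws Exception {
    ClassLoader prev = Thread.currentThread().getContextClassLoader();
    URL jarUrl = new File(jarPath).toURI().toURL();
    URLClassLoader loader = new URLClassLoader(new URL[] { jarUrl }, prev);
    Thread.currentThread().setContextClassLoader(loader);
    return prev;
  }
}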
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpImportJob.java b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpImportJob.java
index f44d91ef..21c44cc2 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpImportJob.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpImportJob.java
@@ -25,20 +25,10 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
@@ -47,11 +37,7 @@
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.MySQLUtils;
-import org.apache.hadoop.sqoop.orm.TableClassName;
import org.apache.hadoop.sqoop.shims.ShimLoader;
-import org.apache.hadoop.sqoop.util.ClassLoaderStack;
-import org.apache.hadoop.sqoop.util.ImportException;
-import org.apache.hadoop.sqoop.util.PerfCounters;
/**
* Class that runs an import job using mysqldump in the mapper.
@@ -82,11 +68,12 @@ protected void configureInputFormat(Job job, String tableName,
try {
String username = options.getUsername();
if (null == username || username.length() == 0) {
- DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
- options.getConnectString());
+ DBConfiguration.configureDB(job.getConfiguration(),
+ mgr.getDriverClass(), options.getConnectString());
} else {
- DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
- options.getConnectString(), username, options.getPassword());
+ DBConfiguration.configureDB(job.getConfiguration(),
+ mgr.getDriverClass(), options.getConnectString(), username,
+ options.getPassword());
}
String [] colNames = options.getColumns();
@@ -106,8 +93,8 @@ protected void configureInputFormat(Job job, String tableName,
String whereClause = options.getWhereClause();
// We can't set the class properly in here, because we may not have the
- // jar loaded in this JVM. So we start by calling setInput() with DBWritable
- // and then overriding the string manually.
+ // jar loaded in this JVM. So we start by calling setInput() with
+ // DBWritable and then overriding the string manually.
// Note that mysqldump also does *not* want a quoted table name.
DataDrivenDBInputFormat.setInput(job, DBWritable.class,
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpMapper.java
index b9dd9d9e..e85815db 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLDumpMapper.java
@@ -19,14 +19,10 @@
package org.apache.hadoop.sqoop.mapreduce;
import java.io.BufferedReader;
-import java.io.BufferedWriter;
import java.io.File;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
import java.nio.CharBuffer;
import java.util.ArrayList;
import java.util.List;
@@ -35,18 +31,13 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
-import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.lib.FieldFormatter;
import org.apache.hadoop.sqoop.lib.RecordParser;
import org.apache.hadoop.sqoop.manager.MySQLUtils;
import org.apache.hadoop.sqoop.util.AsyncSink;
-import org.apache.hadoop.sqoop.util.DirectImportUtils;
import org.apache.hadoop.sqoop.util.ErrorableAsyncSink;
import org.apache.hadoop.sqoop.util.ErrorableThread;
-import org.apache.hadoop.sqoop.util.ImportException;
import org.apache.hadoop.sqoop.util.JdbcUrl;
import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
import org.apache.hadoop.sqoop.util.PerfCounters;
@@ -57,7 +48,8 @@
public class MySQLDumpMapper
extends Mapper {
- public static final Log LOG = LogFactory.getLog(MySQLDumpMapper.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ MySQLDumpMapper.class.getName());
private Configuration conf;
@@ -111,19 +103,22 @@ public void run() {
break; // EOF.
}
- // this line is of the form "INSERT .. VALUES ( actual value text );"
- // strip the leading preamble up to the '(' and the trailing ');'.
+ // This line is of the form "INSERT .. VALUES ( actual value
+ // text );". Strip the leading preamble up to the '(' and the
+ // trailing ');'.
if (preambleLen == -1) {
// we haven't determined how long the preamble is. It's constant
// across all lines, so just figure this out once.
String recordStartMark = "VALUES (";
- preambleLen = inLine.indexOf(recordStartMark) + recordStartMark.length();
+ preambleLen = inLine.indexOf(recordStartMark)
+ + recordStartMark.length();
}
// chop off the leading and trailing text as we write the
// output to HDFS.
int len = inLine.length() - 2 - preambleLen;
- context.write(inLine.substring(preambleLen, inLine.length() - 2), null);
+ context.write(inLine.substring(preambleLen, inLine.length() - 2),
+ null);
context.write("\n", null);
counters.addBytes(1 + len);
}
@@ -235,18 +230,21 @@ public void run() {
break; // EOF.
}
- // this line is of the form "INSERT .. VALUES ( actual value text );"
- // strip the leading preamble up to the '(' and the trailing ');'.
+ // This line is of the form "INSERT .. VALUES ( actual value
+ // text );". Strip the leading preamble up to the '(' and the
+ // trailing ');'.
if (preambleLen == -1) {
// we haven't determined how long the preamble is. It's constant
// across all lines, so just figure this out once.
String recordStartMark = "VALUES (";
- preambleLen = inLine.indexOf(recordStartMark) + recordStartMark.length();
+ preambleLen = inLine.indexOf(recordStartMark)
+ + recordStartMark.length();
}
- // Wrap the input string in a char buffer that ignores the leading and trailing
- // text.
- CharBuffer charbuf = CharBuffer.wrap(inLine, preambleLen, inLine.length() - 2);
+ // Wrap the input string in a char buffer that ignores the leading
+ // and trailing text.
+ CharBuffer charbuf = CharBuffer.wrap(inLine, preambleLen,
+ inLine.length() - 2);
// Pass this along to the parser
List<String> fields = null;
@@ -258,7 +256,8 @@ public void run() {
continue; // Skip emitting this row.
}
- // For all of the output fields, emit them using the delimiters the user chooses.
+ // For all of the output fields, emit them using the delimiters
+ // the user chooses.
boolean first = true;
int recordLen = 1; // for the delimiter.
for (String field : fields) {
@@ -312,10 +311,11 @@ public void map(String splitConditions, NullWritable val, Context context)
ArrayList<String> args = new ArrayList<String>();
String tableName = conf.get(MySQLUtils.TABLE_NAME_KEY);
- // We need to parse the connect string URI to determine the database
- // name. Using java.net.URL directly on the connect string will fail because
- // Java doesn't respect arbitrary JDBC-based schemes. So we chop off the scheme
- // (everything before '://') and replace it with 'http', which we know will work.
+ // We need to parse the connect string URI to determine the database name.
+ // Using java.net.URL directly on the connect string will fail because
+ // Java doesn't respect arbitrary JDBC-based schemes. So we chop off the
+ // scheme (everything before '://') and replace it with 'http', which we
+ // know will work.
String connectString = conf.get(MySQLUtils.CONNECT_STRING_KEY);
String databaseName = JdbcUrl.getDatabaseName(connectString);
String hostname = JdbcUrl.getHostName(connectString);
@@ -391,7 +391,8 @@ public void map(String splitConditions, NullWritable val, Context context)
InputStream is = p.getInputStream();
if (MySQLUtils.outputDelimsAreMySQL(conf)) {
- LOG.debug("Output delimiters conform to mysqldump; using straight copy");
+ LOG.debug("Output delimiters conform to mysqldump; "
+ + "using straight copy");
sink = new CopyingAsyncSink(context, counters);
} else {
LOG.debug("User-specified delimiters; using reparsing import");
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportJob.java b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportJob.java
index 3cecce2c..0e2b4150 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportJob.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportJob.java
@@ -27,7 +27,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
@@ -80,11 +79,12 @@ protected void configureInputFormat(Job job, String tableName,
mgr = new ConnFactory(conf).getManager(options);
String username = options.getUsername();
if (null == username || username.length() == 0) {
- DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
- options.getConnectString());
+ DBConfiguration.configureDB(job.getConfiguration(),
+ mgr.getDriverClass(), options.getConnectString());
} else {
- DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
- options.getConnectString(), username, options.getPassword());
+ DBConfiguration.configureDB(job.getConfiguration(),
+ mgr.getDriverClass(), options.getConnectString(), username,
+ options.getPassword());
}
String [] colNames = options.getColumns();
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportMapper.java
index 64e317b6..96726a8d 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/MySQLExportMapper.java
@@ -30,7 +30,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.util.Shell;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.sqoop.lib.TaskId;
@@ -53,7 +52,8 @@
public class MySQLExportMapper
extends Mapper {
- public static final Log LOG = LogFactory.getLog(MySQLExportMapper.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ MySQLExportMapper.class.getName());
/** Configuration key that specifies the number of bytes before which it
* commits the current export transaction and opens a new one.
@@ -82,7 +82,7 @@ public class MySQLExportMapper
protected AsyncSink outSink;
protected AsyncSink errSink;
- /** File object where we wrote the user's password to pass to mysqlimport */
+ /** File object where we wrote the user's password to pass to mysqlimport. */
protected File passwordFile;
/** Character set used to write to mysqlimport. */
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java
index ef50bbaa..4902e04a 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java
@@ -31,7 +31,8 @@
* that DBWritable to the OutputFormat for writeback to the database.
*/
public class SequenceFileExportMapper
- extends AutoProgressMapper {
+ extends AutoProgressMapper {
public SequenceFileExportMapper() {
}
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileImportMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileImportMapper.java
index 3f337b05..f8db27a1 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileImportMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileImportMapper.java
@@ -31,12 +31,14 @@
* Imports records by writing them to a SequenceFile.
*/
public class SequenceFileImportMapper
- extends AutoProgressMapper {
+ extends AutoProgressMapper {
private LargeObjectLoader lobLoader;
@Override
- protected void setup(Context context) throws IOException, InterruptedException {
+ protected void setup(Context context)
+ throws IOException, InterruptedException {
this.lobLoader = new LargeObjectLoader(context.getConfiguration(),
FileOutputFormat.getWorkOutputPath(context));
}
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java
index 511bdcef..4c31bb7b 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java
@@ -25,15 +25,15 @@
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper.Context;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.sqoop.lib.RecordParser;
import org.apache.hadoop.sqoop.lib.SqoopRecord;
/**
- * Converts an input record from a string representation to a parsed Sqoop record
- * and emits that DBWritable to the OutputFormat for writeback to the database.
+ * Converts an input record from a string representation to a parsed Sqoop
+ * record and emits that DBWritable to the OutputFormat for writeback to the
+ * database.
*/
public class TextExportMapper
extends AutoProgressMapper {
@@ -67,7 +67,8 @@ protected void setup(Context context)
}
if (null == recordImpl) {
- throw new IOException("Could not instantiate object of type " + recordClassName);
+ throw new IOException("Could not instantiate object of type "
+ + recordClassName);
}
}
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/TextImportMapper.java b/src/java/org/apache/hadoop/sqoop/mapreduce/TextImportMapper.java
index 57042c82..dd973b33 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/TextImportMapper.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/TextImportMapper.java
@@ -43,7 +43,8 @@ public TextImportMapper() {
}
@Override
- protected void setup(Context context) throws IOException, InterruptedException {
+ protected void setup(Context context)
+ throws IOException, InterruptedException {
this.lobLoader = new LargeObjectLoader(context.getConfiguration(),
FileOutputFormat.getWorkOutputPath(context));
}
diff --git a/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java b/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java
index 46512d75..84ba6810 100644
--- a/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java
+++ b/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java
@@ -21,7 +21,6 @@
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager;
-import org.apache.hadoop.sqoop.manager.SqlManager;
import org.apache.hadoop.sqoop.lib.BigDecimalSerializer;
import org.apache.hadoop.sqoop.lib.FieldFormatter;
import org.apache.hadoop.sqoop.lib.JdbcWritableBridge;
@@ -45,7 +44,7 @@
import org.apache.commons.logging.LogFactory;
/**
- * Creates an ORM class to represent a table from a database
+ * Creates an ORM class to represent a table from a database.
*/
public class ClassWriter {
@@ -209,12 +208,13 @@ public static String toIdentifier(String candidate) {
/**
* @param javaType
- * @return the name of the method of JdbcWritableBridge to read an entry with a given java type.
+ * @return the name of the method of JdbcWritableBridge to read an entry
+ * with a given java type.
*/
private String dbGetterForType(String javaType) {
// All Class-based types (e.g., java.math.BigDecimal) are handled with
- // "readBar" where some.package.foo.Bar is the canonical class name.
- // Turn the javaType string into the getter type string.
+ // "readBar" where some.package.foo.Bar is the canonical class name. Turn
+ // the javaType string into the getter type string.
String [] parts = javaType.split("\\.");
if (parts.length == 0) {
@@ -224,18 +224,21 @@ private String dbGetterForType(String javaType) {
String lastPart = parts[parts.length - 1];
try {
- String getter = "read" + Character.toUpperCase(lastPart.charAt(0)) + lastPart.substring(1);
+ String getter = "read" + Character.toUpperCase(lastPart.charAt(0))
+ + lastPart.substring(1);
return getter;
} catch (StringIndexOutOfBoundsException oob) {
// lastPart.*() doesn't work on empty strings.
- LOG.error("Could not infer JdbcWritableBridge getter for Java type " + javaType);
+ LOG.error("Could not infer JdbcWritableBridge getter for Java type "
+ + javaType);
return null;
}
}
/**
* @param javaType
- * @return the name of the method of JdbcWritableBridge to write an entry with a given java type.
+ * @return the name of the method of JdbcWritableBridge to write an entry
+ * with a given java type.
*/
private String dbSetterForType(String javaType) {
// TODO(aaron): Lots of unit tests needed here.
@@ -249,11 +252,13 @@ private String dbSetterForType(String javaType) {
String lastPart = parts[parts.length - 1];
try {
- String setter = "write" + Character.toUpperCase(lastPart.charAt(0)) + lastPart.substring(1);
+ String setter = "write" + Character.toUpperCase(lastPart.charAt(0))
+ + lastPart.substring(1);
return setter;
} catch (StringIndexOutOfBoundsException oob) {
// lastPart.*() doesn't work on empty strings.
- LOG.error("Could not infer PreparedStatement setter for Java type " + javaType);
+ LOG.error("Could not infer PreparedStatement setter for Java type "
+ + javaType);
return null;
}
}
@@ -262,7 +267,7 @@ private String stringifierForType(String javaType, String colName) {
if (javaType.equals("String")) {
return colName;
} else {
- // this is an object type -- just call its toString() in a null-safe way.
+ // This is an object type -- just call its toString() in a null-safe way.
return "\"\" + " + colName;
}
}
@@ -271,36 +276,49 @@ private String stringifierForType(String javaType, String colName) {
* @param javaType the type to read
* @param inputObj the name of the DataInput to read from
* @param colName the column name to read
- * @return the line of code involving a DataInput object to read an entry with a given java type.
+ * @return the line of code involving a DataInput object to read an entry
+ * with a given java type.
*/
- private String rpcGetterForType(String javaType, String inputObj, String colName) {
+ private String rpcGetterForType(String javaType, String inputObj,
+ String colName) {
if (javaType.equals("Integer")) {
- return " this." + colName + " = Integer.valueOf(" + inputObj + ".readInt());\n";
+ return " this." + colName + " = Integer.valueOf(" + inputObj
+ + ".readInt());\n";
} else if (javaType.equals("Long")) {
- return " this." + colName + " = Long.valueOf(" + inputObj + ".readLong());\n";
+ return " this." + colName + " = Long.valueOf(" + inputObj
+ + ".readLong());\n";
} else if (javaType.equals("Float")) {
- return " this." + colName + " = Float.valueOf(" + inputObj + ".readFloat());\n";
+ return " this." + colName + " = Float.valueOf(" + inputObj
+ + ".readFloat());\n";
} else if (javaType.equals("Double")) {
- return " this." + colName + " = Double.valueOf(" + inputObj + ".readDouble());\n";
+ return " this." + colName + " = Double.valueOf(" + inputObj
+ + ".readDouble());\n";
} else if (javaType.equals("Boolean")) {
- return " this." + colName + " = Boolean.valueOf(" + inputObj + ".readBoolean());\n";
+ return " this." + colName + " = Boolean.valueOf(" + inputObj
+ + ".readBoolean());\n";
} else if (javaType.equals("String")) {
return " this." + colName + " = Text.readString(" + inputObj + ");\n";
} else if (javaType.equals("java.sql.Date")) {
- return " this." + colName + " = new Date(" + inputObj + ".readLong());\n";
+ return " this." + colName + " = new Date(" + inputObj
+ + ".readLong());\n";
} else if (javaType.equals("java.sql.Time")) {
- return " this." + colName + " = new Time(" + inputObj + ".readLong());\n";
+ return " this." + colName + " = new Time(" + inputObj
+ + ".readLong());\n";
} else if (javaType.equals("java.sql.Timestamp")) {
- return " this." + colName + " = new Timestamp(" + inputObj + ".readLong());\n"
- + " this." + colName + ".setNanos(" + inputObj + ".readInt());\n";
+ return " this." + colName + " = new Timestamp(" + inputObj
+ + ".readLong());\n" + " this." + colName + ".setNanos("
+ + inputObj + ".readInt());\n";
} else if (javaType.equals("java.math.BigDecimal")) {
- return " this." + colName + " = " + BigDecimalSerializer.class.getCanonicalName()
+ return " this." + colName + " = "
+ + BigDecimalSerializer.class.getCanonicalName()
+ ".readFields(" + inputObj + ");\n";
} else if (javaType.equals(ClobRef.class.getName())) {
- return " this." + colName + " = " + LobSerializer.class.getCanonicalName()
+ return " this." + colName + " = "
+ + LobSerializer.class.getCanonicalName()
+ ".readClobFields(" + inputObj + ");\n";
} else if (javaType.equals(BlobRef.class.getName())) {
- return " this." + colName + " = " + LobSerializer.class.getCanonicalName()
+ return " this." + colName + " = "
+ + LobSerializer.class.getCanonicalName()
+ ".readBlobFields(" + inputObj + ");\n";
} else if (javaType.equals(BytesWritable.class.getName())) {
return " this." + colName + " = new BytesWritable();\n"
@@ -312,13 +330,14 @@ private String rpcGetterForType(String javaType, String inputObj, String colName
}
/**
- * Deserialize a possibly-null value from the DataInput stream
+ * Deserialize a possibly-null value from the DataInput stream.
* @param javaType name of the type to deserialize if it's not null.
* @param inputObj name of the DataInput to read from
* @param colName the column name to read.
* @return
*/
- private String rpcGetterForMaybeNull(String javaType, String inputObj, String colName) {
+ private String rpcGetterForMaybeNull(String javaType, String inputObj,
+ String colName) {
return " if (" + inputObj + ".readBoolean()) { \n"
+ " this." + colName + " = null;\n"
+ " } else {\n"
@@ -330,10 +349,11 @@ private String rpcGetterForMaybeNull(String javaType, String inputObj, String co
* @param javaType the type to write
* @param inputObj the name of the DataOutput to write to
* @param colName the column name to write
- * @return the line of code involving a DataOutput object to write an entry with
- * a given java type.
+ * @return the line of code involving a DataOutput object to write an entry
+ * with a given java type.
*/
- private String rpcSetterForType(String javaType, String outputObj, String colName) {
+ private String rpcSetterForType(String javaType, String outputObj,
+ String colName) {
if (javaType.equals("Integer")) {
return " " + outputObj + ".writeInt(this." + colName + ");\n";
} else if (javaType.equals("Long")) {
@@ -347,12 +367,15 @@ private String rpcSetterForType(String javaType, String outputObj, String colNam
} else if (javaType.equals("String")) {
return " Text.writeString(" + outputObj + ", " + colName + ");\n";
} else if (javaType.equals("java.sql.Date")) {
- return " " + outputObj + ".writeLong(this." + colName + ".getTime());\n";
+ return " " + outputObj + ".writeLong(this." + colName
+ + ".getTime());\n";
} else if (javaType.equals("java.sql.Time")) {
- return " " + outputObj + ".writeLong(this." + colName + ".getTime());\n";
+ return " " + outputObj + ".writeLong(this." + colName
+ + ".getTime());\n";
} else if (javaType.equals("java.sql.Timestamp")) {
- return " " + outputObj + ".writeLong(this." + colName + ".getTime());\n"
- + " " + outputObj + ".writeInt(this." + colName + ".getNanos());\n";
+ return " " + outputObj + ".writeLong(this." + colName
+ + ".getTime());\n" + " " + outputObj + ".writeInt(this." + colName
+ + ".getNanos());\n";
} else if (javaType.equals(BytesWritable.class.getName())) {
return " this." + colName + ".write(" + outputObj + ");\n";
} else if (javaType.equals("java.math.BigDecimal")) {
@@ -378,7 +401,8 @@ private String rpcSetterForType(String javaType, String outputObj, String colNam
* @param colName the column name to read.
* @return
*/
- private String rpcSetterForMaybeNull(String javaType, String outputObj, String colName) {
+ private String rpcSetterForMaybeNull(String javaType, String outputObj,
+ String colName) {
return " if (null == this." + colName + ") { \n"
+ " " + outputObj + ".writeBoolean(true);\n"
+ " } else {\n"
@@ -388,13 +412,13 @@ private String rpcSetterForMaybeNull(String javaType, String outputObj, String c
}
/**
- * Generate a member field and getter method for each column
+ * Generate a member field and getter method for each column.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateFields(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateFields(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
for (String col : colNames) {
int sqlType = columnTypes.get(col);
@@ -412,15 +436,16 @@ private void generateFields(Map columnTypes, String [] colNames
}
/**
- * Generate the readFields() method used by the database
+ * Generate the readFields() method used by the database.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateDbRead(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateDbRead(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
- sb.append(" public void readFields(ResultSet __dbResults) throws SQLException {\n");
+ sb.append(" public void readFields(ResultSet __dbResults) ");
+ sb.append("throws SQLException {\n");
// Save ResultSet object cursor for use in LargeObjectLoader
// if necessary.
@@ -462,7 +487,8 @@ private void generateLoadLargeObjects(Map columnTypes,
// readFields() method generated by generateDbRead().
sb.append(" public void loadLargeObjects(LargeObjectLoader __loader)\n");
- sb.append(" throws SQLException, IOException, InterruptedException {\n");
+ sb.append(" throws SQLException, IOException, ");
+ sb.append("InterruptedException {\n");
int fieldNum = 0;
@@ -479,9 +505,9 @@ private void generateLoadLargeObjects(Map columnTypes,
String getterMethod = dbGetterForType(javaType);
if ("readClobRef".equals(getterMethod)
|| "readBlobRef".equals(getterMethod)) {
- // This field is a blob/clob field with delayed loading.
- // Call the appropriate LargeObjectLoader method (which has the
- // same name as a JdbcWritableBridge method).
+ // This field is a blob/clob field with delayed loading. Call the
+ // appropriate LargeObjectLoader method (which has the same name as a
+ // JdbcWritableBridge method).
sb.append(" this." + col + " = __loader." + getterMethod
+ "(" + fieldNum + ", this.__cur_result_set);\n");
}
@@ -491,19 +517,21 @@ private void generateLoadLargeObjects(Map columnTypes,
/**
- * Generate the write() method used by the database
+ * Generate the write() method used by the database.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateDbWrite(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateDbWrite(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
- sb.append(" public void write(PreparedStatement __dbStmt) throws SQLException {\n");
+ sb.append(" public void write(PreparedStatement __dbStmt) "
+ + "throws SQLException {\n");
sb.append(" write(__dbStmt, 0);\n");
sb.append(" }\n\n");
- sb.append(" public int write(PreparedStatement __dbStmt, int __off) throws SQLException {\n");
+ sb.append(" public int write(PreparedStatement __dbStmt, int __off) "
+ + "throws SQLException {\n");
int fieldNum = 0;
@@ -533,15 +561,16 @@ private void generateDbWrite(Map columnTypes, String [] colName
/**
- * Generate the readFields() method used by the Hadoop RPC system
+ * Generate the readFields() method used by the Hadoop RPC system.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateHadoopRead(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateHadoopRead(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
- sb.append(" public void readFields(DataInput __dataIn) throws IOException {\n");
+ sb.append(" public void readFields(DataInput __dataIn) "
+ + "throws IOException {\n");
for (String col : colNames) {
int sqlType = columnTypes.get(col);
@@ -610,27 +639,27 @@ private void generateCloneMethod(Map columnTypes,
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateToString(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateToString(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
// Embed the delimiters into the class, as characters...
- sb.append(" private static final char __OUTPUT_FIELD_DELIM_CHAR = " +
+ sb.append(" private static final char __OUTPUT_FIELD_DELIM_CHAR = "
+ (int)options.getOutputFieldDelim() + ";\n");
- sb.append(" private static final char __OUTPUT_RECORD_DELIM_CHAR = "
+ sb.append(" private static final char __OUTPUT_RECORD_DELIM_CHAR = "
+ (int)options.getOutputRecordDelim() + ";\n");
// as strings...
- sb.append(" private static final String __OUTPUT_FIELD_DELIM = \"\" + (char) "
- + (int) options.getOutputFieldDelim() + ";\n");
- sb.append(" private static final String __OUTPUT_RECORD_DELIM = \"\" + (char) "
- + (int) options.getOutputRecordDelim() + ";\n");
- sb.append(" private static final String __OUTPUT_ENCLOSED_BY = \"\" + (char) "
- + (int) options.getOutputEnclosedBy() + ";\n");
- sb.append(" private static final String __OUTPUT_ESCAPED_BY = \"\" + (char) "
- + (int) options.getOutputEscapedBy() + ";\n");
+ sb.append(" private static final String __OUTPUT_FIELD_DELIM = "
+ + "\"\" + (char) " + (int) options.getOutputFieldDelim() + ";\n");
+ sb.append(" private static final String __OUTPUT_RECORD_DELIM = "
+ + "\"\" + (char) " + (int) options.getOutputRecordDelim() + ";\n");
+ sb.append(" private static final String __OUTPUT_ENCLOSED_BY = "
+ + "\"\" + (char) " + (int) options.getOutputEnclosedBy() + ";\n");
+ sb.append(" private static final String __OUTPUT_ESCAPED_BY = "
+ + "\"\" + (char) " + (int) options.getOutputEscapedBy() + ";\n");
// and some more options.
- sb.append(" private static final boolean __OUTPUT_ENCLOSE_REQUIRED = "
+ sb.append(" private static final boolean __OUTPUT_ENCLOSE_REQUIRED = "
+ options.isOutputEncloseRequired() + ";\n");
sb.append(" private static final char [] __OUTPUT_DELIMITER_LIST = { "
+ "__OUTPUT_FIELD_DELIM_CHAR, __OUTPUT_RECORD_DELIM_CHAR };\n\n");
@@ -662,8 +691,8 @@ private void generateToString(Map columnTypes, String [] colNam
}
sb.append(" __sb.append(FieldFormatter.escapeAndEnclose(" + stringExpr
- + ", __OUTPUT_ESCAPED_BY, __OUTPUT_ENCLOSED_BY, __OUTPUT_DELIMITER_LIST, "
- + "__OUTPUT_ENCLOSE_REQUIRED));\n");
+ + ", __OUTPUT_ESCAPED_BY, __OUTPUT_ENCLOSED_BY, "
+ + "__OUTPUT_DELIMITER_LIST, __OUTPUT_ENCLOSE_REQUIRED));\n");
}
@@ -675,17 +704,21 @@ private void generateToString(Map columnTypes, String [] colNam
/**
- * Helper method for generateParser(). Writes out the parse() method for one particular
- * type we support as an input string-ish type.
+ * Helper method for generateParser(). Writes out the parse() method for one
+ * particular type we support as an input string-ish type.
*/
private void generateParseMethod(String typ, StringBuilder sb) {
- sb.append(" public void parse(" + typ + " __record) throws RecordParser.ParseError {\n");
+ sb.append(" public void parse(" + typ + " __record) "
+ + "throws RecordParser.ParseError {\n");
sb.append(" if (null == this.__parser) {\n");
- sb.append(" this.__parser = new RecordParser(__INPUT_FIELD_DELIM_CHAR, ");
- sb.append("__INPUT_RECORD_DELIM_CHAR, __INPUT_ENCLOSED_BY_CHAR, __INPUT_ESCAPED_BY_CHAR, ");
+ sb.append(" this.__parser = new RecordParser("
+ + "__INPUT_FIELD_DELIM_CHAR, ");
+ sb.append("__INPUT_RECORD_DELIM_CHAR, __INPUT_ENCLOSED_BY_CHAR, "
+ + "__INPUT_ESCAPED_BY_CHAR, ");
sb.append("__INPUT_ENCLOSE_REQUIRED);\n");
sb.append(" }\n");
- sb.append(" List __fields = this.__parser.parseRecord(__record);\n");
+ sb.append(" List __fields = "
+ + "this.__parser.parseRecord(__record);\n");
sb.append(" __loadFromFields(__fields);\n");
sb.append(" }\n\n");
}
@@ -701,18 +734,20 @@ private void parseNullVal(String colName, StringBuilder sb) {
}
/**
- * Helper method for generateParser(). Generates the code that loads one field of
- * a specified name and type from the next element of the field strings list.
+ * Helper method for generateParser(). Generates the code that loads one
+ * field of a specified name and type from the next element of the field
+ * strings list.
*/
private void parseColumn(String colName, int colType, StringBuilder sb) {
- // assume that we have __it and __cur_str vars, based on __loadFromFields() code.
+ // assume that we have __it and __cur_str vars, based on
+ // __loadFromFields() code.
sb.append(" __cur_str = __it.next();\n");
String javaType = connManager.toJavaType(colType);
parseNullVal(colName, sb);
if (javaType.equals("String")) {
- // TODO(aaron): Distinguish between 'null' and null. Currently they both set the
- // actual object to null.
+ // TODO(aaron): Distinguish between 'null' and null. Currently they both
+ // set the actual object to null.
sb.append(" this." + colName + " = __cur_str;\n");
} else if (javaType.equals("Integer")) {
sb.append(" this." + colName + " = Integer.valueOf(__cur_str);\n");
@@ -725,13 +760,17 @@ private void parseColumn(String colName, int colType, StringBuilder sb) {
} else if (javaType.equals("Boolean")) {
sb.append(" this." + colName + " = Boolean.valueOf(__cur_str);\n");
} else if (javaType.equals("java.sql.Date")) {
- sb.append(" this." + colName + " = java.sql.Date.valueOf(__cur_str);\n");
+ sb.append(" this." + colName
+ + " = java.sql.Date.valueOf(__cur_str);\n");
} else if (javaType.equals("java.sql.Time")) {
- sb.append(" this." + colName + " = java.sql.Time.valueOf(__cur_str);\n");
+ sb.append(" this." + colName
+ + " = java.sql.Time.valueOf(__cur_str);\n");
} else if (javaType.equals("java.sql.Timestamp")) {
- sb.append(" this." + colName + " = java.sql.Timestamp.valueOf(__cur_str);\n");
+ sb.append(" this." + colName
+ + " = java.sql.Timestamp.valueOf(__cur_str);\n");
} else if (javaType.equals("java.math.BigDecimal")) {
- sb.append(" this." + colName + " = new java.math.BigDecimal(__cur_str);\n");
+ sb.append(" this." + colName
+ + " = new java.math.BigDecimal(__cur_str);\n");
} else if (javaType.equals(ClobRef.class.getName())) {
sb.append(" this." + colName + " = ClobRef.parse(__cur_str);\n");
} else if (javaType.equals(BlobRef.class.getName())) {
@@ -744,18 +783,19 @@ private void parseColumn(String colName, int colType, StringBuilder sb) {
}
/**
- * Generate the parse() method
+ * Generate the parse() method.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateParser(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateParser(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
- // Embed into the class the delimiter characters to use when parsing input records.
- // Note that these can differ from the delims to use as output via toString(), if
- // the user wants to use this class to convert one format to another.
- sb.append(" private static final char __INPUT_FIELD_DELIM_CHAR = " +
+ // Embed into the class the delimiter characters to use when parsing input
+ // records. Note that these can differ from the delims to use as output
+ // via toString(), if the user wants to use this class to convert one
+ // format to another.
+ sb.append(" private static final char __INPUT_FIELD_DELIM_CHAR = "
+ (int)options.getInputFieldDelim() + ";\n");
sb.append(" private static final char __INPUT_RECORD_DELIM_CHAR = "
+ (int)options.getInputRecordDelim() + ";\n");
@@ -778,9 +818,9 @@ private void generateParser(Map columnTypes, String [] colNames
generateParseMethod("ByteBuffer", sb);
generateParseMethod("CharBuffer", sb);
- // The wrapper methods call __loadFromFields() to actually interpret the raw
- // field data as string, int, boolean, etc. The generation of this method is
- // type-dependent for the fields.
+ // The wrapper methods call __loadFromFields() to actually interpret the
+ // raw field data as string, int, boolean, etc. The generation of this
+ // method is type-dependent for the fields.
sb.append(" private void __loadFromFields(List fields) {\n");
sb.append(" Iterator __it = fields.listIterator();\n");
sb.append(" String __cur_str;\n");
@@ -792,15 +832,16 @@ private void generateParser(Map columnTypes, String [] colNames
}
/**
- * Generate the write() method used by the Hadoop RPC system
+ * Generate the write() method used by the Hadoop RPC system.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @param sb - StringBuilder to append code to
*/
- private void generateHadoopWrite(Map<String, Integer> columnTypes, String [] colNames,
- StringBuilder sb) {
+ private void generateHadoopWrite(Map<String, Integer> columnTypes,
+ String [] colNames, StringBuilder sb) {
- sb.append(" public void write(DataOutput __dataOut) throws IOException {\n");
+ sb.append(" public void write(DataOutput __dataOut) "
+ + "throws IOException {\n");
for (String col : colNames) {
int sqlType = columnTypes.get(col);
@@ -840,21 +881,22 @@ public void generate() throws IOException {
String identifier = toIdentifier(col);
cleanedColNames[i] = identifier;
- // make sure the col->type mapping holds for the
+ // Make sure the col->type mapping holds for the
// new identifier name, too.
columnTypes.put(identifier, columnTypes.get(col));
}
- // Generate the Java code
+ // Generate the Java code.
StringBuilder sb = generateClassForColumns(columnTypes, cleanedColNames);
// Write this out to a file.
String codeOutDir = options.getCodeOutputDir();
- // Get the class name to generate, which includes package components
+ // Get the class name to generate, which includes package components.
String className = new TableClassName(options).getClassForTable(tableName);
- // convert the '.' characters to '/' characters
- String sourceFilename = className.replace('.', File.separatorChar) + ".java";
+ // Convert the '.' characters to '/' characters.
+ String sourceFilename = className.replace('.', File.separatorChar)
+ + ".java";
String filename = codeOutDir + sourceFilename;
if (LOG.isDebugEnabled()) {
@@ -908,7 +950,7 @@ public void generate() throws IOException {
}
/**
- * Generate the ORM code for a table object containing the named columns
+ * Generate the ORM code for a table object containing the named columns.
* @param columnTypes - mapping from column names to sql types
* @param colNames - ordered list of column names for table.
* @return - A StringBuilder that contains the text of the class code.
@@ -917,7 +959,8 @@ public StringBuilder generateClassForColumns(Map columnTypes,
String [] colNames) {
StringBuilder sb = new StringBuilder();
sb.append("// ORM class for " + tableName + "\n");
- sb.append("// WARNING: This class is AUTO-GENERATED. Modify at your own risk.\n");
+ sb.append("// WARNING: This class is AUTO-GENERATED. "
+ + "Modify at your own risk.\n");
TableClassName tableNameInfo = new TableClassName(options);
@@ -958,7 +1001,8 @@ public StringBuilder generateClassForColumns(Map columnTypes,
String className = tableNameInfo.getShortClassForTable(tableName);
sb.append("public class " + className
+ " implements DBWritable, SqoopRecord, Writable {\n");
- sb.append(" public static final int PROTOCOL_VERSION = " + CLASS_WRITER_VERSION + ";\n");
+ sb.append(" public static final int PROTOCOL_VERSION = "
+ + CLASS_WRITER_VERSION + ";\n");
sb.append(" protected ResultSet __cur_result_set;\n");
generateFields(columnTypes, colNames, sb);
generateDbRead(columnTypes, colNames, sb);
@@ -970,7 +1014,8 @@ public StringBuilder generateClassForColumns(Map columnTypes,
generateParser(columnTypes, colNames, sb);
generateCloneMethod(columnTypes, colNames, sb);
- // TODO(aaron): Generate hashCode(), compareTo(), equals() so it can be a WritableComparable
+ // TODO(aaron): Generate hashCode(), compareTo(), equals() so it can be a
+ // WritableComparable
sb.append("}\n");
diff --git a/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java b/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java
index e69a6ea2..c7ebd700 100644
--- a/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java
+++ b/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java
@@ -38,7 +38,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.sqoop.SqoopOptions;
@@ -49,15 +48,13 @@
* Manages the compilation of a bunch of .java files into .class files
* and eventually a jar.
*
- * Also embeds this program's jar into the lib/ directory inside the compiled jar
- * to ensure that the job runs correctly.
- *
- *
- *
+ * Also embeds this program's jar into the lib/ directory inside the compiled
+ * jar to ensure that the job runs correctly.
*/
public class CompilationManager {
- public static final Log LOG = LogFactory.getLog(CompilationManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ CompilationManager.class.getName());
private SqoopOptions options;
private List<String> sources;
@@ -98,7 +95,8 @@ private String findHadoopCoreJar() {
}
for (File f : entries) {
- if (f.getName().startsWith("hadoop-") && f.getName().endsWith("-core.jar")) {
+ if (f.getName().startsWith("hadoop-")
+ && f.getName().endsWith("-core.jar")) {
LOG.info("Found hadoop core jar at: " + f.getAbsolutePath());
return f.getAbsolutePath();
}
@@ -128,9 +126,9 @@ public void compile() throws IOException {
// find hadoop-*-core.jar for classpath.
String coreJar = findHadoopCoreJar();
if (null == coreJar) {
- // Couldn't find a core jar to insert into the CP for compilation.
- // If, however, we're running this from a unit test, then the path
- // to the .class files might be set via the hadoop.alt.classpath property
+ // Couldn't find a core jar to insert into the CP for compilation. If,
+ // however, we're running this from a unit test, then the path to the
+ // .class files might be set via the hadoop.alt.classpath property
// instead. Check there first.
String coreClassesPath = System.getProperty("hadoop.alt.classpath");
if (null == coreClassesPath) {
@@ -200,7 +198,7 @@ public void compile() throws IOException {
}
/**
- * @return the complete filename of the .jar file to generate */
+ * @return the complete filename of the .jar file to generate. */
public String getJarFilename() {
String jarOutDir = options.getJarOutputDir();
String tableName = options.getTableName();
@@ -235,11 +233,11 @@ private void addClassFilesFromDir(File dir, JarOutputStream jstream)
baseDirName = baseDirName + File.separator;
}
- // for each input class file, create a zipfile entry for it,
+ // For each input class file, create a zipfile entry for it,
// read the file into a buffer, and write it to the jar file.
for (File entry : dirEntries) {
if (!entry.isDirectory()) {
- // chomp off the portion of the full path that is shared
+ // Chomp off the portion of the full path that is shared
// with the base directory where class files were put;
// we only record the subdir parts in the zip entry.
String fullPath = entry.getAbsolutePath();
@@ -247,7 +245,8 @@ private void addClassFilesFromDir(File dir, JarOutputStream jstream)
boolean include = chompedPath.endsWith(".class")
&& sources.contains(
- chompedPath.substring(0, chompedPath.length() - ".class".length()) + ".java");
+ chompedPath.substring(0, chompedPath.length() - ".class".length())
+ + ".java");
if (include) {
// include this file.
@@ -262,7 +261,7 @@ private void addClassFilesFromDir(File dir, JarOutputStream jstream)
}
/**
- * Create an output jar file to use when executing MapReduce jobs
+ * Create an output jar file to use when executing MapReduce jobs.
*/
public void jar() throws IOException {
String jarOutDir = options.getJarOutputDir();
@@ -293,7 +292,8 @@ public void jar() throws IOException {
addLibJar(thisJarFile, jstream);
} else {
// couldn't find our own jar (we were running from .class files?)
- LOG.warn("Could not find jar for Sqoop; MapReduce jobs may not run correctly.");
+ LOG.warn("Could not find jar for Sqoop; "
+ + "MapReduce jobs may not run correctly.");
}
String shimJarFile = findShimJar();
@@ -347,12 +347,13 @@ private void addLibJar(String jarFilename, JarOutputStream jstream)
private static final int BUFFER_SZ = 4096;
/**
- * utility method to copy a .class file into the jar stream.
+ * Utility method to copy a .class file into the jar stream.
* @param f
* @param ostream
* @throws IOException
*/
- private void copyFileToStream(File f, OutputStream ostream) throws IOException {
+ private void copyFileToStream(File f, OutputStream ostream)
+ throws IOException {
FileInputStream fis = new FileInputStream(f);
byte [] buffer = new byte[BUFFER_SZ];
try {
@@ -381,7 +382,7 @@ private String findShimJar() {
return findJarForClass(h.getClass());
}
- // method mostly cloned from o.a.h.mapred.JobConf.findContainingJar()
+ // Method mostly cloned from o.a.h.mapred.JobConf.findContainingJar().
private String findJarForClass(Class<? extends Object> classObj) {
ClassLoader loader = classObj.getClassLoader();
String classFile = classObj.getName().replaceAll("\\.", "/") + ".class";
diff --git a/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java b/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java
index f0f7434a..abed91c7 100644
--- a/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java
+++ b/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java
@@ -25,18 +25,20 @@
/**
* Reconciles the table name being imported with the class naming information
- * specified in SqoopOptions to determine the actual package and class name
- * to use for a table.
+ * specified in SqoopOptions to determine the actual package and class name to
+ * use for a table.
*/
public class TableClassName {
- public static final Log LOG = LogFactory.getLog(TableClassName.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TableClassName.class.getName());
private final SqoopOptions options;
public TableClassName(final SqoopOptions opts) {
if (null == opts) {
- throw new NullPointerException("Cannot instantiate a TableClassName on null options.");
+ throw new NullPointerException(
+ "Cannot instantiate a TableClassName on null options.");
} else {
this.options = opts;
}
@@ -47,18 +49,19 @@ public TableClassName(final SqoopOptions opts) {
* package-part which will be used for a class. The actual table name being
* generated-for is irrelevant; so not an argument.
*
- * @return the package where generated ORM classes go. Will be null for top-level.
+ * @return the package where generated ORM classes go. Will be null for
+ * top-level.
*/
public String getPackageForTable() {
String predefinedClass = options.getClassName();
if (null != predefinedClass) {
- // if the predefined classname contains a package-part, return that.
+ // If the predefined classname contains a package-part, return that.
int lastDot = predefinedClass.lastIndexOf('.');
if (-1 == lastDot) {
- // no package part.
+ // No package part.
return null;
} else {
- // return the string up to but not including the last dot.
+ // Return the string up to but not including the last dot.
return predefinedClass.substring(0, lastDot);
}
} else {
@@ -69,8 +72,8 @@ public String getPackageForTable() {
}
/**
- * @param tableName the name of the table being imported
- * @return the full name of the class to generate/use to import a table
+ * @param tableName the name of the table being imported.
+ * @return the full name of the class to generate/use to import a table.
*/
public String getClassForTable(String tableName) {
if (null == tableName) {
@@ -95,7 +98,8 @@ public String getClassForTable(String tableName) {
}
/**
- * @return just the last spegment of the class name -- all package info stripped.
+ * @return just the last segment of the class name -- all package info
+ * stripped.
*/
public String getShortClassForTable(String tableName) {
String fullClass = getClassForTable(tableName);
diff --git a/src/java/org/apache/hadoop/sqoop/shims/HadoopShim.java b/src/java/org/apache/hadoop/sqoop/shims/HadoopShim.java
index 655fcad6..599eb2a1 100644
--- a/src/java/org/apache/hadoop/sqoop/shims/HadoopShim.java
+++ b/src/java/org/apache/hadoop/sqoop/shims/HadoopShim.java
@@ -77,7 +77,8 @@ public abstract long getNumMapInputRecords(Job job)
/**
* Set the mapper speculative execution property for a job.
*/
- public abstract void setJobMapSpeculativeExecution(Job job, boolean isEnabled);
+ public abstract void setJobMapSpeculativeExecution(Job job,
+ boolean isEnabled);
/**
* Sets the Jobtracker address to use for a job.
@@ -123,7 +124,7 @@ public abstract long getNumMapInputRecords(Job job)
public abstract MapContext getMapContextForIOPath(
Configuration conf, Path p);
- public final static synchronized HadoopShim get() {
+ public static final synchronized HadoopShim get() {
return ShimLoader.getHadoopShim(null);
}
}
diff --git a/src/java/org/apache/hadoop/sqoop/shims/ShimLoader.java b/src/java/org/apache/hadoop/sqoop/shims/ShimLoader.java
index d86207ae..5e433cb5 100644
--- a/src/java/org/apache/hadoop/sqoop/shims/ShimLoader.java
+++ b/src/java/org/apache/hadoop/sqoop/shims/ShimLoader.java
@@ -174,8 +174,8 @@ private static T loadShim(List matchExprs,
return shim;
} catch (Exception e) {
- throw new RuntimeException("Could not load shim in class " +
- className, e);
+ throw new RuntimeException("Could not load shim in class "
+ + className, e);
}
}
}
diff --git a/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java b/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
index 319c5edc..c99db56f 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
@@ -52,7 +52,8 @@
*/
public abstract class BaseSqoopTool extends SqoopTool {
- public static final Log LOG = LogFactory.getLog(BaseSqoopTool.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ BaseSqoopTool.class.getName());
public static final String HELP_STR = "\nTry --help for usage instructions.";
@@ -248,7 +249,8 @@ protected RelatedOptions getCommonOptions() {
.hasArg().withDescription("Set authentication password")
.withLongOpt(PASSWORD_ARG)
.create());
- commonOpts.addOption(OptionBuilder.withDescription("Read password from console")
+ commonOpts.addOption(OptionBuilder
+ .withDescription("Read password from console")
.create(PASSWORD_PROMPT_ARG));
commonOpts.addOption(OptionBuilder.withArgName("dir")
@@ -342,10 +344,11 @@ protected RelatedOptions getOutputFormatOptions() {
}
/**
- * @return options governing input format delimiters
+ * @return options governing input format delimiters.
*/
protected RelatedOptions getInputFormatOptions() {
- RelatedOptions inputFormatOpts = new RelatedOptions("Input parsing arguments");
+ RelatedOptions inputFormatOpts =
+ new RelatedOptions("Input parsing arguments");
inputFormatOpts.addOption(OptionBuilder.withArgName("char")
.hasArg()
.withDescription("Sets the input field separator")
@@ -380,7 +383,8 @@ protected RelatedOptions getInputFormatOptions() {
* @return options related to code generation.
*/
protected RelatedOptions getCodeGenOpts(boolean multiTable) {
- RelatedOptions codeGenOpts = new RelatedOptions("Code generation arguments");
+ RelatedOptions codeGenOpts =
+ new RelatedOptions("Code generation arguments");
codeGenOpts.addOption(OptionBuilder.withArgName("dir")
.hasArg()
.withDescription("Output directory for generated code")
diff --git a/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java b/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
index 8201d2d4..d19d93aa 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
@@ -62,7 +62,7 @@ public List getGeneratedJarFiles() {
}
/**
- * Generate the .class and .jar files
+ * Generate the .class and .jar files.
* @return the filename of the emitted jar file.
* @throws IOException
*/
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java b/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
index 4ed3e897..09afa44a 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
@@ -45,7 +45,6 @@
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.shims.ShimLoader;
-import org.apache.hadoop.sqoop.tool.SqoopTool;
import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException;
@@ -77,7 +76,8 @@ private void exportTable(SqoopOptions options, String tableName)
// Generate the ORM code for the tables.
jarFile = codeGenerator.generateORM(options, tableName);
- ExportJobContext context = new ExportJobContext(tableName, jarFile, options);
+ ExportJobContext context = new ExportJobContext(tableName, jarFile,
+ options);
manager.exportTable(context);
}
@@ -94,7 +94,8 @@ public int run(SqoopOptions options) {
try {
exportTable(options, options.getTableName());
} catch (IOException ioe) {
- LOG.error("Encountered IOException running export job: " + ioe.toString());
+ LOG.error("Encountered IOException running export job: "
+ + ioe.toString());
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
throw new RuntimeException(ioe);
} else {
diff --git a/src/java/org/apache/hadoop/sqoop/tool/HelpTool.java b/src/java/org/apache/hadoop/sqoop/tool/HelpTool.java
index c2cfe64d..1bab3dca 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/HelpTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/HelpTool.java
@@ -21,7 +21,6 @@
import java.util.Set;
import org.apache.hadoop.sqoop.SqoopOptions;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;
/**
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ImportAllTablesTool.java b/src/java/org/apache/hadoop/sqoop/tool/ImportAllTablesTool.java
index f1dbb883..acad062f 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ImportAllTablesTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ImportAllTablesTool.java
@@ -19,32 +19,13 @@
package org.apache.hadoop.sqoop.tool;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.SqoopOptions;
-import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ConnManager;
-import org.apache.hadoop.sqoop.manager.ExportJobContext;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.shims.ShimLoader;
-import org.apache.hadoop.sqoop.tool.SqoopTool;
-import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException;
/**
@@ -84,7 +65,8 @@ public int run(SqoopOptions options) {
}
}
} catch (IOException ioe) {
- LOG.error("Encountered IOException running import job: " + ioe.toString());
+ LOG.error("Encountered IOException running import job: "
+ + ioe.toString());
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
throw new RuntimeException(ioe);
} else {
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java b/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
index 0e0a9c78..9bb79f9c 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
@@ -84,7 +84,8 @@ protected void importTable(SqoopOptions options, String tableName,
jarFile = codeGenerator.generateORM(options, tableName);
// Do the actual import.
- ImportJobContext context = new ImportJobContext(tableName, jarFile, options);
+ ImportJobContext context = new ImportJobContext(tableName, jarFile,
+ options);
manager.importTable(context);
// If the user wants this table to be in Hive, perform that post-load.
@@ -120,7 +121,8 @@ public int run(SqoopOptions options) {
// Import a single table the user specified.
importTable(options, options.getTableName(), hiveImport);
} catch (IOException ioe) {
- LOG.error("Encountered IOException running import job: " + ioe.toString());
+ LOG.error("Encountered IOException running import job: "
+ + ioe.toString());
if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
throw new RuntimeException(ioe);
} else {
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java b/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
index b806494c..d2d6f715 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
@@ -28,7 +28,7 @@
import org.apache.hadoop.sqoop.cli.ToolOptions;
/**
- * Tool that lists available databases on a server
+ * Tool that lists available databases on a server.
*/
public class ListDatabasesTool extends BaseSqoopTool {
diff --git a/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java b/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
index b554e3f4..c7cdd7d2 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
@@ -83,7 +83,7 @@ public abstract class SqoopTool {
* @param cls the class providing the tool.
* @param description a user-friendly description of the tool's function.
*/
- private static final void registerTool(String toolName,
+ private static void registerTool(String toolName,
Class<? extends SqoopTool> cls, String description) {
TOOLS.put(toolName, cls);
DESCRIPTIONS.put(toolName, description);
@@ -169,7 +169,8 @@ public void configureOptions(ToolOptions opts) {
* @param opts the configured tool options
*/
public void printHelp(ToolOptions opts) {
- System.out.println("usage: sqoop " + getToolName() + " [GENERIC-ARGS] [TOOL-ARGS]");
+ System.out.println("usage: sqoop " + getToolName()
+ + " [GENERIC-ARGS] [TOOL-ARGS]");
System.out.println("");
opts.printHelp();
diff --git a/src/java/org/apache/hadoop/sqoop/util/AsyncSink.java b/src/java/org/apache/hadoop/sqoop/util/AsyncSink.java
index 1096f778..292a0bdf 100644
--- a/src/java/org/apache/hadoop/sqoop/util/AsyncSink.java
+++ b/src/java/org/apache/hadoop/sqoop/util/AsyncSink.java
@@ -37,7 +37,8 @@ public abstract class AsyncSink {
/**
* Wait until the stream has been processed.
- * @return a status code indicating success or failure. 0 is typical for success.
+ * @return a status code indicating success or failure. 0 is typical for
+ * success.
*/
public abstract int join() throws InterruptedException;
}
diff --git a/src/java/org/apache/hadoop/sqoop/util/ClassLoaderStack.java b/src/java/org/apache/hadoop/sqoop/util/ClassLoaderStack.java
index 7744ef22..fbdff986 100644
--- a/src/java/org/apache/hadoop/sqoop/util/ClassLoaderStack.java
+++ b/src/java/org/apache/hadoop/sqoop/util/ClassLoaderStack.java
@@ -29,19 +29,17 @@
/**
* Allows you to add and remove jar-files from the running JVM by
* instantiating classloaders for them.
- *
- *
- *
*/
public final class ClassLoaderStack {
- public static final Log LOG = LogFactory.getLog(ClassLoaderStack.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ ClassLoaderStack.class.getName());
private ClassLoaderStack() {
}
/**
- * Sets the classloader for the current thread
+ * Sets the classloader for the current thread.
*/
public static void setCurrentClassLoader(ClassLoader cl) {
LOG.debug("Restoring classloader: " + cl.toString());
@@ -49,23 +47,27 @@ public static void setCurrentClassLoader(ClassLoader cl) {
}
/**
- * Adds a ClassLoader to the top of the stack that will load from the Jar file
- * of your choice. Returns the previous classloader so you can restore it
- * if need be, later.
+ * Adds a ClassLoader to the top of the stack that will load from the Jar
+ * file of your choice. Returns the previous classloader so you can restore
+ * it if need be, later.
*
- * @param jarFile The filename of a jar file that you want loaded into this JVM
- * @param testClassName The name of the class to load immediately (optional)
+ * @param jarFile The filename of a jar file that you want loaded into this
+ * JVM.
+ * @param testClassName The name of the class to load immediately
+ * (optional).
*/
public static ClassLoader addJarFile(String jarFile, String testClassName)
throws IOException {
- // load the classes from the ORM JAR file into the current VM
- ClassLoader prevClassLoader = Thread.currentThread().getContextClassLoader();
+ // load the classes from the ORM JAR file into the current VM.
+ ClassLoader prevClassLoader =
+ Thread.currentThread().getContextClassLoader();
String urlPath = "jar:file://" + new File(jarFile).getAbsolutePath() + "!/";
LOG.debug("Attempting to load jar through URL: " + urlPath);
LOG.debug("Previous classloader is " + prevClassLoader);
URL [] jarUrlArray = {new URL(urlPath)};
- URLClassLoader cl = URLClassLoader.newInstance(jarUrlArray, prevClassLoader);
+ URLClassLoader cl = URLClassLoader.newInstance(jarUrlArray,
+ prevClassLoader);
try {
if (null != testClassName) {
// try to load a class from the jar to force loading now.
@@ -74,7 +76,8 @@ public static ClassLoader addJarFile(String jarFile, String testClassName)
}
LOG.debug("Loaded jar into current JVM: " + urlPath);
} catch (ClassNotFoundException cnfe) {
- throw new IOException("Could not load jar " + jarFile + " into JVM. (Could not find class "
+ throw new IOException("Could not load jar " + jarFile
+ + " into JVM. (Could not find class "
+ testClassName + ".)", cnfe);
}
diff --git a/src/java/org/apache/hadoop/sqoop/util/ErrorableAsyncSink.java b/src/java/org/apache/hadoop/sqoop/util/ErrorableAsyncSink.java
index b3000813..ca7e8eb7 100644
--- a/src/java/org/apache/hadoop/sqoop/util/ErrorableAsyncSink.java
+++ b/src/java/org/apache/hadoop/sqoop/util/ErrorableAsyncSink.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.sqoop.util;
-import java.io.InputStream;
-
/**
* Partial implementation of AsyncSink that relies on ErrorableThread to
* provide a status bit for the join() method.
diff --git a/src/java/org/apache/hadoop/sqoop/util/FileListing.java b/src/java/org/apache/hadoop/sqoop/util/FileListing.java
index 4df3c8e6..dbd62b07 100644
--- a/src/java/org/apache/hadoop/sqoop/util/FileListing.java
+++ b/src/java/org/apache/hadoop/sqoop/util/FileListing.java
@@ -61,15 +61,16 @@ public static void main(String... aArgs) throws FileNotFoundException {
*
* @param aStartingDir is a valid directory, which can be read.
*/
- public static List<File> getFileListing(File aStartingDir) throws FileNotFoundException {
+ public static List<File> getFileListing(File aStartingDir)
+ throws FileNotFoundException {
validateDirectory(aStartingDir);
List<File> result = getFileListingNoSort(aStartingDir);
Collections.sort(result);
return result;
}
- // PRIVATE //
- private static List<File> getFileListingNoSort(File aStartingDir) throws FileNotFoundException {
+ private static List<File> getFileListingNoSort(File aStartingDir)
+ throws FileNotFoundException {
List<File> result = new ArrayList<File>();
File[] filesAndDirs = aStartingDir.listFiles();
List<File> filesDirs = Arrays.asList(filesAndDirs);
@@ -88,23 +89,26 @@ private static List getFileListingNoSort(File aStartingDir) throws FileNot
/**
* Directory is valid if it exists, does not represent a file, and can be read.
*/
- private static void validateDirectory(File aDirectory) throws FileNotFoundException {
+ private static void validateDirectory(File aDirectory)
+ throws FileNotFoundException {
if (aDirectory == null) {
throw new IllegalArgumentException("Directory should not be null.");
}
if (!aDirectory.exists()) {
- throw new FileNotFoundException("Directory does not exist: " + aDirectory);
+ throw new FileNotFoundException("Directory does not exist: "
+ + aDirectory);
}
if (!aDirectory.isDirectory()) {
throw new IllegalArgumentException("Is not a directory: " + aDirectory);
}
if (!aDirectory.canRead()) {
- throw new IllegalArgumentException("Directory cannot be read: " + aDirectory);
+ throw new IllegalArgumentException("Directory cannot be read: "
+ + aDirectory);
}
}
/**
- * Recursively delete a directory and all its children
+ * Recursively delete a directory and all its children.
* @param dir is a valid directory.
*/
public static void recursiveDeleteDir(File dir) throws IOException {
diff --git a/src/java/org/apache/hadoop/sqoop/util/JdbcUrl.java b/src/java/org/apache/hadoop/sqoop/util/JdbcUrl.java
index 4342b303..802aa182 100644
--- a/src/java/org/apache/hadoop/sqoop/util/JdbcUrl.java
+++ b/src/java/org/apache/hadoop/sqoop/util/JdbcUrl.java
@@ -38,8 +38,8 @@ private JdbcUrl() {
}
/**
- * @return the database name from the connect string, which is typically the 'path'
- * component, or null if we can't.
+ * @return the database name from the connect string, which is typically the
+ * 'path' component, or null if we can't.
*/
public static String getDatabaseName(String connectString) {
try {
@@ -48,7 +48,8 @@ public static String getDatabaseName(String connectString) {
if (-1 == schemeEndOffset) {
// couldn't find one? try our best here.
sanitizedString = "http://" + connectString;
- LOG.warn("Could not find database access scheme in connect string " + connectString);
+ LOG.warn("Could not find database access scheme in connect string "
+ + connectString);
} else {
sanitizedString = "http" + connectString.substring(schemeEndOffset);
}
@@ -81,7 +82,8 @@ public static String getHostName(String connectString) {
String sanitizedString = null;
int schemeEndOffset = connectString.indexOf("://");
if (-1 == schemeEndOffset) {
- // couldn't find one? ok, then there's no problem, it should work as a URL.
+ // Couldn't find one? ok, then there's no problem, it should work as a
+ // URL.
sanitizedString = connectString;
} else {
sanitizedString = "http" + connectString.substring(schemeEndOffset);
@@ -104,7 +106,8 @@ public static int getPort(String connectString) {
String sanitizedString = null;
int schemeEndOffset = connectString.indexOf("://");
if (-1 == schemeEndOffset) {
- // couldn't find one? ok, then there's no problem, it should work as a URL.
+ // Couldn't find one? ok, then there's no problem, it should work as a
+ // URL.
sanitizedString = connectString;
} else {
sanitizedString = "http" + connectString.substring(schemeEndOffset);
diff --git a/src/java/org/apache/hadoop/sqoop/util/LoggingAsyncSink.java b/src/java/org/apache/hadoop/sqoop/util/LoggingAsyncSink.java
index 6acc7160..bbf10a0e 100644
--- a/src/java/org/apache/hadoop/sqoop/util/LoggingAsyncSink.java
+++ b/src/java/org/apache/hadoop/sqoop/util/LoggingAsyncSink.java
@@ -32,7 +32,8 @@
*/
public class LoggingAsyncSink extends AsyncSink {
- public static final Log LOG = LogFactory.getLog(LoggingAsyncSink.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ LoggingAsyncSink.class.getName());
private Log contextLog;
diff --git a/src/java/org/apache/hadoop/sqoop/util/NullAsyncSink.java b/src/java/org/apache/hadoop/sqoop/util/NullAsyncSink.java
index b324ddda..94c7270e 100644
--- a/src/java/org/apache/hadoop/sqoop/util/NullAsyncSink.java
+++ b/src/java/org/apache/hadoop/sqoop/util/NullAsyncSink.java
@@ -31,7 +31,8 @@
*/
public class NullAsyncSink extends AsyncSink {
- public static final Log LOG = LogFactory.getLog(NullAsyncSink.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ NullAsyncSink.class.getName());
private Thread child;
@@ -69,7 +70,8 @@ public void run() {
}
}
} catch (IOException ioe) {
- LOG.warn("IOException reading from (ignored) stream: " + ioe.toString());
+ LOG.warn("IOException reading from (ignored) stream: "
+ + ioe.toString());
}
try {
diff --git a/src/java/org/apache/hadoop/sqoop/util/PerfCounters.java b/src/java/org/apache/hadoop/sqoop/util/PerfCounters.java
index 499a8258..671e1974 100644
--- a/src/java/org/apache/hadoop/sqoop/util/PerfCounters.java
+++ b/src/java/org/apache/hadoop/sqoop/util/PerfCounters.java
@@ -47,7 +47,7 @@ public void stopClock() {
private static final double ONE_BILLION = 1000.0 * 1000.0 * 1000.0;
- /** maximum number of digits after the decimal place */
+ /** Maximum number of digits after the decimal place. */
private static final int MAX_PLACES = 4;
/**
@@ -63,8 +63,8 @@ private Double inSeconds(long nanos) {
/**
- * @return a string of the form "xxxx bytes" or "xxxxx KB" or "xxxx GB", scaled
- * as is appropriate for the current value.
+ * @return a string of the form "xxxx bytes" or "xxxxx KB" or "xxxx GB",
+ * scaled as is appropriate for the current value.
*/
private String formatBytes() {
double val;
@@ -125,7 +125,8 @@ private String formatSpeed() {
}
public String toString() {
- return formatBytes() + " in " + formatTimeInSeconds() + " (" + formatSpeed() + ")";
+ return formatBytes() + " in " + formatTimeInSeconds() + " ("
+ + formatSpeed() + ")";
}
}
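
As context for the formatBytes() javadoc re-wrapped above: the counter value is scaled into the largest readable unit and the fraction is capped at MAX_PLACES digits. A hedged, self-contained sketch of that idea (the helper class is illustrative; the real PerfCounters implementation may scale and round differently):

    import java.text.NumberFormat;

    public class ByteFormatSketch {
      private static final int MAX_PLACES = 4;  // mirrors the constant in the hunk above

      static String formatBytes(long bytes) {
        String[] units = {" bytes", " KB", " MB", " GB"};
        double val = bytes;
        int unit = 0;
        while (val >= 1024.0 && unit < units.length - 1) {
          val /= 1024.0;   // step up to the next unit
          unit++;
        }
        NumberFormat fmt = NumberFormat.getInstance();
        fmt.setMaximumFractionDigits(MAX_PLACES);
        return fmt.format(val) + units[unit];
      }

      public static void main(String[] args) {
        System.out.println(formatBytes(123L));               // 123 bytes
        System.out.println(formatBytes(10L * 1024 * 1024));  // 10 MB
      }
    }
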
diff --git a/src/java/org/apache/hadoop/sqoop/util/ResultSetPrinter.java b/src/java/org/apache/hadoop/sqoop/util/ResultSetPrinter.java
index ba2ac70e..d13969bb 100644
--- a/src/java/org/apache/hadoop/sqoop/util/ResultSetPrinter.java
+++ b/src/java/org/apache/hadoop/sqoop/util/ResultSetPrinter.java
@@ -29,17 +29,18 @@
import org.apache.hadoop.util.StringUtils;
/**
- * Utility methods to format and print ResultSet objects
+ * Utility methods to format and print ResultSet objects.
*/
public class ResultSetPrinter {
- public static final Log LOG = LogFactory.getLog(ResultSetPrinter.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ ResultSetPrinter.class.getName());
// max output width to allocate to any column of the printed results.
private static final int MAX_COL_WIDTH = 20;
/**
- * Print 'str' to the string builder, padded to 'width' chars
+ * Print 'str' to the string builder, padded to 'width' chars.
*/
private static void printPadded(StringBuilder sb, String str, int width) {
int numPad;
@@ -72,7 +73,8 @@ public final void printResultSet(PrintWriter pw, ResultSet results)
ResultSetMetaData metadata = results.getMetaData();
for (int i = 1; i < cols + 1; i++) {
String colName = metadata.getColumnName(i);
- colWidths[i - 1] = Math.min(metadata.getColumnDisplaySize(i), MAX_COL_WIDTH);
+ colWidths[i - 1] = Math.min(metadata.getColumnDisplaySize(i),
+ MAX_COL_WIDTH);
if (colName == null || colName.equals("")) {
colName = metadata.getColumnLabel(i) + "*";
}
diff --git a/src/perftest/ExportStressTest.java b/src/perftest/ExportStressTest.java
index d95365f7..a2a18fb3 100644
--- a/src/perftest/ExportStressTest.java
+++ b/src/perftest/ExportStressTest.java
@@ -30,19 +30,20 @@
/**
* Stress test export procedure by running a large-scale export to MySQL.
- * This requires MySQL be configured with a database that can be accessed
- * by the specified username without a password. The user must be able to
- * create and drop tables in the database.
+ * This requires MySQL be configured with a database that can be accessed by
+ * the specified username without a password. The user must be able to create
+ * and drop tables in the database.
*
- * Run with: src/scripts/run-perftest.sh ExportStressTest (connect-str) (username)
+ * Run with: src/scripts/run-perftest.sh ExportStressTest \
+ * (connect-str) (username)
*/
public class ExportStressTest extends Configured implements Tool {
// Export 10 GB of data. Each record is ~100 bytes.
- public final static int NUM_FILES = 10;
- public final static int RECORDS_PER_FILE = 10 * 1024 * 1024;
+ public static final int NUM_FILES = 10;
+ public static final int RECORDS_PER_FILE = 10 * 1024 * 1024;
- public final static String ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
+ public static final String ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
public ExportStressTest() {
}
@@ -88,7 +89,8 @@ public void createTable(String connectStr, String username) throws Exception {
Connection conn = DriverManager.getConnection(connectStr, username, null);
conn.setAutoCommit(false);
PreparedStatement stmt = conn.prepareStatement(
- "DROP TABLE IF EXISTS ExportStressTestTable", ResultSet.TYPE_FORWARD_ONLY,
+ "DROP TABLE IF EXISTS ExportStressTestTable",
+ ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_READ_ONLY);
stmt.executeUpdate();
stmt.close();
@@ -103,7 +105,9 @@ public void createTable(String connectStr, String username) throws Exception {
conn.close();
}
- /** Actually run the export of the generated data to the user-created table. */
+ /**
+ * Actually run the export of the generated data to the user-created table.
+ */
public void runExport(String connectStr, String username) throws Exception {
SqoopOptions options = new SqoopOptions(getConf());
options.setConnectString(connectStr);
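
The createTable() hunk above is only re-wrapping an ordinary JDBC DDL call; completed and made stand-alone, the pattern looks roughly like the sketch below. The connect string and user name are placeholders, not values taken from the patch, and a running MySQL server plus driver are assumed.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class DdlSketch {
      public static void main(String[] args) throws SQLException {
        String connectStr = "jdbc:mysql://localhost/exportdb";  // hypothetical
        String username = "perftest";                           // hypothetical, no password
        Connection conn = DriverManager.getConnection(connectStr, username, null);
        try {
          conn.setAutoCommit(false);
          PreparedStatement stmt = conn.prepareStatement(
              "DROP TABLE IF EXISTS ExportStressTestTable",
              ResultSet.TYPE_FORWARD_ONLY,
              ResultSet.CONCUR_READ_ONLY);
          stmt.executeUpdate();   // issue the DDL
          stmt.close();
          conn.commit();          // commit explicitly; auto-commit is off
        } finally {
          conn.close();
        }
      }
    }
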
diff --git a/src/perftest/LobFilePerfTest.java b/src/perftest/LobFilePerfTest.java
index 8d080cbe..28d16d43 100644
--- a/src/perftest/LobFilePerfTest.java
+++ b/src/perftest/LobFilePerfTest.java
@@ -25,16 +25,17 @@
* A simple benchmark to performance test LobFile reader/writer speed.
* Writes out 10 GB of data to the local disk and then reads it back.
* Run with:
- * HADOOP_OPTS=-agentlib:hprof=cpu=samples src/scripts/run-perftest.sh LobFilePerfTest
+ * HADOOP_OPTS=-agentlib:hprof=cpu=samples \
+ * src/scripts/run-perftest.sh LobFilePerfTest
*/
public class LobFilePerfTest {
- long recordLen = 20 * 1024 * 1024; // 20 MB records
- int numRecords = 500;
- Configuration conf;
- Path p;
- long startTime;
- byte [] record;
+ private long recordLen = 20 * 1024 * 1024; // 20 MB records
+ private int numRecords = 500;
+ private Configuration conf;
+ private Path p;
+ private long startTime;
+ private byte [] record;
public LobFilePerfTest() {
conf = new Configuration();
@@ -99,7 +100,7 @@ private void readFile() throws Exception {
System.out.println("Read " + recordSize + " bytes");
}
- private void run() throws Exception {
+ public void run() throws Exception {
makeRecordBody();
writeFile();
readFile();
diff --git a/src/perftest/LobFileStressTest.java b/src/perftest/LobFileStressTest.java
index bf9bf947..ee197284 100644
--- a/src/perftest/LobFileStressTest.java
+++ b/src/perftest/LobFileStressTest.java
@@ -30,14 +30,14 @@
public class LobFileStressTest {
// Big records in testBigFile() are 5 GB each.
- public final static long LARGE_RECORD_LEN = 5L * 1024L * 1024L * 1024L;
+ public static final long LARGE_RECORD_LEN = 5L * 1024L * 1024L * 1024L;
- int numRandomTrials = 1000000;
- Configuration conf;
- boolean allPassed;
+ private int numRandomTrials = 1000000;
+ private Configuration conf;
+ private boolean allPassed;
- long lastCompressPos; // start offset of the last record in the file.
- long lastRawPos;
+ private long lastCompressPos; // start offset of the last record in the file.
+ private long lastRawPos;
public LobFileStressTest() {
conf = new Configuration();
@@ -314,7 +314,7 @@ private void testBigFile(boolean compress) throws Exception {
+ compress + ". ");
Path p = getBigFilePath(compress);
- long startOffsets [] = new long[NUM_RECORDS];
+ long [] startOffsets = new long[NUM_RECORDS];
// Write the file. Five records, 5 GB a piece.
System.out.print("Testing write. ");
@@ -364,8 +364,7 @@ private void testBigFile(boolean compress) throws Exception {
}
}
-
- private void run() throws Exception {
+ public void run() throws Exception {
writeIntegerFile(true);
writeIntegerFile(false);
testSequentialScan(false);
diff --git a/src/shims/cloudera/org/apache/hadoop/sqoop/shims/CDH3Shim.java b/src/shims/cloudera/org/apache/hadoop/sqoop/shims/CDH3Shim.java
index 250d0142..e697fd75 100644
--- a/src/shims/cloudera/org/apache/hadoop/sqoop/shims/CDH3Shim.java
+++ b/src/shims/cloudera/org/apache/hadoop/sqoop/shims/CDH3Shim.java
@@ -33,7 +33,7 @@
/**
- * Hadoop Shim for CDH3 (based on 0.20.2)
+ * Hadoop Shim for CDH3 (based on 0.20.2).
*/
public class CDH3Shim extends CommonHadoopShim {
@Override
diff --git a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/CombineShimRecordReader.java b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/CombineShimRecordReader.java
index 0012ebb4..15c8671e 100644
--- a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/CombineShimRecordReader.java
+++ b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/CombineShimRecordReader.java
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
@@ -65,10 +64,10 @@ public CombineShimRecordReader(CombineFileSplit split,
}
@Override
- public void initialize(InputSplit split, TaskAttemptContext context)
+ public void initialize(InputSplit curSplit, TaskAttemptContext curContext)
throws IOException, InterruptedException {
- this.split = (CombineFileSplit) split;
- this.context = context;
+ this.split = (CombineFileSplit) curSplit;
+ this.context = curContext;
if (null == rr) {
createChildReader();
@@ -77,7 +76,7 @@ public void initialize(InputSplit split, TaskAttemptContext context)
FileSplit fileSplit = new FileSplit(this.split.getPath(index),
this.split.getOffset(index), this.split.getLength(index),
this.split.getLocations());
- this.rr.initialize(fileSplit, context);
+ this.rr.initialize(fileSplit, this.context);
}
@Override
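
The rename from split/context to curSplit/curContext above removes parameters that shadow fields of the same name, the kind of ambiguity Checkstyle's HiddenField check reports (whether this patch's configuration enables that exact check is an assumption, since the checkstyle.xml body is not reproduced below). A small illustration of why the rename helps:

    public class ShadowSketch {
      private String value;

      // Before: the parameter hides the field, so a bare "value" silently
      // refers to the parameter and the field must be qualified with "this.".
      public void initShadowed(String value) {
        this.value = value;
      }

      // After: a distinct parameter name leaves nothing to mis-read.
      public void initRenamed(String newValue) {
        value = newValue;
      }
    }
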
diff --git a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportInputFormat.java b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportInputFormat.java
index c59eeb1f..0e6ac8f8 100644
--- a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportInputFormat.java
+++ b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportInputFormat.java
@@ -18,18 +18,13 @@
package org.apache.hadoop.sqoop.mapreduce;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
diff --git a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportOutputFormat.java b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportOutputFormat.java
index b3831ea7..fc649f37 100644
--- a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportOutputFormat.java
+++ b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/ExportOutputFormat.java
@@ -1,8 +1,8 @@
/**
- * Licensed to the Apache Software Foundation (ASF) under one
+ * Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
@@ -29,7 +29,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
@@ -52,7 +51,7 @@
* Uses DBOutputFormat/DBConfiguration for configuring the output.
*/
public class ExportOutputFormat
- extends OutputFormat {
+ extends OutputFormat {
/** conf key: number of rows to export per INSERT statement. */
public static final String RECORDS_PER_STATEMENT_KEY =
@@ -72,6 +71,7 @@ public class ExportOutputFormat
private static final Log LOG = LogFactory.getLog(ExportOutputFormat.class);
+ @Override
/** {@inheritDoc} */
public void checkOutputSpecs(JobContext context)
throws IOException, InterruptedException {
@@ -90,6 +90,7 @@ public void checkOutputSpecs(JobContext context)
}
}
+ @Override
/** {@inheritDoc} */
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
throws IOException, InterruptedException {
@@ -105,6 +106,7 @@ public void setupTask(TaskAttemptContext taskContext) { }
};
}
+ @Override
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
throws IOException {
@@ -454,6 +456,7 @@ private void insertRows(boolean closeConn)
}
}
+ @Override
/** {@inheritDoc} */
public void close(TaskAttemptContext context)
throws IOException, InterruptedException {
@@ -474,6 +477,7 @@ public void close(TaskAttemptContext context)
}
}
+ @Override
/** {@inheritDoc} */
public void write(K key, V value)
throws InterruptedException, IOException {
diff --git a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/MySQLDumpInputFormat.java b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/MySQLDumpInputFormat.java
index c2b51895..f0792fd1 100644
--- a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/MySQLDumpInputFormat.java
+++ b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/MySQLDumpInputFormat.java
@@ -18,40 +18,14 @@
package org.apache.hadoop.sqoop.mapreduce;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.nio.CharBuffer;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
-import org.apache.hadoop.sqoop.SqoopOptions;
-import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
-import org.apache.hadoop.sqoop.lib.FieldFormatter;
-import org.apache.hadoop.sqoop.lib.RecordParser;
-import org.apache.hadoop.sqoop.util.AsyncSink;
-import org.apache.hadoop.sqoop.util.DirectImportUtils;
-import org.apache.hadoop.sqoop.util.ErrorableAsyncSink;
-import org.apache.hadoop.sqoop.util.ErrorableThread;
-import org.apache.hadoop.sqoop.util.ImportException;
-import org.apache.hadoop.sqoop.util.JdbcUrl;
-import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
-import org.apache.hadoop.sqoop.util.PerfCounters;
/**
* InputFormat designed to take data-driven splits and feed them to a mysqldump
diff --git a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/OracleExportOutputFormat.java b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/OracleExportOutputFormat.java
index dd2d0830..13571675 100644
--- a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/OracleExportOutputFormat.java
+++ b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/OracleExportOutputFormat.java
@@ -1,8 +1,8 @@
/**
- * Licensed to the Apache Software Foundation (ASF) under one
+ * Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
@@ -30,8 +30,9 @@
* Oracle-specific SQL formatting overrides default ExportOutputFormat's.
*/
public class OracleExportOutputFormat
- extends ExportOutputFormat {
+ extends ExportOutputFormat {
+ @Override
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
throws IOException {
diff --git a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/RawKeyTextOutputFormat.java b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/RawKeyTextOutputFormat.java
index 2698de5f..b26b94dd 100644
--- a/src/shims/common/org/apache/hadoop/sqoop/mapreduce/RawKeyTextOutputFormat.java
+++ b/src/shims/common/org/apache/hadoop/sqoop/mapreduce/RawKeyTextOutputFormat.java
@@ -28,7 +28,6 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
@@ -40,7 +39,7 @@
public class RawKeyTextOutputFormat extends FileOutputFormat {
protected static class RawKeyRecordWriter extends RecordWriter {
- private static final String utf8 = "UTF-8";
+ private static final String UTF8 = "UTF-8";
protected DataOutputStream out;
@@ -59,7 +58,7 @@ private void writeObject(Object o) throws IOException {
Text to = (Text) o;
out.write(to.getBytes(), 0, to.getLength());
} else {
- out.write(o.toString().getBytes(utf8));
+ out.write(o.toString().getBytes(UTF8));
}
}
@@ -67,7 +66,8 @@ public synchronized void write(K key, V value) throws IOException {
writeObject(key);
}
- public synchronized void close(TaskAttemptContext context) throws IOException {
+ public synchronized void close(TaskAttemptContext context)
+ throws IOException {
out.close();
}
}
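
Renaming utf8 to UTF8 above brings the field in line with the UPPER_CASE convention for static final constants (presumably Checkstyle's ConstantName check; again, the configuration itself is not shown here). The surrounding writer reduces to a pattern like this sketch, with an in-memory stream standing in for the real task output:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class RawWriteSketch {
      private static final String UTF8 = "UTF-8";

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        Object o = "a raw key written with no delimiter handling";
        // Non-Text objects fall back to their string form, encoded as UTF-8.
        out.write(o.toString().getBytes(UTF8));
        out.close();
        System.out.println(buf.size() + " bytes written");
      }
    }
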
diff --git a/src/test/checkstyle-java-header.txt b/src/test/checkstyle-java-header.txt
new file mode 100644
index 00000000..42d0cd8c
--- /dev/null
+++ b/src/test/checkstyle-java-header.txt
@@ -0,0 +1,17 @@
+/**
+ * Licensed to Cloudera, Inc. under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Cloudera, Inc. licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
diff --git a/src/test/checkstyle-noframes.xsl b/src/test/checkstyle-noframes.xsl
new file mode 100644
index 00000000..bb10b878
--- /dev/null
+++ b/src/test/checkstyle-noframes.xsl
@@ -0,0 +1,196 @@
+ [checkstyle-noframes.xsl: 196 lines of XSL report-stylesheet markup not reproduced in this copy of the diff]
diff --git a/src/test/checkstyle.xml b/src/test/checkstyle.xml
new file mode 100644
index 00000000..ad174a80
--- /dev/null
+++ b/src/test/checkstyle.xml
@@ -0,0 +1,209 @@
+ [checkstyle.xml: 209 lines of Checkstyle configuration markup not reproduced in this copy of the diff]
diff --git a/src/test/org/apache/hadoop/sqoop/AllTests.java b/src/test/org/apache/hadoop/sqoop/AllTests.java
index 5d9b2618..2f28a9b2 100644
--- a/src/test/org/apache/hadoop/sqoop/AllTests.java
+++ b/src/test/org/apache/hadoop/sqoop/AllTests.java
@@ -22,7 +22,7 @@
import junit.framework.TestSuite;
/**
- * All tests for Sqoop (org.apache.hadoop.sqoop)
+ * All tests for Sqoop (org.apache.hadoop.sqoop).
*/
public final class AllTests {
diff --git a/src/test/org/apache/hadoop/sqoop/SmokeTests.java b/src/test/org/apache/hadoop/sqoop/SmokeTests.java
index cb0c766b..08efe16b 100644
--- a/src/test/org/apache/hadoop/sqoop/SmokeTests.java
+++ b/src/test/org/apache/hadoop/sqoop/SmokeTests.java
@@ -38,7 +38,7 @@
import junit.framework.TestSuite;
/**
- * Smoke tests for Sqoop (org.apache.hadoop.sqoop)
+ * Smoke tests for Sqoop (org.apache.hadoop.sqoop).
*/
public final class SmokeTests {
diff --git a/src/test/org/apache/hadoop/sqoop/TestAllTables.java b/src/test/org/apache/hadoop/sqoop/TestAllTables.java
index 75b7002e..f747aa4c 100644
--- a/src/test/org/apache/hadoop/sqoop/TestAllTables.java
+++ b/src/test/org/apache/hadoop/sqoop/TestAllTables.java
@@ -42,7 +42,7 @@
public class TestAllTables extends ImportJobTestCase {
/**
- * Create the argv to pass to Sqoop
+ * Create the argv to pass to Sqoop.
* @return the argv as an array of strings.
*/
private String [] getArgv(boolean includeHadoopFlags) {
@@ -67,7 +67,7 @@ public class TestAllTables extends ImportJobTestCase {
/** the names of the tables we're creating. */
private List tableNames;
- /** The strings to inject in the (ordered) tables */
+ /** The strings to inject in the (ordered) tables. */
private List expectedStrings;
@Before
@@ -123,7 +123,8 @@ public void testMultiTableImport() throws IOException {
this.expectedStrings.remove(0);
BufferedReader reader = new BufferedReader(
- new InputStreamReader(new FileInputStream(new File(filePath.toString()))));
+ new InputStreamReader(new FileInputStream(
+ new File(filePath.toString()))));
try {
String line = reader.readLine();
assertEquals("Table " + tableName + " expected a different string",
diff --git a/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java b/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java
index 2e19ba5d..11496912 100644
--- a/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java
+++ b/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java
@@ -40,7 +40,8 @@
*/
public class TestColumnTypes extends ManagerCompatTestCase {
- public static final Log LOG = LogFactory.getLog(TestColumnTypes.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TestColumnTypes.class.getName());
@Override
protected Log getLogger() {
diff --git a/src/test/org/apache/hadoop/sqoop/TestConnFactory.java b/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
index 908557ee..6690b205 100644
--- a/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
+++ b/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
@@ -38,7 +38,8 @@ public class TestConnFactory extends TestCase {
public void testCustomFactory() throws IOException {
Configuration conf = new Configuration();
- conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, AlwaysDummyFactory.class.getName());
+ conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY,
+ AlwaysDummyFactory.class.getName());
ConnFactory factory = new ConnFactory(conf);
ConnManager manager = factory.getManager(new SqoopOptions());
@@ -76,6 +77,10 @@ public void testMultipleManagers() throws IOException {
////// mock classes used for test cases above //////
+ /**
+ * Factory that always returns a DummyManager, regardless of the
+ * configuration.
+ */
public static class AlwaysDummyFactory extends ManagerFactory {
public ConnManager accept(SqoopOptions opts) {
// Always return a new DummyManager
@@ -83,6 +88,9 @@ public ConnManager accept(SqoopOptions opts) {
}
}
+ /**
+ * ManagerFactory that accepts no configurations.
+ */
public static class EmptyFactory extends ManagerFactory {
public ConnManager accept(SqoopOptions opts) {
// Never instantiate a proper ConnManager;
@@ -114,7 +122,7 @@ public String getPrimaryKey(String tableName) {
}
/**
- * Default implementation
+ * Default implementation.
* @param sqlType sql data type
* @return java data type
*/
@@ -123,7 +131,7 @@ public String toJavaType(int sqlType) {
}
/**
- * Default implementation
+ * Default implementation.
* @param sqlType sql data type
* @return hive data type
*/
diff --git a/src/test/org/apache/hadoop/sqoop/TestExport.java b/src/test/org/apache/hadoop/sqoop/TestExport.java
index ec0e4e70..8ef67a40 100644
--- a/src/test/org/apache/hadoop/sqoop/TestExport.java
+++ b/src/test/org/apache/hadoop/sqoop/TestExport.java
@@ -120,22 +120,22 @@ private String getRecordLine(int recordNum, ColumnGenerator... extraCols) {
the database should look like.
*/
public interface ColumnGenerator {
- /** for a row with id rowNum, what should we write into that
+ /** For a row with id rowNum, what should we write into that
line of the text file to export?
*/
- public String getExportText(int rowNum);
+ String getExportText(int rowNum);
- /** for a row with id rowNum, what should the database return
+ /** For a row with id rowNum, what should the database return
for the given column's value?
*/
- public String getVerifyText(int rowNum);
+ String getVerifyText(int rowNum);
- /** Return the column type to put in the CREATE TABLE statement */
- public String getType();
+ /** Return the column type to put in the CREATE TABLE statement. */
+ String getType();
}
/**
- * Create a data file that gets exported to the db
+ * Create a data file that gets exported to the db.
* @param fileNum the number of the file (for multi-file export)
* @param numRecords how many records to write to the file.
* @param gzip is true if the file should be gzipped.
@@ -173,7 +173,8 @@ private void createTextFile(int fileNum, int numRecords, boolean gzip,
}
}
- private void verifyCompressedFile(Path f, int expectedNumLines) throws IOException {
+ private void verifyCompressedFile(Path f, int expectedNumLines)
+ throws IOException {
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
FileSystem fs = FileSystem.get(conf);
@@ -185,7 +186,8 @@ private void verifyCompressedFile(Path f, int expectedNumLines) throws IOExcepti
if (null == decompressor) {
LOG.info("Verifying gzip sanity with null decompressor");
} else {
- LOG.info("Verifying gzip sanity with decompressor: " + decompressor.toString());
+ LOG.info("Verifying gzip sanity with decompressor: "
+ + decompressor.toString());
}
is = codec.createInputStream(is, decompressor);
BufferedReader r = new BufferedReader(new InputStreamReader(is));
@@ -205,7 +207,7 @@ private void verifyCompressedFile(Path f, int expectedNumLines) throws IOExcepti
}
/**
- * Create a data file in SequenceFile format that gets exported to the db
+ * Create a data file in SequenceFile format that gets exported to the db.
* @param fileNum the number of the file (for multi-file export).
* @param numRecords how many records to write to the file.
* @param className the table class name to instantiate and populate
@@ -303,7 +305,7 @@ public void createTable(ColumnGenerator... extraColumns) throws SQLException {
}
}
- /** Removing an existing table directory from the filesystem */
+ /** Removing an existing table directory from the filesystem. */
private void removeTablePath() throws IOException {
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
@@ -346,7 +348,8 @@ private void assertColMinAndMax(String colName, ColumnGenerator generator)
int minId = getMinRowId();
int maxId = getMaxRowId();
- LOG.info("Checking min/max for column " + colName + " with type " + generator.getType());
+ LOG.info("Checking min/max for column " + colName + " with type "
+ + generator.getType());
String expectedMin = generator.getVerifyText(minId);
String expectedMax = generator.getVerifyText(maxId);
@@ -414,7 +417,7 @@ public void testEmptyExport() throws IOException, SQLException {
multiFileTest(1, 0, 1);
}
- /** Export 10 rows, make sure they load in correctly */
+ /** Export 10 rows, make sure they load in correctly. */
public void testTextExport() throws IOException, SQLException {
multiFileTest(1, 10, 1);
}
@@ -427,7 +430,7 @@ public void testMultiFilesOneMapper() throws IOException, SQLException {
}
/** Make sure we can use CombineFileInputFormat to handle multiple
- * files and multiple maps
+ * files and multiple maps.
*/
public void testMultiFilesMultiMaps() throws IOException, SQLException {
multiFileTest(2, 10, 2);
@@ -481,7 +484,7 @@ public void testUnlimitedTransactionSize() throws IOException, SQLException {
verifyExport(TOTAL_RECORDS);
}
- /** Run 2 mappers, make sure all records load in correctly */
+ /** Run 2 mappers, make sure all records load in correctly. */
public void testMultiMapTextExport() throws IOException, SQLException {
final int RECORDS_PER_MAP = 10;
@@ -496,12 +499,13 @@ public void testMultiMapTextExport() throws IOException, SQLException {
verifyExport(RECORDS_PER_MAP * NUM_FILES);
}
- /** Export some rows from a SequenceFile, make sure they import correctly */
+ /** Export some rows from a SequenceFile, make sure they import correctly. */
public void testSequenceFileExport() throws Exception {
final int TOTAL_RECORDS = 10;
- // First, generate class and jar files that represent the table we're exporting to.
+ // First, generate class and jar files that represent the table
+ // we're exporting to.
LOG.info("Creating initial schema for SeqFile test");
createTable();
LOG.info("Generating code...");
@@ -531,7 +535,8 @@ public void testSequenceFileExport() throws Exception {
String jarBaseName = jarPath.getName();
assertTrue(jarBaseName.endsWith(".jar"));
assertTrue(jarBaseName.length() > ".jar".length());
- String className = jarBaseName.substring(0, jarBaseName.length() - ".jar".length());
+ String className = jarBaseName.substring(0, jarBaseName.length()
+ - ".jar".length());
LOG.info("Using jar filename: " + jarFileName);
LOG.info("Using class name: " + className);
@@ -621,7 +626,7 @@ protected String pad(int n) {
}
/**
- * Get a column generator for DATE columns
+ * Get a column generator for DATE columns.
*/
protected ColumnGenerator getDateColumnGenerator() {
return new ColumnGenerator() {
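
For readers skimming the interface cleanup near the top of this file: ColumnGenerator ties together what the export writes, what the database should echo back, and the column's SQL type. A self-contained sketch of an implementation (the interface is re-declared here purely for illustration, and the date values are made up, not taken from the patch):

    public class ColumnGeneratorSketch {
      interface ColumnGenerator {
        String getExportText(int rowNum);   // what gets written to the text file
        String getVerifyText(int rowNum);   // what the database should return
        String getType();                   // column type for CREATE TABLE
      }

      public static void main(String[] args) {
        ColumnGenerator dateCol = new ColumnGenerator() {
          public String getExportText(int rowNum) {
            return String.format("2009-10-%02d", (rowNum % 28) + 1);
          }
          public String getVerifyText(int rowNum) {
            return getExportText(rowNum);   // this column round-trips unchanged
          }
          public String getType() {
            return "DATE";
          }
        };
        System.out.println(dateCol.getType() + " row 3: " + dateCol.getExportText(3));
      }
    }
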
diff --git a/src/test/org/apache/hadoop/sqoop/TestMultiCols.java b/src/test/org/apache/hadoop/sqoop/TestMultiCols.java
index c3675273..dfd0b43e 100644
--- a/src/test/org/apache/hadoop/sqoop/TestMultiCols.java
+++ b/src/test/org/apache/hadoop/sqoop/TestMultiCols.java
@@ -31,10 +31,11 @@
*/
public class TestMultiCols extends ImportJobTestCase {
- public static final Log LOG = LogFactory.getLog(TestMultiCols.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TestMultiCols.class.getName());
/**
- * Do a full import verification test on a table containing one row
+ * Do a full import verification test on a table containing one row.
* @param types the types of the columns to insert
* @param insertVals the SQL text to use to insert each value
* @param validateVals the text to expect when retrieving each value from
@@ -44,12 +45,12 @@ public class TestMultiCols extends ImportJobTestCase {
* @param importColumns The list of columns to import
*/
private void verifyTypes(String [] types , String [] insertVals,
- String validateVals [], String validateLine) {
+ String [] validateVals, String validateLine) {
verifyTypes(types, insertVals, validateVals, validateLine, null);
}
private void verifyTypes(String [] types , String [] insertVals,
- String validateVals [], String validateLine, String [] importColumns) {
+ String [] validateVals, String validateLine, String [] importColumns) {
createTableWithColTypes(types, insertVals);
diff --git a/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java b/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
index 1cd53b22..99906a16 100644
--- a/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
+++ b/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
@@ -47,7 +47,7 @@
public class TestMultiMaps extends ImportJobTestCase {
/**
- * Create the argv to pass to Sqoop
+ * Create the argv to pass to Sqoop.
* @return the argv as an array of strings.
*/
protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
@@ -103,7 +103,7 @@ protected List getDataFilePaths() throws IOException {
}
/**
- * Given a comma-delimited list of integers, grab and parse the first int
+ * Given a comma-delimited list of integers, grab and parse the first int.
* @param str a comma-delimited list of values, the first of which is an int.
* @return the first field in the string, cast to int
*/
@@ -130,13 +130,15 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
CompilationManager compileMgr = new CompilationManager(opts);
String jarFileName = compileMgr.getJarFilename();
- prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName());
+ prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+ getTableName());
List paths = getDataFilePaths();
Configuration conf = new Configuration();
int curSum = 0;
- assertTrue("Found only " + paths.size() + " path(s); expected > 1.", paths.size() > 1);
+ assertTrue("Found only " + paths.size() + " path(s); expected > 1.",
+ paths.size() > 1);
// We expect multiple files. We need to open all the files and sum up the
// first column across all of them.
@@ -147,11 +149,12 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
- // We know that these values are two ints separated by a ',' character.
- // Since this is all dynamic, though, we don't want to actually link against
- // the class and use its methods. So we just parse this back into int fields manually.
- // Sum them up and ensure that we get the expected total for the first column, to
- // verify that we got all the results from the db into the file.
+ // We know that these values are two ints separated by a ','
+ // character. Since this is all dynamic, though, we don't want to
+ // actually link against the class and use its methods. So we just
+ // parse this back into int fields manually. Sum them up and ensure
+ // that we get the expected total for the first column, to verify that
+ // we got all the results from the db into the file.
// now sum up everything in the file.
while (reader.next(key) != null) {
@@ -163,7 +166,8 @@ public void runMultiMapTest(String splitByCol, int expectedSum)
reader = null;
}
- assertEquals("Total sum of first db column mismatch", expectedSum, curSum);
+ assertEquals("Total sum of first db column mismatch", expectedSum,
+ curSum);
} catch (InvalidOptionsException ioe) {
fail(ioe.toString());
} catch (ParseException pe) {
diff --git a/src/test/org/apache/hadoop/sqoop/TestSplitBy.java b/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
index 2e45f9fa..afe8f231 100644
--- a/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
+++ b/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
@@ -37,12 +37,12 @@
import org.apache.hadoop.sqoop.util.ClassLoaderStack;
/**
- * Test that --split-by works
+ * Test that --split-by works.
*/
public class TestSplitBy extends ImportJobTestCase {
/**
- * Create the argv to pass to Sqoop
+ * Create the argv to pass to Sqoop.
* @return the argv as an array of strings.
*/
protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
@@ -82,7 +82,7 @@ protected String getTableName() {
/**
- * Given a comma-delimited list of integers, grab and parse the first int
+ * Given a comma-delimited list of integers, grab and parse the first int.
* @param str a comma-delimited list of values, the first of which is an int.
* @return the first field in the string, cast to int
*/
@@ -109,7 +109,8 @@ public void runSplitByTest(String splitByCol, int expectedSum)
String jarFileName = compileMgr.getJarFilename();
LOG.debug("Got jar from import job: " + jarFileName);
- prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName());
+ prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+ getTableName());
reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
@@ -119,10 +120,11 @@ public void runSplitByTest(String splitByCol, int expectedSum)
Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
// We know that these values are two ints separated by a ',' character.
- // Since this is all dynamic, though, we don't want to actually link against
- // the class and use its methods. So we just parse this back into int fields manually.
- // Sum them up and ensure that we get the expected total for the first column, to
- // verify that we got all the results from the db into the file.
+ // Since this is all dynamic, though, we don't want to actually link
+ // against the class and use its methods. So we just parse this back
+ // into int fields manually. Sum them up and ensure that we get the
+ // expected total for the first column, to verify that we got all the
+ // results from the db into the file.
// Sum up everything in the file.
int curSum = 0;
@@ -131,7 +133,8 @@ public void runSplitByTest(String splitByCol, int expectedSum)
curSum += getFirstInt(val.toString());
}
- assertEquals("Total sum of first db column mismatch", expectedSum, curSum);
+ assertEquals("Total sum of first db column mismatch", expectedSum,
+ curSum);
} catch (InvalidOptionsException ioe) {
fail(ioe.toString());
} catch (ParseException pe) {
diff --git a/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java b/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java
index 28af472a..681d0128 100644
--- a/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java
+++ b/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java
@@ -24,7 +24,7 @@
/**
- * Test aspects of the SqoopOptions class
+ * Test aspects of the SqoopOptions class.
*/
public class TestSqoopOptions extends TestCase {
@@ -158,8 +158,8 @@ private SqoopOptions parse(String [] argv) throws Exception {
// test that setting output delimiters also sets input delimiters
public void testDelimitersInherit() throws Exception {
String [] args = {
- "--fields-terminated-by",
- "|"
+ "--fields-terminated-by",
+ "|",
};
SqoopOptions opts = parse(args);
@@ -167,13 +167,14 @@ public void testDelimitersInherit() throws Exception {
assertEquals('|', opts.getOutputFieldDelim());
}
- // test that setting output delimiters and setting input delims separately works
+ // Test that setting output delimiters and setting input delims
+ // separately works.
public void testDelimOverride1() throws Exception {
String [] args = {
- "--fields-terminated-by",
- "|",
- "--input-fields-terminated-by",
- "*"
+ "--fields-terminated-by",
+ "|",
+ "--input-fields-terminated-by",
+ "*",
};
SqoopOptions opts = parse(args);
@@ -184,10 +185,10 @@ public void testDelimOverride1() throws Exception {
// test that the order in which delims are specified doesn't matter
public void testDelimOverride2() throws Exception {
String [] args = {
- "--input-fields-terminated-by",
- "*",
- "--fields-terminated-by",
- "|"
+ "--input-fields-terminated-by",
+ "*",
+ "--fields-terminated-by",
+ "|",
};
SqoopOptions opts = parse(args);
@@ -198,7 +199,7 @@ public void testDelimOverride2() throws Exception {
public void testBadNumMappers1() throws Exception {
String [] args = {
"--num-mappers",
- "x"
+ "x",
};
try {
@@ -212,7 +213,7 @@ public void testBadNumMappers1() throws Exception {
public void testBadNumMappers2() throws Exception {
String [] args = {
"-m",
- "x"
+ "x",
};
try {
@@ -226,7 +227,7 @@ public void testBadNumMappers2() throws Exception {
public void testGoodNumMappers() throws Exception {
String [] args = {
"-m",
- "4"
+ "4",
};
SqoopOptions opts = parse(args);
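
The array-initializer hunks above add trailing commas and consistent indentation; the payoff is that appending an argument later touches only new lines in a future diff. A trivial illustration with made-up flags:

    public class TrailingCommaSketch {
      public static void main(String[] unused) {
        String[] args = {
          "--fields-terminated-by",
          "|",
          // Appending "--input-fields-terminated-by", "*", later adds new lines
          // only; the existing "|" line stays untouched because it already ends
          // with a comma.
        };
        System.out.println(args.length + " arguments");
      }
    }
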
diff --git a/src/test/org/apache/hadoop/sqoop/TestWhere.java b/src/test/org/apache/hadoop/sqoop/TestWhere.java
index 7f080ed6..8999b487 100644
--- a/src/test/org/apache/hadoop/sqoop/TestWhere.java
+++ b/src/test/org/apache/hadoop/sqoop/TestWhere.java
@@ -45,7 +45,7 @@
public class TestWhere extends ImportJobTestCase {
/**
- * Create the argv to pass to Sqoop
+ * Create the argv to pass to Sqoop.
* @return the argv as an array of strings.
*/
protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
@@ -87,7 +87,7 @@ protected String getTableName() {
/**
- * Given a comma-delimited list of integers, grab and parse the first int
+ * Given a comma-delimited list of integers, grab and parse the first int.
* @param str a comma-delimited list of values, the first of which is an int.
* @return the first field in the string, cast to int
*/
@@ -96,8 +96,8 @@ private int getFirstInt(String str) {
return Integer.parseInt(parts[0]);
}
- public void runWhereTest(String whereClause, String firstValStr, int numExpectedResults,
- int expectedSum) throws IOException {
+ public void runWhereTest(String whereClause, String firstValStr,
+ int numExpectedResults, int expectedSum) throws IOException {
String [] columns = HsqldbTestServer.getFieldNames();
ClassLoader prevClassLoader = null;
@@ -113,7 +113,8 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
CompilationManager compileMgr = new CompilationManager(opts);
String jarFileName = compileMgr.getJarFilename();
- prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName());
+ prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+ getTableName());
reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
@@ -128,13 +129,15 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
// make sure that the value we think should be at the top, is.
reader.getCurrentValue(val);
- assertEquals("Invalid ordering within sorted SeqFile", firstValStr, val.toString());
+ assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
+ val.toString());
// We know that these values are two ints separated by a ',' character.
- // Since this is all dynamic, though, we don't want to actually link against
- // the class and use its methods. So we just parse this back into int fields manually.
- // Sum them up and ensure that we get the expected total for the first column, to
- // verify that we got all the results from the db into the file.
+ // Since this is all dynamic, though, we don't want to actually link
+ // against the class and use its methods. So we just parse this back
+ // into int fields manually. Sum them up and ensure that we get the
+ // expected total for the first column, to verify that we got all the
+ // results from the db into the file.
int curSum = getFirstInt(val.toString());
int totalResults = 1;
@@ -145,8 +148,10 @@ public void runWhereTest(String whereClause, String firstValStr, int numExpected
totalResults++;
}
- assertEquals("Total sum of first db column mismatch", expectedSum, curSum);
- assertEquals("Incorrect number of results for query", numExpectedResults, totalResults);
+ assertEquals("Total sum of first db column mismatch", expectedSum,
+ curSum);
+ assertEquals("Incorrect number of results for query", numExpectedResults,
+ totalResults);
} catch (InvalidOptionsException ioe) {
fail(ioe.toString());
} catch (ParseException pe) {
diff --git a/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java b/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java
index c51deed1..b8d9538b 100644
--- a/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java
+++ b/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java
@@ -33,9 +33,10 @@
import org.apache.hadoop.sqoop.manager.PostgresqlTest;
/**
- * Test battery including all tests of vendor-specific ConnManager implementations.
- * These tests likely aren't run by Apache Hudson, because they require configuring
- * and using Oracle, MySQL, etc., which may have incompatible licenses with Apache.
+ * Test battery including all tests of vendor-specific ConnManager
+ * implementations. These tests likely aren't run by Apache Hudson, because
+ * they require configuring and using Oracle, MySQL, etc., which may have
+ * incompatible licenses with Apache.
*/
public final class ThirdPartyTests extends TestCase {
diff --git a/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java b/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
index ecc64d0d..bc10d2ed 100644
--- a/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
+++ b/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
@@ -42,7 +42,8 @@
*/
public class TestHiveImport extends ImportJobTestCase {
- public static final Log LOG = LogFactory.getLog(TestHiveImport.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TestHiveImport.class.getName());
/**
* Sets the expected number of columns in the table being manipulated
@@ -60,7 +61,7 @@ private void setNumCols(int numCols) {
}
/**
- * Create the argv to pass to Sqoop
+ * Create the argv to pass to Sqoop.
* @return the argv as an array of strings.
*/
protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
@@ -147,8 +148,9 @@ private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
return opts;
}
- private void runImportTest(String tableName, String [] types, String [] values,
- String verificationScript, String [] args, SqoopTool tool) throws IOException {
+ private void runImportTest(String tableName, String [] types,
+ String [] values, String verificationScript, String [] args,
+ SqoopTool tool) throws IOException {
// create a table and populate it with a row...
createTableWithColTypes(types, values);
@@ -158,7 +160,8 @@ private void runImportTest(String tableName, String [] types, String [] values,
SqoopOptions options = getSqoopOptions(args, tool);
String hiveHome = options.getHiveHome();
assertNotNull("hive.home was not set", hiveHome);
- Path testDataPath = new Path(new Path(hiveHome), "scripts/" + verificationScript);
+ Path testDataPath = new Path(new Path(hiveHome),
+ "scripts/" + verificationScript);
System.setProperty("expected.script", testDataPath.toString());
// verify that we can import it correctly into hive.
@@ -202,7 +205,7 @@ public void testGenerateOnly() throws IOException {
}
- /** Test that strings and ints are handled in the normal fashion */
+ /** Test that strings and ints are handled in the normal fashion. */
@Test
public void testNormalHiveImport() throws IOException {
final String TABLE_NAME = "NORMAL_HIVE_IMPORT";
@@ -214,7 +217,7 @@ public void testNormalHiveImport() throws IOException {
getArgv(false, null), new ImportTool());
}
- /** Test that table is created in hive with no data import */
+ /** Test that table is created in hive with no data import. */
@Test
public void testCreateOnlyHiveImport() throws IOException {
final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT";
@@ -227,7 +230,10 @@ public void testCreateOnlyHiveImport() throws IOException {
new CreateHiveTableTool());
}
- /** Test that table is created in hive and replaces the existing table if any */
+ /**
+ * Test that table is created in hive and replaces the existing table if
+ * any.
+ */
@Test
public void testCreateOverwriteHiveImport() throws IOException {
final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT";
@@ -241,7 +247,7 @@ public void testCreateOverwriteHiveImport() throws IOException {
new CreateHiveTableTool());
}
- /** Test that dates are coerced properly to strings */
+ /** Test that dates are coerced properly to strings. */
@Test
public void testDate() throws IOException {
final String TABLE_NAME = "DATE_HIVE_IMPORT";
@@ -253,7 +259,7 @@ public void testDate() throws IOException {
getArgv(false, null), new ImportTool());
}
- /** Test that NUMERICs are coerced to doubles */
+ /** Test that NUMERICs are coerced to doubles. */
@Test
public void testNumeric() throws IOException {
final String TABLE_NAME = "NUMERIC_HIVE_IMPORT";
@@ -265,7 +271,7 @@ public void testNumeric() throws IOException {
getArgv(false, null), new ImportTool());
}
- /** If bin/hive returns an error exit status, we should get an IOException */
+ /** If bin/hive returns an error exit status, we should get an IOException. */
@Test
public void testHiveExitFails() {
// The expected script is different than the one which would be generated
@@ -285,7 +291,7 @@ public void testHiveExitFails() {
}
}
- /** Test that we can set delimiters how we want them */
+ /** Test that we can set delimiters how we want them. */
@Test
public void testCustomDelimiters() throws IOException {
final String TABLE_NAME = "CUSTOM_DELIM_IMPORT";
@@ -293,8 +299,10 @@ public void testCustomDelimiters() throws IOException {
setNumCols(3);
String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
String [] vals = { "'test'", "42", "'somestring'" };
- String [] extraArgs = { "--fields-terminated-by", ",",
- "--lines-terminated-by", "|" };
+ String [] extraArgs = {
+ "--fields-terminated-by", ",",
+ "--lines-terminated-by", "|",
+ };
runImportTest(TABLE_NAME, types, vals, "customDelimImport.q",
getArgv(false, extraArgs), new ImportTool());
}
diff --git a/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java b/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
index d8aea39d..56a29f6e 100644
--- a/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
@@ -25,7 +25,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.SqoopOptions;
-import org.junit.Test;
import junit.framework.TestCase;
@@ -34,7 +33,8 @@
*/
public class TestTableDefWriter extends TestCase {
- public static final Log LOG = LogFactory.getLog(TestTableDefWriter.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TestTableDefWriter.class.getName());
// Test getHiveOctalCharCode and expect an IllegalArgumentException.
private void expectExceptionInCharCode(int charCode) {
@@ -73,7 +73,8 @@ public void testDifferentTableNames() throws Exception {
LOG.debug("Load data stmt: " + loadData);
// Assert that the statements generated have the form we expect.
- assertTrue(createTable.indexOf("CREATE TABLE IF NOT EXISTS outputTable") != -1);
+ assertTrue(createTable.indexOf(
+ "CREATE TABLE IF NOT EXISTS outputTable") != -1);
assertTrue(loadData.indexOf("INTO TABLE outputTable") != -1);
assertTrue(loadData.indexOf("/inputTable'") != -1);
}
diff --git a/src/test/org/apache/hadoop/sqoop/io/TestLobFile.java b/src/test/org/apache/hadoop/sqoop/io/TestLobFile.java
index 3d53a6b2..cd4cf542 100644
--- a/src/test/org/apache/hadoop/sqoop/io/TestLobFile.java
+++ b/src/test/org/apache/hadoop/sqoop/io/TestLobFile.java
@@ -89,7 +89,8 @@ public void setUp() throws Exception {
return offsets;
}
- private void verifyClobFile(Path p, String... expectedRecords) throws Exception {
+ private void verifyClobFile(Path p, String... expectedRecords)
+ throws Exception {
LobFile.Reader reader = LobFile.open(p, conf);
@@ -236,16 +237,16 @@ public void testVeryShortRead() throws Exception {
// quantity in the readahead buffer.
Path p = new Path(TEMP_BASE_DIR, "shortread.lob");
- final String firstLine = "line1";
- final String secondLine =
+ final String FIRST_LINE = "line1";
+ final String SECOND_LINE =
"This contains much more in the record than just one line.";
- final String record2 = "here is the second record.";
- final String record3 = "The 3rd record, which we won't actually read.";
+ final String RECORD2 = "here is the second record.";
+ final String RECORD3 = "The 3rd record, which we won't actually read.";
- runLineAndRecordTest(p, firstLine,
- firstLine + "\n" + secondLine,
- record2,
- record3);
+ runLineAndRecordTest(p, FIRST_LINE,
+ FIRST_LINE + "\n" + SECOND_LINE,
+ RECORD2,
+ RECORD3);
}
@@ -254,15 +255,15 @@ public void testIncompleteOverread() throws Exception {
// next record start mark; make sure we realign properly.
Path p = new Path(TEMP_BASE_DIR, "longread.lob");
- final String firstLine = "this is a really long line of text to read!";
- final String secondLine = "not this.";
- final String record2 = "Here is yet another record to verify.";
- final String record3 = "Nobody cares about record 3.";
+ final String FIRST_LINE = "this is a really long line of text to read!";
+ final String SECOND_LINE = "not this.";
+ final String RECORD2 = "Here is yet another record to verify.";
+ final String RECORD3 = "Nobody cares about record 3.";
- runLineAndRecordTest(p, firstLine,
- firstLine + "\n" + secondLine,
- record2,
- record3);
+ runLineAndRecordTest(p, FIRST_LINE,
+ FIRST_LINE + "\n" + SECOND_LINE,
+ RECORD2,
+ RECORD3);
}
public void testSeekToRecord() throws Exception {
@@ -272,7 +273,7 @@ public void testSeekToRecord() throws Exception {
String [] records = {
"this is the first record!",
"here comes record number two. It is a bit longer.",
- "this is the third record. we can read it."
+ "this is the third record. we can read it.",
};
// Write the file and memorize when the third record starts.
@@ -355,7 +356,7 @@ public void testManySeeks() throws Exception {
"rec6 is yet another record",
"rec7 is starting to feel boring",
"rec8 is at the end of seg 1",
- "rec9 is all by itself in seg 2"
+ "rec9 is all by itself in seg 2",
};
// Write the records to a file, save their offsets.
@@ -547,9 +548,9 @@ private void runCompressedTest(String codec) throws Exception {
LOG.info("Testing with codec: " + codec);
Path p = new Path(TEMP_BASE_DIR, "compressed-" + codec + ".lob");
String [] records = {
- "this is the first record, It should be compressed a lot!",
- "record 2 record 2 record 2 record 2 2 2 2 2 2 2 2 2 2 2 2",
- "and a third and a third yes this is the third"
+ "this is the first record, It should be compressed a lot!",
+ "record 2 record 2 record 2 record 2 2 2 2 2 2 2 2 2 2 2 2",
+ "and a third and a third yes this is the third",
};
runClobFileTest(p, codec, records);
diff --git a/src/test/org/apache/hadoop/sqoop/io/TestSplittableBufferedWriter.java b/src/test/org/apache/hadoop/sqoop/io/TestSplittableBufferedWriter.java
index 52687443..57154de5 100644
--- a/src/test/org/apache/hadoop/sqoop/io/TestSplittableBufferedWriter.java
+++ b/src/test/org/apache/hadoop/sqoop/io/TestSplittableBufferedWriter.java
@@ -24,7 +24,6 @@
import java.io.InputStreamReader;
import java.io.File;
import java.io.FileInputStream;
-import java.io.OutputStream;
import java.util.zip.GZIPInputStream;
import org.apache.commons.logging.Log;
@@ -238,7 +237,7 @@ public void testSplittingTextFile() throws IOException {
// Now ensure all the data got there.
String [] expectedLines0 = {
- "This is a string!"
+ "This is a string!",
};
InputStream fis = new FileInputStream(new File(getWriteDir(),
"split-00000"));
@@ -268,7 +267,7 @@ public void testSplittingTextFile() throws IOException {
}
public void testSplittingGzipFile() throws IOException {
- SplittingOutputStream os = new SplittingOutputStream(getConf(),
+ SplittingOutputStream os = new SplittingOutputStream(getConf(),
getWritePath(), "splitz-", 3, true);
SplittableBufferedWriter w = new SplittableBufferedWriter(os, true);
try {
@@ -289,7 +288,7 @@ public void testSplittingGzipFile() throws IOException {
// Now ensure all the data got there.
String [] expectedLines0 = {
- "This is a string!"
+ "This is a string!",
};
verifyFileContents(
new GZIPInputStream(new FileInputStream(new File(getWriteDir(),
diff --git a/src/test/org/apache/hadoop/sqoop/lib/TestBlobRef.java b/src/test/org/apache/hadoop/sqoop/lib/TestBlobRef.java
index d76746f9..69e6094f 100644
--- a/src/test/org/apache/hadoop/sqoop/lib/TestBlobRef.java
+++ b/src/test/org/apache/hadoop/sqoop/lib/TestBlobRef.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.sqoop.lib;
import java.io.*;
-import java.util.ArrayList;
-import java.util.List;
import junit.framework.TestCase;
@@ -92,7 +90,7 @@ public void testExternalSubdir() throws IOException {
}
}
- private void doExternalTest(final byte [] DATA, final String FILENAME)
+ private void doExternalTest(final byte [] data, final String filename)
throws IOException {
Configuration conf = new Configuration();
@@ -101,7 +99,7 @@ private void doExternalTest(final byte [] DATA, final String FILENAME)
String tmpDir = System.getProperty("test.build.data", "/tmp/");
Path tmpPath = new Path(tmpDir);
- Path blobFile = new Path(tmpPath, FILENAME);
+ Path blobFile = new Path(tmpPath, filename);
// make any necessary parent dirs.
Path blobParent = blobFile.getParent();
@@ -112,13 +110,13 @@ private void doExternalTest(final byte [] DATA, final String FILENAME)
LobFile.Writer lw = LobFile.create(blobFile, conf, false);
try {
long off = lw.tell();
- long len = DATA.length;
+ long len = data.length;
OutputStream os = lw.writeBlobRecord(len);
- os.write(DATA, 0, DATA.length);
+ os.write(data, 0, data.length);
os.close();
lw.close();
- String refString = "externalLob(lf," + FILENAME
+ String refString = "externalLob(lf," + filename
+ "," + off + "," + len + ")";
BlobRef blob = BlobRef.parse(refString);
assertTrue(blob.isExternal());
@@ -130,9 +128,9 @@ private void doExternalTest(final byte [] DATA, final String FILENAME)
int bytes = is.read(buf, 0, 4096);
is.close();
- assertEquals(DATA.length, bytes);
+ assertEquals(data.length, bytes);
for (int i = 0; i < bytes; i++) {
- assertEquals(DATA[i], buf[i]);
+ assertEquals(data[i], buf[i]);
}
} finally {
fs.delete(blobFile, false);
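
The test above builds reference strings of the form externalLob(lf,<file>,<offset>,<length>) and expects BlobRef.parse() to recover the pieces. A rough, stand-alone sketch of that round trip; the parsing here is illustrative only, since BlobRef's real parser does not appear in this patch:

    public class LobRefSketch {
      public static void main(String[] args) {
        String filename = "blobdata.lob";   // hypothetical file name
        long off = 100L;
        long len = 5L;
        String refString = "externalLob(lf," + filename + "," + off + "," + len + ")";

        // Strip "externalLob(" and the closing ")", then split the fields.
        String body = refString.substring("externalLob(".length(),
            refString.length() - 1);
        String[] parts = body.split(",");
        // parts[0] is the "lf" token carried in the reference string.
        System.out.println("file=" + parts[1] + " offset=" + parts[2]
            + " length=" + parts[3]);
      }
    }
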
diff --git a/src/test/org/apache/hadoop/sqoop/lib/TestClobRef.java b/src/test/org/apache/hadoop/sqoop/lib/TestClobRef.java
index 241452b9..f1e153c7 100644
--- a/src/test/org/apache/hadoop/sqoop/lib/TestClobRef.java
+++ b/src/test/org/apache/hadoop/sqoop/lib/TestClobRef.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.sqoop.lib;
import java.io.*;
-import java.util.ArrayList;
-import java.util.List;
import junit.framework.TestCase;
@@ -106,7 +104,7 @@ public void testExternalSubdir() throws IOException {
}
}
- private void doExternalTest(final String DATA, final String FILENAME)
+ private void doExternalTest(final String data, final String filename)
throws IOException {
Configuration conf = new Configuration();
@@ -115,7 +113,7 @@ private void doExternalTest(final String DATA, final String FILENAME)
String tmpDir = System.getProperty("test.build.data", "/tmp/");
Path tmpPath = new Path(tmpDir);
- Path clobFile = new Path(tmpPath, FILENAME);
+ Path clobFile = new Path(tmpPath, filename);
// make any necessary parent dirs.
Path clobParent = clobFile.getParent();
@@ -126,13 +124,13 @@ private void doExternalTest(final String DATA, final String FILENAME)
LobFile.Writer lw = LobFile.create(clobFile, conf, true);
try {
long off = lw.tell();
- long len = DATA.length();
+ long len = data.length();
Writer w = lw.writeClobRecord(len);
- w.append(DATA);
+ w.append(data);
w.close();
lw.close();
- String refString = "externalLob(lf," + FILENAME
+ String refString = "externalLob(lf," + filename
+ "," + off + "," + len + ")";
ClobRef clob = ClobRef.parse(refString);
assertTrue(clob.isExternal());
@@ -145,7 +143,7 @@ private void doExternalTest(final String DATA, final String FILENAME)
r.close();
String str = new String(buf, 0, chars);
- assertEquals(DATA, str);
+ assertEquals(data, str);
} finally {
fs.delete(clobFile, false);
}
diff --git a/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java b/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java
index 48a3e674..47746817 100644
--- a/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java
+++ b/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.sqoop.lib;
-import java.util.ArrayList;
-import java.util.List;
import junit.framework.TestCase;
/**
- * Test that the field formatter works in a variety of configurations
+ * Test that the field formatter works in a variety of configurations.
*/
public class TestFieldFormatter extends TestCase {
@@ -35,7 +33,8 @@ public void testAllEmpty() {
}
public void testNullArgs() {
- String result = FieldFormatter.escapeAndEnclose("", null, null, null, false);
+ String result = FieldFormatter.escapeAndEnclose("", null, null, null,
+ false);
assertEquals("", result);
char [] encloseFor = { '\"' };
@@ -44,38 +43,44 @@ public void testNullArgs() {
}
public void testBasicStr() {
- String result = FieldFormatter.escapeAndEnclose("foo", null, null, null, false);
+ String result = FieldFormatter.escapeAndEnclose("foo", null, null, null,
+ false);
assertEquals("foo", result);
}
public void testEscapeSlash() {
- String result = FieldFormatter.escapeAndEnclose("foo\\bar", "\\", "\"", null, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\\bar", "\\", "\"",
+ null, false);
assertEquals("foo\\\\bar", result);
}
public void testMustEnclose() {
- String result = FieldFormatter.escapeAndEnclose("foo", null, "\"", null, true);
+ String result = FieldFormatter.escapeAndEnclose("foo", null, "\"",
+ null, true);
assertEquals("\"foo\"", result);
}
public void testEncloseComma1() {
char [] chars = { ',' };
- String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"", chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"",
+ chars, false);
assertEquals("\"foo,bar\"", result);
}
public void testEncloseComma2() {
char [] chars = { '\n', ',' };
- String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"", chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"",
+ chars, false);
assertEquals("\"foo,bar\"", result);
}
public void testEncloseComma3() {
char [] chars = { ',', '\n' };
- String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"", chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"",
+ chars, false);
assertEquals("\"foo,bar\"", result);
}
@@ -91,7 +96,8 @@ public void testCannotEnclose1() {
char [] chars = { ',', '\n' };
// can't enclose because encloser is ""
- String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "", chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "",
+ chars, false);
assertEquals("foo,bar", result);
}
@@ -99,12 +105,14 @@ public void testCannotEnclose2() {
char [] chars = { ',', '\n' };
// can't enclose because encloser is null
- String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", null, chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", null,
+ chars, false);
assertEquals("foo,bar", result);
}
public void testEmptyCharToEscapeString() {
- // test what happens when the escape char is null. It should encode the null char.
+ // test what happens when the escape char is null. It should encode the
+ // null char.
char nul = '\000';
String s = "" + nul;
@@ -112,36 +120,42 @@ public void testEmptyCharToEscapeString() {
}
public void testEscapeCentralQuote() {
- String result = FieldFormatter.escapeAndEnclose("foo\"bar", "\\", "\"", null, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\"bar", "\\", "\"",
+ null, false);
assertEquals("foo\\\"bar", result);
}
public void testEscapeMultiCentralQuote() {
- String result = FieldFormatter.escapeAndEnclose("foo\"\"bar", "\\", "\"", null, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\"\"bar", "\\", "\"",
+ null, false);
assertEquals("foo\\\"\\\"bar", result);
}
public void testDoubleEscape() {
- String result = FieldFormatter.escapeAndEnclose("foo\\\"bar", "\\", "\"", null, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\\\"bar", "\\", "\"",
+ null, false);
assertEquals("foo\\\\\\\"bar", result);
}
public void testReverseEscape() {
- String result = FieldFormatter.escapeAndEnclose("foo\"\\bar", "\\", "\"", null, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\"\\bar", "\\", "\"",
+ null, false);
assertEquals("foo\\\"\\\\bar", result);
}
public void testQuotedEncloser() {
char [] chars = { ',', '\n' };
- String result = FieldFormatter.escapeAndEnclose("foo\",bar", "\\", "\"", chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\",bar", "\\", "\"",
+ chars, false);
assertEquals("\"foo\\\",bar\"", result);
}
public void testQuotedEscape() {
char [] chars = { ',', '\n' };
- String result = FieldFormatter.escapeAndEnclose("foo\\,bar", "\\", "\"", chars, false);
+ String result = FieldFormatter.escapeAndEnclose("foo\\,bar", "\\", "\"",
+ chars, false);
assertEquals("\"foo\\\\,bar\"", result);
}
}
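The reflowed calls above all exercise FieldFormatter.escapeAndEnclose(value, escape, enclose, encloseFor, mustEnclose). The following is an illustrative re-implementation of the behavior the assertions expect, not Sqoop's actual FieldFormatter: occurrences of the escape and encloser characters are escaped, and the field is wrapped in the encloser only when it contains a character from encloseFor (or mustEnclose is set) and a non-empty encloser is available.

public class EscapeAndEncloseSketch {
  // Illustrative sketch only; NOT Sqoop's FieldFormatter implementation.
  static String escapeAndEnclose(String field, String escape, String enclose,
      char [] encloseFor, boolean mustEnclose) {
    String result = field;
    if (escape != null && !escape.isEmpty()) {
      // Escape the escape character itself, then any encloser characters.
      result = result.replace(escape, escape + escape);
      if (enclose != null && !enclose.isEmpty()) {
        result = result.replace(enclose, escape + enclose);
      }
    }
    boolean needsEnclose = mustEnclose;
    if (!needsEnclose && encloseFor != null) {
      for (char c : encloseFor) {
        if (field.indexOf(c) != -1) {
          needsEnclose = true;
          break;
        }
      }
    }
    if (needsEnclose && enclose != null && !enclose.isEmpty()) {
      result = enclose + result + enclose;
    }
    return result;
  }

  public static void main(String [] args) {
    char [] chars = { ',', '\n' };
    // Mirrors testEncloseComma1: "foo,bar" becomes "\"foo,bar\"".
    System.out.println(escapeAndEnclose("foo,bar", "\\", "\"", chars, false));
    // Mirrors testEscapeSlash: "foo\bar" becomes "foo\\bar".
    System.out.println(escapeAndEnclose("foo\\bar", "\\", "\"", null, false));
  }
}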
diff --git a/src/test/org/apache/hadoop/sqoop/lib/TestLargeObjectLoader.java b/src/test/org/apache/hadoop/sqoop/lib/TestLargeObjectLoader.java
index 204af882..efb2843c 100644
--- a/src/test/org/apache/hadoop/sqoop/lib/TestLargeObjectLoader.java
+++ b/src/test/org/apache/hadoop/sqoop/lib/TestLargeObjectLoader.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.sqoop.lib;
import java.io.*;
-import java.util.ArrayList;
-import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -29,13 +27,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.testutil.MockResultSet;
@@ -105,7 +98,7 @@ public void testReadBlobRef()
assertNotNull(blob);
assertFalse(blob.isExternal());
byte [] data = blob.getData();
- byte [] blobData = MockResultSet.BLOB_DATA();
+ byte [] blobData = MockResultSet.blobData();
assertEquals(blobData.length, data.length);
for (int i = 0; i < data.length; i++) {
assertEquals(blobData[i], data[i]);
diff --git a/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java b/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java
index c281472e..cf1729bc 100644
--- a/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java
+++ b/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java
@@ -28,7 +28,8 @@
*/
public class TestRecordParser extends TestCase {
- private void assertListsEqual(String msg, List expected, List actual) {
+ private void assertListsEqual(String msg, List expected,
+ List actual) {
if (expected == null && actual != null) {
if (null == msg) {
msg = "expected null list";
@@ -130,55 +131,63 @@ public void testOneField2() throws RecordParser.ParseError {
public void testQuotedField1() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the field" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the field\"\n"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the field\"\n"));
}
public void testQuotedField2() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the field" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the field\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the field\""));
}
public void testQuotedField3() throws RecordParser.ParseError {
// quoted containing EOF
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the ,field" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the ,field\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the ,field\""));
}
public void testQuotedField4() throws RecordParser.ParseError {
// quoted containing multiple EOFs
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the ,,field" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the ,,field\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the ,,field\""));
}
public void testQuotedField5() throws RecordParser.ParseError {
// quoted containing EOF and EOR
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the ,\nfield" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the ,\nfield\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the ,\nfield\""));
}
public void testQuotedField6() throws RecordParser.ParseError {
// quoted containing EOR
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the \nfield" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the \nfield\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the \nfield\""));
}
public void testQuotedField7() throws RecordParser.ParseError {
// quoted containing multiple EORs
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the \n\nfield" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the \n\nfield\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the \n\nfield\""));
}
public void testQuotedField8() throws RecordParser.ParseError {
// quoted containing escaped quoted char
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "the \"field" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"the \\\"field\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"the \\\"field\""));
}
public void testUnquotedEscape1() throws RecordParser.ParseError {
@@ -204,37 +213,43 @@ public void testTwoFields1() throws RecordParser.ParseError {
public void testTwoFields2() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "field1", "field2" };
- assertListsEqual(null, list(strings), parser.parseRecord("field1,field2\n"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("field1,field2\n"));
}
public void testTwoFields3() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "field1", "field2" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"field1\",field2\n"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"field1\",field2\n"));
}
public void testTwoFields4() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "field1", "field2" };
- assertListsEqual(null, list(strings), parser.parseRecord("field1,\"field2\"\n"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("field1,\"field2\"\n"));
}
public void testTwoFields5() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
String [] strings = { "field1", "field2" };
- assertListsEqual(null, list(strings), parser.parseRecord("field1,\"field2\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("field1,\"field2\""));
}
public void testRequiredQuotes0() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
String [] strings = { "field1", "field2" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"field1\",\"field2\"\n"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"field1\",\"field2\"\n"));
}
public void testRequiredQuotes1() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
String [] strings = { "field1", "field2" };
- assertListsEqual(null, list(strings), parser.parseRecord("\"field1\",\"field2\""));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("\"field1\",\"field2\""));
}
public void testRequiredQuotes2() throws RecordParser.ParseError {
@@ -336,22 +351,26 @@ public void testLeadingEscape() throws RecordParser.ParseError {
public void testEofIsEor() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', ',', '\"', '\\', false);
String [] strings = { "three", "different", "fields" };
- assertListsEqual(null, list(strings), parser.parseRecord("three,different,fields"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("three,different,fields"));
}
public void testEofIsEor2() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', ',', '\"', '\\', false);
String [] strings = { "three", "different", "fields" };
- assertListsEqual(null, list(strings), parser.parseRecord("three,\"different\",fields"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("three,\"different\",fields"));
}
public void testRepeatedParse() throws RecordParser.ParseError {
RecordParser parser = new RecordParser(',', ',', '\"', '\\', false);
String [] strings = { "three", "different", "fields" };
- assertListsEqual(null, list(strings), parser.parseRecord("three,\"different\",fields"));
+ assertListsEqual(null, list(strings),
+ parser.parseRecord("three,\"different\",fields"));
String [] strings2 = { "foo", "bar" };
- assertListsEqual(null, list(strings2), parser.parseRecord("foo,\"bar\""));
+ assertListsEqual(null, list(strings2),
+ parser.parseRecord("foo,\"bar\""));
}
}
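The wrapped assertions above all build on the same RecordParser constructor, whose arguments (in the order used here) are the field delimiter, record delimiter, enclosing character, escape character, and a flag requiring every field to be enclosed. A small usage sketch, assuming Sqoop's RecordParser is on the classpath; the input and expected output mirror testTwoFields3.

import java.util.List;

import org.apache.hadoop.sqoop.lib.RecordParser;

public class RecordParserUsage {
  public static void main(String [] args) throws RecordParser.ParseError {
    // field delim, record delim, encloser, escape, require-enclosers flag.
    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
    List fields = parser.parseRecord("\"field1\",field2\n");
    System.out.println(fields); // expected: [field1, field2]
  }
}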
diff --git a/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLExportTest.java b/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLExportTest.java
index 047769b6..b14c6455 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLExportTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLExportTest.java
@@ -21,13 +21,11 @@
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import java.sql.Statement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.After;
import org.junit.Before;
-import org.junit.Test;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.TestExport;
diff --git a/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLTest.java b/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLTest.java
index b94a9b46..b5f1257c 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/DirectMySQLTest.java
@@ -20,7 +20,6 @@
import java.io.BufferedReader;
import java.io.IOException;
-import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.FileInputStream;
import java.io.File;
@@ -65,7 +64,8 @@
*/
public class DirectMySQLTest extends ImportJobTestCase {
- public static final Log LOG = LogFactory.getLog(DirectMySQLTest.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ DirectMySQLTest.class.getName());
static final String TABLE_PREFIX = "EMPLOYEES_MYSQL_";
@@ -225,8 +225,8 @@ private void doImport(boolean mysqlOutputDelims, boolean isDirect,
public void testDirectBulkImportWithDefaultDelims() throws IOException {
// no quoting of strings allowed.
String [] expectedResults = {
- "2,Bob,2009-04-20,400,sales",
- "3,Fred,2009-01-23,15,marketing"
+ "2,Bob,2009-04-20,400,sales",
+ "3,Fred,2009-01-23,15,marketing",
};
doImport(false, true, getTableName(), expectedResults, null);
@@ -236,8 +236,8 @@ public void testDirectBulkImportWithDefaultDelims() throws IOException {
public void testWithExtraParams() throws IOException {
// no quoting of strings allowed.
String [] expectedResults = {
- "2,Bob,2009-04-20,400,sales",
- "3,Fred,2009-01-23,15,marketing"
+ "2,Bob,2009-04-20,400,sales",
+ "3,Fred,2009-01-23,15,marketing",
};
String [] extraArgs = { "--", "--lock-tables" };
@@ -249,8 +249,8 @@ public void testWithExtraParams() throws IOException {
public void testMultiMappers() throws IOException {
// no quoting of strings allowed.
String [] expectedResults = {
- "2,Bob,2009-04-20,400,sales",
- "3,Fred,2009-01-23,15,marketing"
+ "2,Bob,2009-04-20,400,sales",
+ "3,Fred,2009-01-23,15,marketing",
};
String [] extraArgs = { "-m", "2" };
@@ -264,8 +264,8 @@ public void testJdbcColumnSubset() throws IOException {
LOG.info("Starting JDBC Column Subset test.");
String [] expectedResults = {
- "2,Bob,400.0",
- "3,Fred,15.0"
+ "2,Bob,400.0",
+ "3,Fred,15.0",
};
String [] extraArgs = { "--columns", "id,name,salary" };
@@ -279,8 +279,8 @@ public void testDirectColumnSubset() throws IOException {
LOG.info("Starting Direct Column Subset test.");
String [] expectedResults = {
- "2,Bob,400.0",
- "3,Fred,15.0"
+ "2,Bob,400.0",
+ "3,Fred,15.0",
};
String [] extraArgs = { "--columns", "id,name,salary" };
@@ -291,8 +291,8 @@ public void testDirectColumnSubset() throws IOException {
public void testDirectBulkImportWithMySQLQuotes() throws IOException {
// mysql quotes all string-based output.
String [] expectedResults = {
- "2,'Bob','2009-04-20',400,'sales'",
- "3,'Fred','2009-01-23',15,'marketing'"
+ "2,'Bob','2009-04-20',400,'sales'",
+ "3,'Fred','2009-01-23',15,'marketing'",
};
doImport(true, true, getTableName(), expectedResults, null);
@@ -301,8 +301,8 @@ public void testDirectBulkImportWithMySQLQuotes() throws IOException {
@Test
public void testMySQLJdbcImport() throws IOException {
String [] expectedResults = {
- "2,Bob,2009-04-20,400.0,sales",
- "3,Fred,2009-01-23,15.0,marketing"
+ "2,Bob,2009-04-20,400.0,sales",
+ "3,Fred,2009-01-23,15.0,marketing",
};
doImport(false, false, getTableName(), expectedResults, null);
@@ -312,9 +312,9 @@ public void testMySQLJdbcImport() throws IOException {
public void testJdbcEscapedTableName() throws Exception {
// Test a JDBC-based import of a table whose name is
// a reserved sql keyword (and is thus `quoted`)
- final String reservedTableName = "TABLE";
+ final String RESERVED_TABLE_NAME = "TABLE";
SqoopOptions options = new SqoopOptions(MySQLTestUtils.CONNECT_STRING,
- reservedTableName);
+ RESERVED_TABLE_NAME);
options.setUsername(MySQLTestUtils.getCurrentUser());
ConnManager mgr = new MySQLManager(options);
@@ -327,15 +327,15 @@ public void testJdbcEscapedTableName() throws Exception {
st = connection.createStatement();
// create the database table and populate it with data.
- st.executeUpdate("DROP TABLE IF EXISTS `" + reservedTableName + "`");
- st.executeUpdate("CREATE TABLE `" + reservedTableName + "` ("
+ st.executeUpdate("DROP TABLE IF EXISTS `" + RESERVED_TABLE_NAME + "`");
+ st.executeUpdate("CREATE TABLE `" + RESERVED_TABLE_NAME + "` ("
+ "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+ "name VARCHAR(24) NOT NULL, "
+ "start_date DATE, "
+ "salary FLOAT, "
+ "dept VARCHAR(32))");
- st.executeUpdate("INSERT INTO `" + reservedTableName + "` VALUES("
+ st.executeUpdate("INSERT INTO `" + RESERVED_TABLE_NAME + "` VALUES("
+ "2,'Aaron','2009-05-14',1000000.00,'engineering')");
connection.commit();
} finally {
@@ -349,20 +349,20 @@ public void testJdbcEscapedTableName() throws Exception {
}
String [] expectedResults = {
- "2,Aaron,2009-05-14,1000000.0,engineering"
+ "2,Aaron,2009-05-14,1000000.0,engineering",
};
- doImport(false, false, reservedTableName, expectedResults, null);
+ doImport(false, false, RESERVED_TABLE_NAME, expectedResults, null);
}
@Test
public void testJdbcEscapedColumnName() throws Exception {
// Test a JDBC-based import of a table with a column whose name is
- // a reserved sql keyword (and is thus `quoted`)
- final String tableName = "mysql_escaped_col_table";
- setCurTableName(tableName);
+ // a reserved sql keyword (and is thus `quoted`).
+ final String TABLE_NAME = "mysql_escaped_col_table";
+ setCurTableName(TABLE_NAME);
SqoopOptions options = new SqoopOptions(MySQLTestUtils.CONNECT_STRING,
- tableName);
+ TABLE_NAME);
options.setUsername(MySQLTestUtils.getCurrentUser());
ConnManager mgr = new MySQLManager(options);
@@ -375,15 +375,15 @@ public void testJdbcEscapedColumnName() throws Exception {
st = connection.createStatement();
// create the database table and populate it with data.
- st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
- st.executeUpdate("CREATE TABLE " + tableName + " ("
+ st.executeUpdate("DROP TABLE IF EXISTS " + TABLE_NAME);
+ st.executeUpdate("CREATE TABLE " + TABLE_NAME + " ("
+ "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+ "`table` VARCHAR(24) NOT NULL, "
+ "`CREATE` DATE, "
+ "salary FLOAT, "
+ "dept VARCHAR(32))");
- st.executeUpdate("INSERT INTO " + tableName + " VALUES("
+ st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
+ "2,'Aaron','2009-05-14',1000000.00,'engineering')");
connection.commit();
} finally {
@@ -397,9 +397,9 @@ public void testJdbcEscapedColumnName() throws Exception {
}
String [] expectedResults = {
- "2,Aaron,2009-05-14,1000000.0,engineering"
+ "2,Aaron,2009-05-14,1000000.0,engineering",
};
- doImport(false, false, tableName, expectedResults, null);
+ doImport(false, false, TABLE_NAME, expectedResults, null);
}
}
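The escaped-table-name and escaped-column-name tests above both depend on MySQL backtick quoting so that identifiers which collide with reserved words (TABLE, CREATE) can be used in DDL. A standalone sketch of the quoting itself, with no database connection involved:

public class ReservedIdentifierSketch {
  public static void main(String [] args) {
    final String RESERVED_TABLE_NAME = "TABLE"; // reserved SQL keyword
    String ddl = "CREATE TABLE `" + RESERVED_TABLE_NAME + "` ("
        + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
        + "`table` VARCHAR(24) NOT NULL, "
        + "`CREATE` DATE)";
    // The backticks keep MySQL from parsing the keywords as syntax.
    System.out.println(ddl);
  }
}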
diff --git a/src/test/org/apache/hadoop/sqoop/manager/JdbcMySQLExportTest.java b/src/test/org/apache/hadoop/sqoop/manager/JdbcMySQLExportTest.java
index 5783abb4..8f35be10 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/JdbcMySQLExportTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/JdbcMySQLExportTest.java
@@ -18,21 +18,17 @@
package org.apache.hadoop.sqoop.manager;
-import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import java.sql.Statement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
import org.junit.After;
import org.junit.Before;
-import org.junit.Test;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.TestExport;
-import org.apache.hadoop.sqoop.mapreduce.MySQLExportMapper;
/**
* Test the MySQLManager implementation's exportJob() functionality.
diff --git a/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java b/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
index 8aa27f0a..c33c851b 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
@@ -20,7 +20,6 @@
import java.io.BufferedReader;
import java.io.IOException;
-import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.FileInputStream;
import java.io.File;
@@ -29,8 +28,6 @@
import java.sql.Statement;
import java.util.ArrayList;
-import junit.framework.TestCase;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.After;
@@ -58,13 +55,15 @@
*
* CREATE DATABASE sqooppasstest;
* use mysql;
- * GRANT ALL PRIVILEGES on sqooppasstest.* TO 'sqooptest'@'localhost' IDENTIFIED BY '12345';
+ * GRANT ALL PRIVILEGES on sqooppasstest.* TO 'sqooptest'@'localhost'
+ * IDENTIFIED BY '12345';
* flush privileges;
*
*/
public class MySQLAuthTest extends ImportJobTestCase {
- public static final Log LOG = LogFactory.getLog(MySQLAuthTest.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ MySQLAuthTest.class.getName());
static final String HOST_URL = "jdbc:mysql://localhost/";
@@ -85,7 +84,8 @@ protected boolean useHsqldbTestServer() {
@Before
public void setUp() {
super.setUp();
- SqoopOptions options = new SqoopOptions(AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
+ SqoopOptions options = new SqoopOptions(AUTH_CONNECT_STRING,
+ AUTH_TABLE_NAME);
options.setUsername(AUTH_TEST_USER);
options.setPassword(AUTH_TEST_PASS);
@@ -269,10 +269,11 @@ public void doZeroTimestampTest(int testNum, boolean expectSuccess,
LOG.info("Beginning zero-timestamp test #" + testNum);
try {
- final String tableName = "mysqlTimestampTable" + Integer.toString(testNum);
+ final String TABLE_NAME = "mysqlTimestampTable"
+ + Integer.toString(testNum);
// Create a table containing a full-zeros timestamp.
- SqoopOptions options = new SqoopOptions(connectString, tableName);
+ SqoopOptions options = new SqoopOptions(connectString, TABLE_NAME);
options.setUsername(AUTH_TEST_USER);
options.setPassword(AUTH_TEST_PASS);
@@ -286,19 +287,19 @@ public void doZeroTimestampTest(int testNum, boolean expectSuccess,
st = connection.createStatement();
// create the database table and populate it with data.
- st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
- st.executeUpdate("CREATE TABLE " + tableName + " ("
+ st.executeUpdate("DROP TABLE IF EXISTS " + TABLE_NAME);
+ st.executeUpdate("CREATE TABLE " + TABLE_NAME + " ("
+ "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+ "ts TIMESTAMP NOT NULL)");
- st.executeUpdate("INSERT INTO " + tableName + " VALUES("
+ st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
+ "NULL,'0000-00-00 00:00:00.0')");
connection.commit();
st.close();
connection.close();
// Run the import.
- String [] argv = getArgv(true, false, connectString, tableName);
+ String [] argv = getArgv(true, false, connectString, TABLE_NAME);
try {
runImport(argv);
} catch (Exception e) {
@@ -313,7 +314,7 @@ public void doZeroTimestampTest(int testNum, boolean expectSuccess,
// Make sure the result file is there.
Path warehousePath = new Path(this.getWarehouseDir());
- Path tablePath = new Path(warehousePath, tableName);
+ Path tablePath = new Path(warehousePath, TABLE_NAME);
Path filePath = new Path(tablePath, "part-m-00000");
File f = new File(filePath.toString());
diff --git a/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java b/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java
index 6b51d4ee..ab9f8f92 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java
@@ -35,7 +35,8 @@
*/
public class MySQLCompatTest extends ManagerCompatTestCase {
- public static final Log LOG = LogFactory.getLog(MySQLCompatTest.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ MySQLCompatTest.class.getName());
@Override
protected Log getLogger() {
diff --git a/src/test/org/apache/hadoop/sqoop/manager/MySQLTestUtils.java b/src/test/org/apache/hadoop/sqoop/manager/MySQLTestUtils.java
index 2ff9044e..aae2323e 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/MySQLTestUtils.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/MySQLTestUtils.java
@@ -31,7 +31,8 @@
*/
public final class MySQLTestUtils {
- public static final Log LOG = LogFactory.getLog(MySQLTestUtils.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ MySQLTestUtils.class.getName());
public static final String HOST_URL = "jdbc:mysql://localhost/";
@@ -68,7 +69,8 @@ public static String getCurrentUser() {
try {
r.close();
} catch (IOException ioe) {
- LOG.warn("IOException closing input stream from `whoami`: " + ioe.toString());
+ LOG.warn("IOException closing input stream from `whoami`: "
+ + ioe.toString());
}
}
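The reflowed warning above is logged when closing the stream that MySQLTestUtils.getCurrentUser() reads the output of `whoami` from. Below is a minimal sketch of that pattern, assuming a Unix-like environment; it is not Sqoop's exact implementation, and the fallback to the user.name system property is an illustrative assumption.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class WhoamiSketch {
  public static String currentUser() {
    BufferedReader r = null;
    try {
      Process p = Runtime.getRuntime().exec("whoami");
      r = new BufferedReader(new InputStreamReader(p.getInputStream()));
      return r.readLine();
    } catch (IOException ioe) {
      return System.getProperty("user.name"); // illustrative fallback
    } finally {
      if (null != r) {
        try {
          r.close();
        } catch (IOException ioe) {
          // Only log a failure to close; the username was already read.
          System.err.println("IOException closing input stream from `whoami`: "
              + ioe.toString());
        }
      }
    }
  }

  public static void main(String [] args) {
    System.out.println(currentUser());
  }
}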
diff --git a/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java b/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java
index 884c68cc..21453047 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.sqoop.manager;
import java.io.UnsupportedEncodingException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Formatter;
@@ -29,7 +26,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.testutil.ManagerCompatTestCase;
@@ -38,7 +34,8 @@
*/
public class OracleCompatTest extends ManagerCompatTestCase {
- public static final Log LOG = LogFactory.getLog(OracleCompatTest.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ OracleCompatTest.class.getName());
@Override
protected Log getLogger() {
@@ -89,6 +86,7 @@ public void tearDown() {
try {
Thread.sleep(250);
} catch (InterruptedException ie) {
+      // This delay may run a bit short... no problem.
}
}
}
diff --git a/src/test/org/apache/hadoop/sqoop/manager/OracleExportTest.java b/src/test/org/apache/hadoop/sqoop/manager/OracleExportTest.java
index 29220058..ddfa1189 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/OracleExportTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/OracleExportTest.java
@@ -18,17 +18,14 @@
package org.apache.hadoop.sqoop.manager;
-import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import java.sql.Statement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
import org.junit.After;
import org.junit.Before;
-import org.junit.Test;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.TestExport;
diff --git a/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java b/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
index 116ec902..b4acc8ad 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
@@ -82,7 +82,8 @@
*/
public class OracleManagerTest extends ImportJobTestCase {
- public static final Log LOG = LogFactory.getLog(OracleManagerTest.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ OracleManagerTest.class.getName());
static final String TABLE_NAME = "EMPLOYEES";
@@ -132,11 +133,17 @@ public void setUp() {
+ "PRIMARY KEY (id))");
st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
- + "1,'Aaron',to_date('2009-05-14','yyyy-mm-dd'),1000000.00,'engineering','29-DEC-09 12.00.00.000000000 PM','29-DEC-09 12.00.00.000000000 PM')");
+ + "1,'Aaron',to_date('2009-05-14','yyyy-mm-dd'),"
+ + "1000000.00,'engineering','29-DEC-09 12.00.00.000000000 PM',"
+ + "'29-DEC-09 12.00.00.000000000 PM')");
st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
- + "2,'Bob',to_date('2009-04-20','yyyy-mm-dd'),400.00,'sales','30-DEC-09 12.00.00.000000000 PM','30-DEC-09 12.00.00.000000000 PM')");
+ + "2,'Bob',to_date('2009-04-20','yyyy-mm-dd'),"
+ + "400.00,'sales','30-DEC-09 12.00.00.000000000 PM',"
+ + "'30-DEC-09 12.00.00.000000000 PM')");
st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
- + "3,'Fred',to_date('2009-01-23','yyyy-mm-dd'),15.00,'marketing','31-DEC-09 12.00.00.000000000 PM','31-DEC-09 12.00.00.000000000 PM')");
+ + "3,'Fred',to_date('2009-01-23','yyyy-mm-dd'),15.00,"
+ + "'marketing','31-DEC-09 12.00.00.000000000 PM',"
+ + "'31-DEC-09 12.00.00.000000000 PM')");
connection.commit();
} catch (SQLException sqlE) {
LOG.error("Encountered SQL Exception: " + sqlE);
@@ -233,28 +240,33 @@ private void runOracleTest(String [] expectedResults) throws IOException {
@Test
public void testOracleImport() throws IOException {
- // no quoting of strings allowed.
- // NOTE: Oracle JDBC 11.1 drivers auto-cast SQL DATE to java.sql.Timestamp.
- // Even if you define your columns as DATE in Oracle, they may still contain
- // time information, so the JDBC drivers lie to us and will never tell us we have
- // a strict DATE type. Thus we include HH:MM:SS.mmmmm below.
+ // no quoting of strings allowed. NOTE: Oracle JDBC 11.1 drivers
+ // auto-cast SQL DATE to java.sql.Timestamp. Even if you define your
+ // columns as DATE in Oracle, they may still contain time information, so
+ // the JDBC drivers lie to us and will never tell us we have a strict DATE
+ // type. Thus we include HH:MM:SS.mmmmm below.
// See http://www.oracle.com/technology/tech/java/sqlj_jdbc/htdocs/jdbc_faq.html#08_01
String [] expectedResults = {
- "1,Aaron,2009-05-14 00:00:00.0,1000000,engineering,2009-12-29 12:00:00.0,2009-12-29 12:00:00.0",
- "2,Bob,2009-04-20 00:00:00.0,400,sales,2009-12-30 12:00:00.0,2009-12-30 12:00:00.0",
- "3,Fred,2009-01-23 00:00:00.0,15,marketing,2009-12-31 12:00:00.0,2009-12-31 12:00:00.0"
+ "1,Aaron,2009-05-14 00:00:00.0,1000000,engineering,"
+ + "2009-12-29 12:00:00.0,2009-12-29 12:00:00.0",
+ "2,Bob,2009-04-20 00:00:00.0,400,sales,"
+ + "2009-12-30 12:00:00.0,2009-12-30 12:00:00.0",
+ "3,Fred,2009-01-23 00:00:00.0,15,marketing,"
+ + "2009-12-31 12:00:00.0,2009-12-31 12:00:00.0",
};
runOracleTest(expectedResults);
}
/**
- * Compare two lines
+ * Compare two lines. Normalize the dates we receive based on the expected
+ * time zone.
* @param expectedLine expected line
* @param receivedLine received line
* @throws IOException exception during lines comparison
*/
- private void compareRecords(String expectedLine, String receivedLine) throws IOException {
+ private void compareRecords(String expectedLine, String receivedLine)
+ throws IOException {
// handle null case
if (expectedLine == null || receivedLine == null) {
return;
@@ -269,8 +281,10 @@ private void compareRecords(String expectedLine, String receivedLine) throws IOE
String [] expectedValues = expectedLine.split(",");
String [] receivedValues = receivedLine.split(",");
if (expectedValues.length != 7 || receivedValues.length != 7) {
- LOG.error("Number of expected fields did not match number of received fields");
- throw new IOException("Number of expected fields did not match number of received fields");
+ LOG.error("Number of expected fields did not match "
+ + "number of received fields");
+ throw new IOException("Number of expected fields did not match "
+ + "number of received fields");
}
// check first 5 values
@@ -279,20 +293,23 @@ private void compareRecords(String expectedLine, String receivedLine) throws IOE
mismatch = !expectedValues[i].equals(receivedValues[i]);
}
if (mismatch) {
- throw new IOException("Expected:<" + expectedLine + "> but was:<" + receivedLine + ">");
+ throw new IOException("Expected:<" + expectedLine + "> but was:<"
+ + receivedLine + ">");
}
Date expectedDate = null;
Date receivedDate = null;
DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
- int offset = TimeZone.getDefault().getOffset(System.currentTimeMillis()) / 3600000;
+ int offset = TimeZone.getDefault().getOffset(System.currentTimeMillis())
+ / 3600000;
for (int i = 5; i < 7; i++) {
- // parse expected timestamp
+ // parse expected timestamp.
try {
expectedDate = df.parse(expectedValues[i]);
} catch (ParseException ex) {
LOG.error("Could not parse expected timestamp: " + expectedValues[i]);
- throw new IOException("Could not parse expected timestamp: " + expectedValues[i]);
+ throw new IOException("Could not parse expected timestamp: "
+ + expectedValues[i]);
}
// parse received timestamp
@@ -300,7 +317,8 @@ private void compareRecords(String expectedLine, String receivedLine) throws IOE
receivedDate = df.parse(receivedValues[i]);
} catch (ParseException ex) {
LOG.error("Could not parse received timestamp: " + receivedValues[i]);
- throw new IOException("Could not parse received timestamp: " + receivedValues[i]);
+ throw new IOException("Could not parse received timestamp: "
+ + receivedValues[i]);
}
// compare two timestamps considering timezone offset
@@ -312,7 +330,8 @@ private void compareRecords(String expectedLine, String receivedLine) throws IOE
receivedCal.setTime(receivedDate);
if (!expectedCal.equals(receivedCal)) {
- throw new IOException("Expected:<" + expectedLine + "> but was:<" + receivedLine + ">, while timezone offset is: " + offset);
+ throw new IOException("Expected:<" + expectedLine + "> but was:<"
+ + receivedLine + ">, while timezone offset is: " + offset);
}
}
}
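The line-wrapped offset calculation above converts the JVM's current UTC offset from milliseconds to whole hours (3,600,000 ms per hour); zones with fractional-hour offsets truncate under this integer division. A tiny standalone sketch of the same arithmetic:

import java.util.TimeZone;

public class TimezoneOffsetSketch {
  public static void main(String [] args) {
    // getOffset() returns the current offset from UTC in milliseconds.
    int offsetHours =
        TimeZone.getDefault().getOffset(System.currentTimeMillis()) / 3600000;
    System.out.println("Local UTC offset (hours): " + offsetHours);
  }
}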
diff --git a/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java b/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java
index 9caf8eec..e83ceb3f 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java
@@ -18,31 +18,14 @@
package org.apache.hadoop.sqoop.manager;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.FileInputStream;
-import java.io.File;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
-import java.util.ArrayList;
-
-import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.sqoop.SqoopOptions;
-import org.apache.hadoop.sqoop.testutil.CommonArgs;
-import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
-import org.apache.hadoop.sqoop.util.FileListing;
/**
* Helper methods for Oracle testing.
@@ -67,7 +50,7 @@ public static void setOracleAuth(SqoopOptions options) {
}
/**
- * Drop a table if it exists
+ * Drop a table if it exists.
*/
public static void dropTable(String tableName, ConnManager manager)
throws SQLException {
diff --git a/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java b/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java
index e3b5513d..c8b91301 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java
@@ -45,34 +45,36 @@
* The former uses the postgres JDBC driver to perform an import;
* the latter uses pg_dump to facilitate it.
*
- * Since this requires a Postgresql installation on your local machine to use, this
- * class is named in such a way that Hadoop's default QA process does not run
- * it. You need to run this manually with -Dtestcase=PostgresqlTest.
+ * Since this requires a Postgresql installation on your local machine to use,
+ * this class is named in such a way that Hadoop's default QA process does not
+ * run it. You need to run this manually with -Dtestcase=PostgresqlTest or
+ * -Dthirdparty=true.
*
- * You need to put Postgresql's JDBC driver library into a location where Hadoop
- * can access it (e.g., $HADOOP_HOME/lib).
+ * You need to put Postgresql's JDBC driver library into a location where
+ * Hadoop can access it (e.g., $HADOOP_HOME/lib).
*
- * To configure a postgresql database to allow local connections, put the following
- * in /etc/postgresql/8.3/main/pg_hba.conf:
+ * To configure a postgresql database to allow local connections, put the
+ * following in /etc/postgresql/8.3/main/pg_hba.conf:
* local all all trust
* host all all 127.0.0.1/32 trust
*
* ... and comment out any other lines referencing 127.0.0.1.
*
- * For postgresql 8.1, this may be in /var/lib/pgsql/data, instead.
- * You may need to restart the postgresql service after modifying this file.
+ * For postgresql 8.1, this may be in /var/lib/pgsql/data, instead. You may
+ * need to restart the postgresql service after modifying this file.
*
* You should also create a sqooptest user and database:
*
* $ sudo -u postgres psql -U postgres template1
- * template1=> CREATE USER sqooptest;
- * template1=> CREATE DATABASE sqooptest;
- * template1=> \q
+ * template1=> CREATE USER sqooptest;
+ * template1=> CREATE DATABASE sqooptest;
+ * template1=> \q
*
*/
public class PostgresqlTest extends ImportJobTestCase {
- public static final Log LOG = LogFactory.getLog(PostgresqlTest.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ PostgresqlTest.class.getName());
static final String HOST_URL = "jdbc:postgresql://localhost/";
@@ -227,8 +229,8 @@ private void doImportAndVerify(boolean isDirect, String [] expectedResults)
@Test
public void testJdbcBasedImport() throws IOException {
String [] expectedResults = {
- "2,Bob,2009-04-20,400.0,sales",
- "3,Fred,2009-01-23,15.0,marketing"
+ "2,Bob,2009-04-20,400.0,sales",
+ "3,Fred,2009-01-23,15.0,marketing",
};
doImportAndVerify(false, expectedResults);
@@ -237,8 +239,8 @@ public void testJdbcBasedImport() throws IOException {
@Test
public void testDirectImport() throws IOException {
String [] expectedResults = {
- "2,Bob,2009-04-20,400,sales",
- "3,Fred,2009-01-23,15,marketing"
+ "2,Bob,2009-04-20,400,sales",
+ "3,Fred,2009-01-23,15,marketing",
};
doImportAndVerify(true, expectedResults);
diff --git a/src/test/org/apache/hadoop/sqoop/manager/TestHsqldbManager.java b/src/test/org/apache/hadoop/sqoop/manager/TestHsqldbManager.java
index 882b3ae5..9e0d7713 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/TestHsqldbManager.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/TestHsqldbManager.java
@@ -31,13 +31,13 @@
import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
/**
- * Test HsqldbManager-specific functionality that overrides SqlManager behavior
- *
- *
+ * Test HsqldbManager-specific functionality that overrides SqlManager
+ * behavior.
*/
public class TestHsqldbManager extends TestCase {
- public static final Log LOG = LogFactory.getLog(TestHsqldbManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TestHsqldbManager.class.getName());
// instance variables populated during setUp, used during tests
private HsqldbTestServer testServer;
@@ -69,7 +69,8 @@ public void tearDown() {
}
}
- // note: hsql returns only the "PUBLIC" schema name; not individual user db names.
+ // note: hsql returns only the "PUBLIC" schema name; not individual user db
+ // names.
@Test
public void testListDatabases() {
String [] databases = manager.listDatabases();
diff --git a/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java b/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
index 4e9f744e..930ff387 100644
--- a/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
+++ b/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
@@ -38,13 +38,11 @@
/**
* Test methods of the generic SqlManager implementation.
- *
- *
- *
*/
public class TestSqlManager extends TestCase {
- public static final Log LOG = LogFactory.getLog(TestSqlManager.class.getName());
+ public static final Log LOG = LogFactory.getLog(
+ TestSqlManager.class.getName());
/** the name of a table that doesn't exist. */
static final String MISSING_TABLE = "MISSING_TABLE";
@@ -81,7 +79,8 @@ public void tearDown() {
@Test
public void testListColNames() {
- String [] colNames = manager.getColumnNames(HsqldbTestServer.getTableName());
+ String [] colNames = manager.getColumnNames(
+ HsqldbTestServer.getTableName());
assertNotNull("manager returned no colname list", colNames);
assertEquals("Table list should be length 2", 2, colNames.length);
String [] knownFields = HsqldbTestServer.getFieldNames();
@@ -92,7 +91,8 @@ public void testListColNames() {
@Test
public void testListColTypes() {
- Map types = manager.getColumnTypes(HsqldbTestServer.getTableName());
+ Map types = manager.getColumnTypes(
+ HsqldbTestServer.getTableName());
assertNotNull("manager returned no types map", types);
assertEquals("Map should be size=2", 2, types.size());
@@ -103,13 +103,15 @@ public void testListColTypes() {
@Test
public void testMissingTableColNames() {
String [] colNames = manager.getColumnNames(MISSING_TABLE);
- assertNull("No column names should be returned for missing table", colNames);
+ assertNull("No column names should be returned for missing table",
+ colNames);
}
@Test
public void testMissingTableColTypes() {
Map colTypes = manager.getColumnTypes(MISSING_TABLE);
- assertNull("No column types should be returned for missing table", colTypes);
+ assertNull("No column types should be returned for missing table",
+ colTypes);
}
@Test
@@ -124,9 +126,9 @@ public void testListTables() {
}
// constants related to testReadTable()
- final static int EXPECTED_NUM_ROWS = 4;
- final static int EXPECTED_COL1_SUM = 16;
- final static int EXPECTED_COL2_SUM = 20;
+ static final int EXPECTED_NUM_ROWS = 4;
+ static final int EXPECTED_COL1_SUM = 16;
+ static final int EXPECTED_COL2_SUM = 20;
@Test
public void testReadTable() {
@@ -141,7 +143,8 @@ public void testReadTable() {
assertNotNull("ResultSetMetadata is null in readTable()", metaData);
// ensure that we get the correct number of columns back
- assertEquals("Number of returned columns was unexpected!", metaData.getColumnCount(),
+ assertEquals("Number of returned columns was unexpected!",
+ metaData.getColumnCount(),
HsqldbTestServer.getFieldNames().length);
// should get back 4 rows. They are:
@@ -149,7 +152,8 @@ public void testReadTable() {
// 3 4
// 5 6
// 7 8
- // .. so while order isn't guaranteed, we should get back 16 on the left and 20 on the right.
+ // .. so while order isn't guaranteed, we should get back 16 on the left
+ // and 20 on the right.
int sumCol1 = 0, sumCol2 = 0, rowCount = 0;
while (results.next()) {
rowCount++;
@@ -181,7 +185,8 @@ public void testReadMissingTable() {
try {
String [] colNames = { "*" };
results = manager.readTable(MISSING_TABLE, colNames);
- assertNull("Expected null resultset from readTable(MISSING_TABLE)", results);
+ assertNull("Expected null resultset from readTable(MISSING_TABLE)",
+ results);
} catch (SQLException sqlException) {
// we actually expect this. pass.
} finally {
@@ -226,7 +231,8 @@ public void getPrimaryKeyFromTable() {
statement.executeUpdate();
statement.close();
} catch (SQLException sqlException) {
- fail("Could not create table with primary key: " + sqlException.toString());
+ fail("Could not create table with primary key: "
+ + sqlException.toString());
} finally {
if (null != conn) {
try {
@@ -238,6 +244,7 @@ public void getPrimaryKeyFromTable() {
}
String primaryKey = manager.getPrimaryKey(TABLE_WITH_KEY);
- assertEquals("Expected null pkey for table without key", primaryKey, KEY_FIELD_NAME);
+ assertEquals("Expected null pkey for table without key", primaryKey,
+ KEY_FIELD_NAME);
}
}
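The wrapped assertions above exercise the SqlManager metadata calls: getColumnNames() and getColumnTypes() return null for a missing table, and getPrimaryKey() returns the key column's name. The following hypothetical helper (not part of the test) strings those calls together; typing the parameter as ConnManager is an assumption, and a fully configured manager instance is assumed to be supplied by the caller.

import java.util.Map;

import org.apache.hadoop.sqoop.manager.ConnManager;

public class ManagerMetadataSketch {
  public static void describeTable(ConnManager manager, String table) {
    String [] colNames = manager.getColumnNames(table); // null for missing tables
    Map types = manager.getColumnTypes(table);          // null for missing tables
    String primaryKey = manager.getPrimaryKey(table);
    System.out.println("columns=" + (colNames == null ? 0 : colNames.length)
        + ", types=" + (types == null ? 0 : types.size())
        + ", pkey=" + primaryKey);
  }
}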
diff --git a/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java b/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java
index 28760a5d..947c03d1 100644
--- a/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java
+++ b/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java
@@ -22,14 +22,15 @@
import junit.framework.TestSuite;
/**
- * All tests for Sqoop new mapreduce-api (org.apache.hadoop.sqoop.mapreduce)
+ * All tests for Sqoop new mapreduce-api (org.apache.hadoop.sqoop.mapreduce).
*/
public final class MapreduceTests {
private MapreduceTests() { }
public static Test suite() {
- TestSuite suite = new TestSuite("Tests for org.apache.hadoop.sqoop.mapreduce");
+ TestSuite suite = new TestSuite(
+ "Tests for org.apache.hadoop.sqoop.mapreduce");
suite.addTestSuite(TestImportJob.class);
return suite;
}
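MapreduceTests above follows the JUnit 3 aggregation pattern: a private constructor plus a static suite() method that registers TestCase classes. A bare-bones sketch of that shape, where the class name, suite description, and commented-out member test are all hypothetical:

import junit.framework.Test;
import junit.framework.TestSuite;

public final class ExampleTests {
  private ExampleTests() { }

  public static Test suite() {
    TestSuite suite = new TestSuite("Tests for an example package");
    // suite.addTestSuite(SomeTestCase.class); // register each TestCase here
    return suite;
  }
}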
diff --git a/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java b/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
index fa7e6634..6519fa39 100644
--- a/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
+++ b/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
@@ -18,44 +18,28 @@
package org.apache.hadoop.sqoop.mapreduce;
-import java.io.BufferedReader;
import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.junit.Before;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.sqoop.ConnFactory;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.manager.ManagerFactory;
-import org.apache.hadoop.sqoop.mapreduce.AutoProgressMapper;
-import org.apache.hadoop.sqoop.mapreduce.ImportJobBase;
-import org.apache.hadoop.sqoop.testutil.CommonArgs;
-import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
import org.apache.hadoop.sqoop.testutil.InjectableManagerFactory;
import org.apache.hadoop.sqoop.testutil.InjectableConnManager;
import org.apache.hadoop.sqoop.tool.ImportTool;
/**
- * Test aspects of the DataDrivenImportJob class
+ * Test aspects of the DataDrivenImportJob class' failure reporting.
*/
public class TestImportJob extends ImportJobTestCase {
@@ -87,7 +71,7 @@ public void testFailedImportDueToIOException() throws IOException {
}
}
- // A mapper that is guaranteed to cause the task to fail.
+ /** A mapper that is guaranteed to cause the task to fail. */
public static class NullDereferenceMapper
extends AutoProgressMapper