
Fix additional checkstyle issues.

From: Aaron Kimball <aaron@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149900 13f79535-47bb-0310-9956-ffa450edef68
Andrew Bayer committed 2011-07-22 20:03:49 +00:00
parent 0b96b5f1c3
commit 20cd34e90c
21 changed files with 44 additions and 124 deletions

View File

@@ -605,7 +605,7 @@
           file="${cobertura.home}/cobertura.jar" />
   </target>
-  <target name="checkstyle" depends="ivy-retrieve-checkstyle"
+  <target name="checkstyle" depends="ivy-retrieve-checkstyle, compile-all"
       description="Check source code conventions">
     <taskdef resource="checkstyletask.properties">
       <classpath refid="${name}.checkstyle.classpath" />
@@ -614,6 +614,7 @@
     <mkdir dir="${checkstyle.report.dir}" />
     <checkstyle config="${checkstyle.xml}" failOnViolation="false">
       <fileset dir="${base.src.dir}" includes="**/*.java" />
+      <classpath refid="test.classpath"/>
       <formatter type="xml"
           toFile="${checkstyle.report.dir}/checkstyle-errors.xml" />
     </checkstyle>

View File

@@ -24,17 +24,12 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Arrays;
-import java.util.ArrayList;
 import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.lib.LargeObjectLoader;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Category;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 /**
  * Command-line arguments used by Sqoop.
@@ -409,16 +404,16 @@ public String getTableName() {
     return tableName;
   }
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
+  public void setTableName(String table) {
+    this.tableName = table;
   }
   public String getExportDir() {
     return exportDir;
   }
-  public void setExportDir(String exportDir) {
-    this.exportDir = exportDir;
+  public void setExportDir(String dir) {
+    this.exportDir = dir;
   }
   public String getExistingJarName() {
@@ -509,8 +504,8 @@ public int getNumMappers() {
     return this.numMappers;
   }
-  public void setNumMappers(int numMappers) {
-    this.numMappers = numMappers;
+  public void setNumMappers(int m) {
+    this.numMappers = m;
   }
   /**
@@ -520,8 +515,8 @@ public String getClassName() {
     return className;
   }
-  public void setClassName(String className) {
-    this.className = className;
+  public void setClassName(String name) {
+    this.className = name;
   }
   /**
@@ -532,16 +527,16 @@ public String getPackageName() {
     return packageName;
   }
-  public void setPackageName(String packageName) {
-    this.packageName = packageName;
+  public void setPackageName(String name) {
+    this.packageName = name;
   }
   public String getHiveHome() {
     return hiveHome;
   }
-  public void setHiveHome(String hiveHome) {
-    this.hiveHome = hiveHome;
+  public void setHiveHome(String home) {
+    this.hiveHome = home;
   }
   /** @return true if we should import the table into Hive. */
@@ -549,8 +544,8 @@ public boolean doHiveImport() {
     return hiveImport;
   }
-  public void setHiveImport(boolean hiveImport) {
-    this.hiveImport = hiveImport;
+  public void setHiveImport(boolean doImport) {
+    this.hiveImport = doImport;
   }
   /**
@@ -603,8 +598,8 @@ public String getHadoopHome() {
     return hadoopHome;
   }
-  public void setHadoopHome(String hadoopHome) {
-    this.hadoopHome = hadoopHome;
+  public void setHadoopHome(String home) {
+    this.hadoopHome = home;
   }
   /**
@@ -647,8 +642,8 @@ public FileLayout getFileLayout() {
     return this.layout;
   }
-  public void setFileLayout(FileLayout layout) {
-    this.layout = layout;
+  public void setFileLayout(FileLayout fileLayout) {
+    this.layout = fileLayout;
   }
   /**
@@ -801,8 +796,8 @@ public boolean shouldUseCompression() {
     return this.useCompression;
   }
-  public void setUseCompression(boolean useCompression) {
-    this.useCompression = useCompression;
+  public void setUseCompression(boolean compress) {
+    this.useCompression = compress;
   }
   /**
@@ -816,8 +811,8 @@ public String getHiveTableName() {
     }
   }
-  public void setHiveTableName(String tableName) {
-    this.hiveTableName = tableName;
+  public void setHiveTableName(String name) {
+    this.hiveTableName = name;
   }
   /**
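All of the setter changes in this file follow one pattern: the parameter is renamed so it no longer shadows the field it assigns, which is what checkstyle's HiddenField check reports. A minimal sketch of the before/after shape (hypothetical class, not Sqoop code):

// Hypothetical illustration of the rename pattern applied throughout SqoopOptions.
// Before: the setter parameter shadows the field it assigns, so checkstyle's
// HiddenField check flags it.
class BeforeExample {
  private String tableName;

  public void setTableName(String tableName) {
    this.tableName = tableName; // legal Java, but reported as a hidden field
  }
}

// After: the parameter gets a distinct name, so nothing is shadowed.
class AfterExample {
  private String tableName;

  public void setTableName(String table) {
    this.tableName = table;
  }
}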

View File

@@ -18,12 +18,7 @@
 package org.apache.hadoop.sqoop.cli;
-import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
 import java.util.ListIterator;
-import java.util.Properties;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Option;

View File

@@ -317,6 +317,9 @@ record sep halts processing.
         sb.append(curChar);
         state = ParseState.UNENCLOSED_FIELD;
         break;
+      default:
+        throw new ParseError("Unexpected parser state: " + state);
       }
     }
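The new default arm makes the parser's state switch exhaustive: an unexpected state now fails loudly with a ParseError instead of silently falling out of the switch, and checkstyle's missing-default warning goes away. A minimal, self-contained sketch of the same pattern (hypothetical names, not the actual RecordParser):

// Minimal sketch of an enum-driven state machine with a defensive default arm.
public class StateMachineSketch {

  enum ParseState { FIELD_START, ENCLOSED_FIELD, UNENCLOSED_FIELD }

  static class ParseError extends Exception {
    ParseError(String msg) { super(msg); }
  }

  static void step(ParseState state) throws ParseError {
    switch (state) {
    case FIELD_START:
      // begin accumulating a new field
      break;
    case ENCLOSED_FIELD:
      // read characters until the closing enclosure
      break;
    case UNENCLOSED_FIELD:
      // read characters until the field separator
      break;
    default:
      // unreachable today, but guards against new enum values added later
      throw new ParseError("Unexpected parser state: " + state);
    }
  }
}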

View File

@@ -24,8 +24,6 @@
 import java.sql.SQLException;
 import java.util.Map;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.util.ExportException;
 import org.apache.hadoop.sqoop.util.ImportException;

View File

@@ -18,22 +18,10 @@
 package org.apache.hadoop.sqoop.manager;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.nio.CharBuffer;
-import java.util.ArrayList;
-import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.mapreduce.MySQLDumpImportJob;
 import org.apache.hadoop.sqoop.mapreduce.MySQLExportJob;

View File

@@ -25,7 +25,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.util.ArrayList;
 import java.util.List;
@@ -33,7 +32,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
 import org.apache.hadoop.sqoop.util.AsyncSink;

View File

@@ -31,7 +31,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.util.ImportException;
@@ -140,8 +139,8 @@ protected static void markWarningPrinted() {
    * the type to null.
    */
   private void checkDateTimeBehavior(ImportJobContext context) {
-    final String zeroBehaviorStr = "zeroDateTimeBehavior";
-    final String convertToNull = "=convertToNull";
+    final String ZERO_BEHAVIOR_STR = "zeroDateTimeBehavior";
+    final String CONVERT_TO_NULL = "=convertToNull";
     String connectStr = context.getOptions().getConnectString();
     if (connectStr.indexOf("jdbc:") != 0) {
@@ -160,16 +159,16 @@ private void checkDateTimeBehavior(ImportJobContext context) {
     // If they haven't set the zeroBehavior option, set it to
     // squash-null for them.
     if (null == query) {
-      connectStr = connectStr + "?" + zeroBehaviorStr + convertToNull;
+      connectStr = connectStr + "?" + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
       LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
     } else if (query.length() == 0) {
-      connectStr = connectStr + zeroBehaviorStr + convertToNull;
+      connectStr = connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
       LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
-    } else if (query.indexOf(zeroBehaviorStr) == -1) {
+    } else if (query.indexOf(ZERO_BEHAVIOR_STR) == -1) {
       if (!connectStr.endsWith("&")) {
         connectStr = connectStr + "&";
       }
-      connectStr = connectStr + zeroBehaviorStr + convertToNull;
+      connectStr = connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
       LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
     }
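The renamed constants aside, the surrounding logic appends zeroDateTimeBehavior=convertToNull to the MySQL JDBC URL unless the user already set that option, choosing '?' or '&' depending on whether a query string is already present. A rough standalone sketch of that URL rewriting, under the assumption that only this one parameter matters (simplified, not the MySQLManager code):

// Simplified sketch: ensure a MySQL JDBC URL squashes zero DATETIME values to null.
public final class ZeroDateTimeSketch {

  private static final String ZERO_BEHAVIOR_STR = "zeroDateTimeBehavior";
  private static final String CONVERT_TO_NULL = "=convertToNull";

  static String ensureConvertToNull(String connectStr) {
    int queryStart = connectStr.indexOf('?');
    if (queryStart == -1) {
      // No query string yet: start one.
      return connectStr + "?" + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    }
    String query = connectStr.substring(queryStart + 1);
    if (query.isEmpty()) {
      // Bare '?' with no parameters yet.
      return connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    }
    if (query.indexOf(ZERO_BEHAVIOR_STR) != -1) {
      // Respect whatever the user already configured.
      return connectStr;
    }
    if (!connectStr.endsWith("&")) {
      connectStr = connectStr + "&";
    }
    return connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
  }

  public static void main(String[] args) {
    System.out.println(ensureConvertToNull("jdbc:mysql://localhost/db"));
    // prints: jdbc:mysql://localhost/db?zeroDateTimeBehavior=convertToNull
  }
}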

View File

@@ -23,8 +23,6 @@
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.sql.Types;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.lang.reflect.Method;
@@ -32,7 +30,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.db.OracleDataDrivenDBInputFormat;
 import org.apache.hadoop.sqoop.SqoopOptions;
@@ -299,7 +296,7 @@ public void importTable(ImportJobContext context)
   }
   /**
-   * Export data stored in HDFS into a table in a database
+   * Export data stored in HDFS into a table in a database.
    */
   public void exportTable(ExportJobContext context)
       throws IOException, ExportException {
@@ -350,7 +347,7 @@ public String toJavaType(int sqlType) {
   }
   /**
-   * Attempt to map sql type to java type
+   * Attempt to map sql type to java type.
    * @param sqlType sql type
    * @return java type
    */
@@ -377,7 +374,7 @@ private String dbToJavaType(int sqlType) {
   }
   /**
-   * Attempt to map sql type to hive type
+   * Attempt to map sql type to hive type.
    * @param sqlType sql data type
    * @return hive data type
    */
@@ -387,7 +384,7 @@ public String toHiveType(int sqlType) {
   }
   /**
-   * Resolve a database-specific type to Hive type
+   * Resolve a database-specific type to Hive type.
    * @param sqlType sql type
    * @return hive type
    */

View File

@@ -23,12 +23,10 @@
 import java.sql.ResultSet;
 import java.sql.Statement;
 import java.sql.SQLException;
-import java.util.ArrayList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.util.ImportException;

View File

@@ -45,7 +45,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.util.StringUtils;
@@ -302,24 +301,24 @@ public void importTable(ImportJobContext context)
       throws IOException, ImportException {
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    SqoopOptions options = context.getOptions();
+    SqoopOptions opts = context.getOptions();
     DataDrivenImportJob importer =
-        new DataDrivenImportJob(options, context.getInputFormat());
-    String splitCol = getSplitColumn(options, tableName);
-    if (null == splitCol && options.getNumMappers() > 1) {
+        new DataDrivenImportJob(opts, context.getInputFormat());
+    String splitCol = getSplitColumn(opts, tableName);
+    if (null == splitCol && opts.getNumMappers() > 1) {
       // Can't infer a primary key.
       throw new ImportException("No primary key could be found for table "
           + tableName + ". Please specify one with --split-by or perform "
          + "a sequential import with '-m 1'.");
     }
-    importer.runImport(tableName, jarFile, splitCol, options.getConf());
+    importer.runImport(tableName, jarFile, splitCol, opts.getConf());
   }
   /**
-   * executes an arbitrary SQL statement
+   * Executes an arbitrary SQL statement.
    * @param stmt The SQL statement to execute
    * @return A ResultSet encapsulating the results or null on error
    */

View File

@@ -20,7 +20,6 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.sql.SQLException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,25 +30,18 @@
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.sqoop.ConnFactory;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.lib.SqoopRecord;
-import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.ExportJobContext;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.shims.HadoopShim;
 import org.apache.hadoop.sqoop.shims.ShimLoader;
-import org.apache.hadoop.sqoop.util.ClassLoaderStack;
 import org.apache.hadoop.sqoop.util.ExportException;
 import org.apache.hadoop.sqoop.util.PerfCounters;

View File

@@ -26,28 +26,18 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
-import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
-import org.apache.hadoop.sqoop.ConnFactory;
 import org.apache.hadoop.sqoop.SqoopOptions;
-import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.shims.HadoopShim;
-import org.apache.hadoop.sqoop.util.ClassLoaderStack;
 import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.PerfCounters;

View File

@@ -20,12 +20,8 @@
 import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Options;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

View File

@@ -34,10 +34,8 @@
 import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
 import org.apache.hadoop.sqoop.orm.ClassWriter;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.util.ImportException;
 /**
  * Tool that generates code from a database schema.

View File

@@ -19,8 +19,6 @@
 package org.apache.hadoop.sqoop.tool;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.OptionBuilder;
@@ -34,10 +32,6 @@
 import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.util.ImportException;
 /**
  * Tool that creates a Hive table definition.

View File

@@ -19,34 +19,20 @@
 package org.apache.hadoop.sqoop.tool;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.sqoop.Sqoop;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
-import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.ExportJobContext;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.shims.ShimLoader;
 import org.apache.hadoop.sqoop.util.ExportException;
-import org.apache.hadoop.sqoop.util.ImportException;
 /**
  * Tool that performs HDFS exports to databases.
@@ -116,7 +102,7 @@ public int run(SqoopOptions options) {
   }
   /**
-   * Construct the set of options that control exports
+   * Construct the set of options that control exports.
    * @return the RelatedOptions that can be used to parse the export
    * arguments.
    */

View File

@@ -19,7 +19,6 @@
 package org.apache.hadoop.sqoop.tool;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.cli.CommandLine;
@@ -34,8 +33,6 @@
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.hive.HiveImport;
 import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.util.ImportException;
/** /**

View File

@@ -24,7 +24,6 @@
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
/** /**

View File

@@ -24,7 +24,6 @@
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
/** /**

View File

@@ -29,13 +29,11 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.SqoopParser;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.shims.ShimLoader;