From 20cd34e90ca180c9550f771748d766cd6f0ec189 Mon Sep 17 00:00:00 2001
From: Andrew Bayer
Date: Fri, 22 Jul 2011 20:03:49 +0000
Subject: [PATCH] Fix additional checkstyle issues.

From: Aaron Kimball

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149900 13f79535-47bb-0310-9956-ffa450edef68
---
 build.xml                                      |  3 +-
 .../org/apache/hadoop/sqoop/SqoopOptions.java  | 49 +++++++++----------
 .../apache/hadoop/sqoop/cli/SqoopParser.java   |  5 --
 .../apache/hadoop/sqoop/lib/RecordParser.java  |  3 ++
 .../hadoop/sqoop/manager/ConnManager.java      |  2 -
 .../sqoop/manager/DirectMySQLManager.java      | 12 -----
 .../manager/DirectPostgresqlManager.java       |  2 -
 .../hadoop/sqoop/manager/MySQLManager.java     | 13 +++--
 .../hadoop/sqoop/manager/OracleManager.java    | 11 ++---
 .../sqoop/manager/PostgresqlManager.java       |  2 -
 .../hadoop/sqoop/manager/SqlManager.java       | 13 +++--
 .../hadoop/sqoop/mapreduce/ExportJobBase.java  |  8 ---
 .../hadoop/sqoop/mapreduce/ImportJobBase.java  | 10 ----
 .../hadoop/sqoop/tool/BaseSqoopTool.java       |  4 --
 .../apache/hadoop/sqoop/tool/CodeGenTool.java  |  2 -
 .../sqoop/tool/CreateHiveTableTool.java        |  6 ---
 .../apache/hadoop/sqoop/tool/ExportTool.java   | 16 +-----
 .../apache/hadoop/sqoop/tool/ImportTool.java   |  3 --
 .../hadoop/sqoop/tool/ListDatabasesTool.java   |  1 -
 .../hadoop/sqoop/tool/ListTablesTool.java      |  1 -
 .../apache/hadoop/sqoop/tool/SqoopTool.java    |  2 -
 21 files changed, 44 insertions(+), 124 deletions(-)

diff --git a/build.xml b/build.xml
index ae068a22..0253c009 100644
--- a/build.xml
+++ b/build.xml
@@ -605,7 +605,7 @@
           file="${cobertura.home}/cobertura.jar" />
-
@@ -614,6 +614,7 @@
+
diff --git a/src/java/org/apache/hadoop/sqoop/SqoopOptions.java b/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
index 1aaf7b2b..41fb1aee 100644
--- a/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
+++ b/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
@@ -24,17 +24,12 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Arrays;
-import java.util.ArrayList;
 import java.util.Properties;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.lib.LargeObjectLoader;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Category;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;

 /**
  * Command-line arguments used by Sqoop.
@@ -409,16 +404,16 @@ public String getTableName() {
     return tableName;
   }

-  public void setTableName(String tableName) {
-    this.tableName = tableName;
+  public void setTableName(String table) {
+    this.tableName = table;
   }

   public String getExportDir() {
     return exportDir;
   }

-  public void setExportDir(String exportDir) {
-    this.exportDir = exportDir;
+  public void setExportDir(String dir) {
+    this.exportDir = dir;
   }

   public String getExistingJarName() {
@@ -509,8 +504,8 @@ public int getNumMappers() {
     return this.numMappers;
   }

-  public void setNumMappers(int numMappers) {
-    this.numMappers = numMappers;
+  public void setNumMappers(int m) {
+    this.numMappers = m;
   }

   /**
@@ -520,8 +515,8 @@ public String getClassName() {
     return className;
   }

-  public void setClassName(String className) {
-    this.className = className;
+  public void setClassName(String name) {
+    this.className = name;
   }

   /**
@@ -532,16 +527,16 @@ public String getPackageName() {
     return packageName;
   }

-  public void setPackageName(String packageName) {
-    this.packageName = packageName;
+  public void setPackageName(String name) {
+    this.packageName = name;
   }

   public String getHiveHome() {
     return hiveHome;
   }

-  public void setHiveHome(String hiveHome) {
-    this.hiveHome = hiveHome;
+  public void setHiveHome(String home) {
+    this.hiveHome = home;
   }

   /** @return true if we should import the table into Hive. */
@@ -549,8 +544,8 @@ public boolean doHiveImport() {
     return hiveImport;
   }

-  public void setHiveImport(boolean hiveImport) {
-    this.hiveImport = hiveImport;
+  public void setHiveImport(boolean doImport) {
+    this.hiveImport = doImport;
   }

   /**
@@ -603,8 +598,8 @@ public String getHadoopHome() {
     return hadoopHome;
   }

-  public void setHadoopHome(String hadoopHome) {
-    this.hadoopHome = hadoopHome;
+  public void setHadoopHome(String home) {
+    this.hadoopHome = home;
   }

   /**
@@ -647,8 +642,8 @@ public FileLayout getFileLayout() {
     return this.layout;
   }

-  public void setFileLayout(FileLayout layout) {
-    this.layout = layout;
+  public void setFileLayout(FileLayout fileLayout) {
+    this.layout = fileLayout;
   }

   /**
@@ -801,8 +796,8 @@ public boolean shouldUseCompression() {
     return this.useCompression;
   }

-  public void setUseCompression(boolean useCompression) {
-    this.useCompression = useCompression;
+  public void setUseCompression(boolean compress) {
+    this.useCompression = compress;
   }

   /**
@@ -816,8 +811,8 @@ public String getHiveTableName() {
     }
   }

-  public void setHiveTableName(String tableName) {
-    this.hiveTableName = tableName;
+  public void setHiveTableName(String name) {
+    this.hiveTableName = name;
   }

   /**
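The setter renames above (setTableName(String tableName) becoming setTableName(String table), and so on throughout SqoopOptions) are the classic fix for checkstyle's HiddenField check, which flags parameters that shadow the field they assign; the commit message does not name the rule, so treat that as a reasonable inference. A minimal sketch of the pattern, using a hypothetical class rather than Sqoop's own:

```java
// Hypothetical example of the parameter-shadowing pattern the patch removes.
public class ConnectionConfig {

  private String tableName;

  // Before: a parameter named "tableName" hides the field of the same name,
  // which checkstyle's HiddenField check flags. The assignment still works
  // because of the explicit "this." qualifier, but a bare
  // "tableName = tableName" would silently assign the parameter to itself.
  //
  //   public void setTableName(String tableName) {
  //     this.tableName = tableName;
  //   }

  // After: a distinct parameter name, so no shadowing can occur.
  public void setTableName(String table) {
    this.tableName = table;
  }

  public String getTableName() {
    return tableName;
  }

  public static void main(String[] args) {
    ConnectionConfig config = new ConnectionConfig();
    config.setTableName("employees");
    System.out.println(config.getTableName()); // prints "employees"
  }
}
```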
diff --git a/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java b/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
index 670b48ed..9c913aa2 100644
--- a/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
+++ b/src/java/org/apache/hadoop/sqoop/cli/SqoopParser.java
@@ -18,12 +18,7 @@
 package org.apache.hadoop.sqoop.cli;

-import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
 import java.util.ListIterator;
-import java.util.Properties;

 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Option;
diff --git a/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java b/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java
index fce0579b..cb6c24a4 100644
--- a/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java
+++ b/src/java/org/apache/hadoop/sqoop/lib/RecordParser.java
@@ -317,6 +317,9 @@ record sep halts processing.
         sb.append(curChar);
         state = ParseState.UNENCLOSED_FIELD;
         break;
+
+      default:
+        throw new ParseError("Unexpected parser state: " + state);
       }
     }
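With the new default arm, RecordParser's state-machine switch fails loudly on an unhandled ParseState instead of falling through silently — the shape checkstyle's MissingSwitchDefault check asks for (again an inference; the patch does not say which rule fired). A self-contained sketch of the same defensive-default idea, with hypothetical names throughout:

```java
// Hypothetical sketch of a defensive default arm in an enum-driven state machine.
public class StateMachineSketch {

  enum ParseState { FIELD_START, UNENCLOSED_FIELD, ENCLOSED_FIELD }

  static class ParseError extends RuntimeException {
    ParseError(String msg) { super(msg); }
  }

  static String describe(ParseState state) {
    switch (state) {
    case FIELD_START:
      return "at start of field";
    case UNENCLOSED_FIELD:
      return "inside unenclosed field";
    case ENCLOSED_FIELD:
      return "inside enclosed field";
    default:
      // Unreachable today, but if a new ParseState constant is added and this
      // switch is not updated, the bug surfaces immediately rather than the
      // machine silently continuing in an undefined state.
      throw new ParseError("Unexpected parser state: " + state);
    }
  }

  public static void main(String[] args) {
    System.out.println(describe(ParseState.FIELD_START));
  }
}
```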
diff --git a/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java b/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
index e8c20651..d147403f 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
@@ -24,8 +24,6 @@
 import java.sql.SQLException;
 import java.util.Map;

-import org.apache.hadoop.conf.Configuration;
-
 import org.apache.hadoop.sqoop.util.ExportException;
 import org.apache.hadoop.sqoop.util.ImportException;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java b/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java
index b5b97df6..dafe75f8 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/DirectMySQLManager.java
@@ -18,22 +18,10 @@
 package org.apache.hadoop.sqoop.manager;

-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.nio.CharBuffer;
-import java.util.ArrayList;
-import java.util.List;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.mapreduce.MySQLDumpImportJob;
 import org.apache.hadoop.sqoop.mapreduce.MySQLExportJob;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java b/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
index 88adf3da..dccdad7d 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
@@ -25,7 +25,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.util.ArrayList;
 import java.util.List;
@@ -33,7 +32,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
 import org.apache.hadoop.sqoop.util.AsyncSink;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java b/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
index a1c819ae..7153c2f9 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
@@ -31,7 +31,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.util.ImportException;
@@ -140,8 +139,8 @@ protected static void markWarningPrinted() {
    * the type to null.
    */
   private void checkDateTimeBehavior(ImportJobContext context) {
-    final String zeroBehaviorStr = "zeroDateTimeBehavior";
-    final String convertToNull = "=convertToNull";
+    final String ZERO_BEHAVIOR_STR = "zeroDateTimeBehavior";
+    final String CONVERT_TO_NULL = "=convertToNull";

     String connectStr = context.getOptions().getConnectString();
     if (connectStr.indexOf("jdbc:") != 0) {
@@ -160,16 +159,16 @@ private void checkDateTimeBehavior(ImportJobContext context) {
     // If they haven't set the zeroBehavior option, set it to
     // squash-null for them.
     if (null == query) {
-      connectStr = connectStr + "?" + zeroBehaviorStr + convertToNull;
+      connectStr = connectStr + "?" + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
       LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
     } else if (query.length() == 0) {
-      connectStr = connectStr + zeroBehaviorStr + convertToNull;
+      connectStr = connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
       LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
-    } else if (query.indexOf(zeroBehaviorStr) == -1) {
+    } else if (query.indexOf(ZERO_BEHAVIOR_STR) == -1) {
       if (!connectStr.endsWith("&")) {
         connectStr = connectStr + "&";
       }
-      connectStr = connectStr + zeroBehaviorStr + convertToNull;
+      connectStr = connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
       LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
     }
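checkDateTimeBehavior appends zeroDateTimeBehavior=convertToNull to the MySQL connect string so that zero-valued DATETIMEs come back as nulls rather than driver errors, and it must handle three URL shapes: no query string, an empty query string, and an existing query string that lacks the parameter. (The upper-casing of the two local constants presumably satisfies a constant-naming rule in the project's checkstyle configuration.) A standalone sketch of that case analysis — a hypothetical helper mirroring, not taken from, the patched method:

```java
// Hypothetical helper showing the three URL cases the patched method handles.
public class ConnectStringSketch {

  static final String ZERO_BEHAVIOR_STR = "zeroDateTimeBehavior";
  static final String CONVERT_TO_NULL = "=convertToNull";

  static String ensureZeroDateTimeBehavior(String connectStr) {
    int qMark = connectStr.indexOf('?');
    String query = (qMark == -1) ? null : connectStr.substring(qMark + 1);
    if (null == query) {
      // No query string yet: start one.
      return connectStr + "?" + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    } else if (query.length() == 0) {
      // "...?" with nothing after it: just append the parameter.
      return connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    } else if (query.indexOf(ZERO_BEHAVIOR_STR) == -1) {
      // Existing parameters, but not this one: join with '&'.
      if (!connectStr.endsWith("&")) {
        connectStr = connectStr + "&";
      }
      return connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    }
    return connectStr; // Already set by the user; leave their choice alone.
  }

  public static void main(String[] args) {
    // db.example.com is a placeholder host.
    System.out.println(ensureZeroDateTimeBehavior("jdbc:mysql://db.example.com/sales"));
    System.out.println(ensureZeroDateTimeBehavior("jdbc:mysql://db.example.com/sales?"));
    System.out.println(ensureZeroDateTimeBehavior("jdbc:mysql://db.example.com/sales?useSSL=false"));
  }
}
```

Running it prints the three rewritten URLs; a URL that already mentions zeroDateTimeBehavior is left untouched, so an explicit user setting survives.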
diff --git a/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java b/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
index 19afe059..f3ec1fdc 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
@@ -23,8 +23,6 @@
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.sql.Types;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.lang.reflect.Method;
@@ -32,7 +30,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.db.OracleDataDrivenDBInputFormat;
 import org.apache.hadoop.sqoop.SqoopOptions;
@@ -299,7 +296,7 @@ public void importTable(ImportJobContext context)
   }

   /**
-   * Export data stored in HDFS into a table in a database
+   * Export data stored in HDFS into a table in a database.
    */
   public void exportTable(ExportJobContext context)
       throws IOException, ExportException {
@@ -350,7 +347,7 @@ public String toJavaType(int sqlType) {
   }

   /**
-   * Attempt to map sql type to java type
+   * Attempt to map sql type to java type.
    * @param sqlType sql type
    * @return java type
    */
@@ -377,7 +374,7 @@ private String dbToJavaType(int sqlType) {
   }

   /**
-   * Attempt to map sql type to hive type
+   * Attempt to map sql type to hive type.
    * @param sqlType sql data type
    * @return hive data type
    */
@@ -387,7 +384,7 @@ public String toHiveType(int sqlType) {
   }

   /**
-   * Resolve a database-specific type to Hive type
+   * Resolve a database-specific type to Hive type.
    * @param sqlType sql type
    * @return hive type
    */
diff --git a/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java b/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
index 2b120bb6..41a4d211 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
@@ -23,12 +23,10 @@
 import java.sql.ResultSet;
 import java.sql.Statement;
 import java.sql.SQLException;
-import java.util.ArrayList;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.util.ImportException;
diff --git a/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java b/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
index 4ce0f1e0..8e7179d8 100644
--- a/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
+++ b/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
@@ -45,7 +45,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.util.StringUtils;
@@ -302,24 +301,24 @@ public void importTable(ImportJobContext context)
       throws IOException, ImportException {
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    SqoopOptions options = context.getOptions();
+    SqoopOptions opts = context.getOptions();

     DataDrivenImportJob importer =
-        new DataDrivenImportJob(options, context.getInputFormat());
+        new DataDrivenImportJob(opts, context.getInputFormat());

-    String splitCol = getSplitColumn(options, tableName);
-    if (null == splitCol && options.getNumMappers() > 1) {
+    String splitCol = getSplitColumn(opts, tableName);
+    if (null == splitCol && opts.getNumMappers() > 1) {
       // Can't infer a primary key.
       throw new ImportException("No primary key could be found for table "
           + tableName + ". Please specify one with --split-by or perform "
           + "a sequential import with '-m 1'.");
     }

-    importer.runImport(tableName, jarFile, splitCol, options.getConf());
+    importer.runImport(tableName, jarFile, splitCol, opts.getConf());
   }

   /**
-   * executes an arbitrary SQL statement
+   * Executes an arbitrary SQL statement.
    * @param stmt The SQL statement to execute
    * @return A ResultSet encapsulating the results or null on error
    */
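The importTable guard above is the one behavioral subtlety in this file: a parallel import needs a column whose value range can be carved into per-mapper slices, so with more than one mapper and no inferable primary key Sqoop refuses to guess and asks for --split-by (or -m 1 for a sequential import). A rough, hypothetical sketch of how a numeric split column becomes per-mapper WHERE clauses — simplified from what Hadoop's data-driven input splits actually do with bounding queries:

```java
import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch: derive per-mapper WHERE clauses from the min/max of a
// numeric split column, roughly the idea behind data-driven import splits.
public class SplitSketch {

  static List<String> makeSplits(String splitCol, long min, long max, int numMappers) {
    List<String> conditions = new ArrayList<String>();
    long span = (max - min + 1 + numMappers - 1) / numMappers; // ceiling division
    for (long lo = min; lo <= max; lo += span) {
      long hi = Math.min(lo + span - 1, max);
      // Each mapper runs: SELECT ... WHERE <condition>
      conditions.add(splitCol + " >= " + lo + " AND " + splitCol + " <= " + hi);
    }
    return conditions;
  }

  public static void main(String[] args) {
    // e.g. a table with ids 1..1000, imported with -m 4
    for (String cond : makeSplits("id", 1, 1000, 4)) {
      System.out.println(cond);
    }
  }
}
```

With ids 1..1000 and four mappers this prints four disjoint ranges; without a split column there is nothing to range over, hence the exception.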
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java b/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java
index e58b8bc8..b6d54f10 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJobBase.java
@@ -20,7 +20,6 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.sql.SQLException;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,25 +30,18 @@
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

-import org.apache.hadoop.sqoop.ConnFactory;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.lib.SqoopRecord;
-import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.ExportJobContext;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.shims.HadoopShim;
 import org.apache.hadoop.sqoop.shims.ShimLoader;
-import org.apache.hadoop.sqoop.util.ClassLoaderStack;
 import org.apache.hadoop.sqoop.util.ExportException;
 import org.apache.hadoop.sqoop.util.PerfCounters;
diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java b/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
index ea54296a..d9f0c496 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
@@ -26,28 +26,18 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
-import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;

-import org.apache.hadoop.sqoop.ConnFactory;
 import org.apache.hadoop.sqoop.SqoopOptions;
-import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.shims.HadoopShim;
-import org.apache.hadoop.sqoop.util.ClassLoaderStack;
 import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.PerfCounters;
diff --git a/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java b/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
index c99db56f..1b73b4ba 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/BaseSqoopTool.java
@@ -20,12 +20,8 @@
 import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;

 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Options;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java b/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
index d19d93aa..cae91f66 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/CodeGenTool.java
@@ -34,10 +34,8 @@
 import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
 import org.apache.hadoop.sqoop.orm.ClassWriter;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.util.ImportException;

 /**
  * Tool that generates code from a database schema.
diff --git a/src/java/org/apache/hadoop/sqoop/tool/CreateHiveTableTool.java b/src/java/org/apache/hadoop/sqoop/tool/CreateHiveTableTool.java
index 24af6359..4a17ffe8 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/CreateHiveTableTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/CreateHiveTableTool.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.sqoop.tool;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.OptionBuilder;
@@ -34,10 +32,6 @@
 import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.util.ImportException;

 /**
  * Tool that creates a Hive table definition.
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java b/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
index 09afa44a..72bc4218 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ExportTool.java
@@ -19,34 +19,20 @@
 package org.apache.hadoop.sqoop.tool;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;

 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.StringUtils;

 import org.apache.hadoop.sqoop.Sqoop;
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
-import org.apache.hadoop.sqoop.hive.HiveImport;
-import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.ExportJobContext;
-import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.shims.ShimLoader;
 import org.apache.hadoop.sqoop.util.ExportException;
-import org.apache.hadoop.sqoop.util.ImportException;

 /**
  * Tool that performs HDFS exports to databases.
@@ -116,7 +102,7 @@ public int run(SqoopOptions options) {
   }

   /**
-   * Construct the set of options that control exports
+   * Construct the set of options that control exports.
    * @return the RelatedOptions that can be used to parse the export
    *         arguments.
    */
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java b/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
index 9bb79f9c..6aea0090 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ImportTool.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.sqoop.tool;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;

 import org.apache.commons.cli.CommandLine;
@@ -34,8 +33,6 @@
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.hive.HiveImport;
 import org.apache.hadoop.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.sqoop.orm.ClassWriter;
-import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.util.ImportException;

 /**
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java b/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
index d2d6f715..a8a6796b 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ListDatabasesTool.java
@@ -24,7 +24,6 @@
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;

 /**
diff --git a/src/java/org/apache/hadoop/sqoop/tool/ListTablesTool.java b/src/java/org/apache/hadoop/sqoop/tool/ListTablesTool.java
index e7a055d8..e0526808 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/ListTablesTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/ListTablesTool.java
@@ -24,7 +24,6 @@
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.ToolOptions;

 /**
diff --git a/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java b/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
index c7cdd7d2..3954fab4 100644
--- a/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
+++ b/src/java/org/apache/hadoop/sqoop/tool/SqoopTool.java
@@ -29,13 +29,11 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;

 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.cli.RelatedOptions;
 import org.apache.hadoop.sqoop.cli.SqoopParser;
 import org.apache.hadoop.sqoop.cli.ToolOptions;
 import org.apache.hadoop.sqoop.shims.ShimLoader;