Fix additional checkstyle issues.
From: Aaron Kimball <aaron@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149900 13f79535-47bb-0310-9956-ffa450edef68
parent 0b96b5f1c3
commit 20cd34e90c
@@ -605,7 +605,7 @@
file="${cobertura.home}/cobertura.jar" />
</target>

- <target name="checkstyle" depends="ivy-retrieve-checkstyle"
+ <target name="checkstyle" depends="ivy-retrieve-checkstyle, compile-all"
description="Check source code conventions">
<taskdef resource="checkstyletask.properties">
<classpath refid="${name}.checkstyle.classpath" />
@@ -614,6 +614,7 @@
<mkdir dir="${checkstyle.report.dir}" />
<checkstyle config="${checkstyle.xml}" failOnViolation="false">
<fileset dir="${base.src.dir}" includes="**/*.java" />
+ <classpath refid="test.classpath"/>
<formatter type="xml"
toFile="${checkstyle.report.dir}/checkstyle-errors.xml" />
</checkstyle>
@@ -24,17 +24,12 @@
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.lib.LargeObjectLoader;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Category;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

/**
* Command-line arguments used by Sqoop.
@@ -409,16 +404,16 @@ public String getTableName() {
return tableName;
}

- public void setTableName(String tableName) {
- this.tableName = tableName;
+ public void setTableName(String table) {
+ this.tableName = table;
}

public String getExportDir() {
return exportDir;
}

- public void setExportDir(String exportDir) {
- this.exportDir = exportDir;
+ public void setExportDir(String dir) {
+ this.exportDir = dir;
}

public String getExistingJarName() {
@@ -509,8 +504,8 @@ public int getNumMappers() {
return this.numMappers;
}

- public void setNumMappers(int numMappers) {
- this.numMappers = numMappers;
+ public void setNumMappers(int m) {
+ this.numMappers = m;
}

/**
@@ -520,8 +515,8 @@ public String getClassName() {
return className;
}

- public void setClassName(String className) {
- this.className = className;
+ public void setClassName(String name) {
+ this.className = name;
}

/**
@@ -532,16 +527,16 @@ public String getPackageName() {
return packageName;
}

- public void setPackageName(String packageName) {
- this.packageName = packageName;
+ public void setPackageName(String name) {
+ this.packageName = name;
}

public String getHiveHome() {
return hiveHome;
}

- public void setHiveHome(String hiveHome) {
- this.hiveHome = hiveHome;
+ public void setHiveHome(String home) {
+ this.hiveHome = home;
}

/** @return true if we should import the table into Hive. */
@@ -549,8 +544,8 @@ public boolean doHiveImport() {
return hiveImport;
}

- public void setHiveImport(boolean hiveImport) {
- this.hiveImport = hiveImport;
+ public void setHiveImport(boolean doImport) {
+ this.hiveImport = doImport;
}

/**
@@ -603,8 +598,8 @@ public String getHadoopHome() {
return hadoopHome;
}

- public void setHadoopHome(String hadoopHome) {
- this.hadoopHome = hadoopHome;
+ public void setHadoopHome(String home) {
+ this.hadoopHome = home;
}

/**
@@ -647,8 +642,8 @@ public FileLayout getFileLayout() {
return this.layout;
}

- public void setFileLayout(FileLayout layout) {
- this.layout = layout;
+ public void setFileLayout(FileLayout fileLayout) {
+ this.layout = fileLayout;
}

/**
@@ -801,8 +796,8 @@ public boolean shouldUseCompression() {
return this.useCompression;
}

- public void setUseCompression(boolean useCompression) {
- this.useCompression = useCompression;
+ public void setUseCompression(boolean compress) {
+ this.useCompression = compress;
}

/**
@@ -816,8 +811,8 @@ public String getHiveTableName() {
}
}

- public void setHiveTableName(String tableName) {
- this.hiveTableName = tableName;
+ public void setHiveTableName(String name) {
+ this.hiveTableName = name;
}

/**
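All of the setter hunks above apply the same fix: the parameter is renamed so it no longer shares the field's name. A minimal sketch of that pattern on a made-up class, assuming the motivating rule is checkstyle's HiddenField check (which flags parameters that shadow fields); none of this is the actual SqoopOptions source:

// Illustrative only; not the actual SqoopOptions source.
public class OptionsExample {
  private String tableName;

  public String getTableName() {
    return tableName;
  }

  // Before the fix the parameter was also named "tableName", shadowing the
  // field; giving it a distinct name keeps the assignment unambiguous.
  public void setTableName(String table) {
    this.tableName = table;
  }
}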
@@ -18,12 +18,7 @@

package org.apache.hadoop.sqoop.cli;

import java.util.Arrays;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Properties;

import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
@@ -317,6 +317,9 @@ record sep halts processing.
sb.append(curChar);
state = ParseState.UNENCLOSED_FIELD;
break;
+
+ default:
+ throw new ParseError("Unexpected parser state: " + state);
}
}

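The three added lines give the parser's state switch a default arm that fails loudly instead of silently ignoring an unknown state. The shape of that change on stand-in types, assuming the motivation is checkstyle's MissingSwitchDefault check; the ParseState values other than UNENCLOSED_FIELD and the ParseError class are declared here only so the sketch compiles:

// Stand-in types; only UNENCLOSED_FIELD appears in the real hunk above.
enum ParseState { FIELD_START, UNENCLOSED_FIELD, ENCLOSED_FIELD }

class ParseError extends Exception {
  ParseError(String msg) {
    super(msg);
  }
}

class StateMachineSketch {
  void step(ParseState state, StringBuilder sb, char curChar) throws ParseError {
    switch (state) {
    case FIELD_START:
    case ENCLOSED_FIELD:
      // ... other states handled here ...
      break;
    case UNENCLOSED_FIELD:
      sb.append(curChar);
      break;
    default:
      // Added arm: an unexpected state is now an error rather than a no-op.
      throw new ParseError("Unexpected parser state: " + state);
    }
  }
}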
@@ -24,8 +24,6 @@
import java.sql.SQLException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException;

@@ -18,22 +18,10 @@

package org.apache.hadoop.sqoop.manager;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.CharBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.mapreduce.MySQLDumpImportJob;
import org.apache.hadoop.sqoop.mapreduce.MySQLExportJob;
@@ -25,7 +25,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;
@@ -33,7 +32,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
import org.apache.hadoop.sqoop.util.AsyncSink;
@@ -31,7 +31,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.util.ImportException;
@@ -140,8 +139,8 @@ protected static void markWarningPrinted() {
* the type to null.
*/
private void checkDateTimeBehavior(ImportJobContext context) {
- final String zeroBehaviorStr = "zeroDateTimeBehavior";
- final String convertToNull = "=convertToNull";
+ final String ZERO_BEHAVIOR_STR = "zeroDateTimeBehavior";
+ final String CONVERT_TO_NULL = "=convertToNull";

String connectStr = context.getOptions().getConnectString();
if (connectStr.indexOf("jdbc:") != 0) {
@@ -160,16 +159,16 @@ private void checkDateTimeBehavior(ImportJobContext context) {
// If they haven't set the zeroBehavior option, set it to
// squash-null for them.
if (null == query) {
- connectStr = connectStr + "?" + zeroBehaviorStr + convertToNull;
+ connectStr = connectStr + "?" + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
} else if (query.length() == 0) {
- connectStr = connectStr + zeroBehaviorStr + convertToNull;
+ connectStr = connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
- } else if (query.indexOf(zeroBehaviorStr) == -1) {
+ } else if (query.indexOf(ZERO_BEHAVIOR_STR) == -1) {
if (!connectStr.endsWith("&")) {
connectStr = connectStr + "&";
}
- connectStr = connectStr + zeroBehaviorStr + convertToNull;
+ connectStr = connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
LOG.info("Setting zero DATETIME behavior to convertToNull (mysql)");
}

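The two hunks above only rename the method's local constants to upper case; the connect-string handling itself is unchanged. For readers who want to see the effect of that handling in isolation, here is a standalone sketch; the class name, method name, and example URL are made up for illustration, not taken from the Sqoop source:

// Standalone illustration of the zeroDateTimeBehavior handling shown above;
// class, method, and URL are hypothetical.
public final class ZeroDateTimeUrlSketch {

  private static final String ZERO_BEHAVIOR_STR = "zeroDateTimeBehavior";
  private static final String CONVERT_TO_NULL = "=convertToNull";

  /** Append zeroDateTimeBehavior=convertToNull unless the caller already set it. */
  public static String withConvertToNull(String connectStr) {
    int queryStart = connectStr.indexOf('?');
    String query = (queryStart == -1) ? null : connectStr.substring(queryStart + 1);
    if (null == query) {
      // No query string at all: start one.
      return connectStr + "?" + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    } else if (query.length() == 0) {
      // URL ends with '?': just append the parameter.
      return connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    } else if (query.indexOf(ZERO_BEHAVIOR_STR) == -1) {
      // Existing parameters, but not this one: append with '&'.
      if (!connectStr.endsWith("&")) {
        connectStr = connectStr + "&";
      }
      return connectStr + ZERO_BEHAVIOR_STR + CONVERT_TO_NULL;
    }
    return connectStr; // the user already chose a zeroDateTimeBehavior
  }

  public static void main(String[] args) {
    // Prints: jdbc:mysql://db.example.com/sales?zeroDateTimeBehavior=convertToNull
    System.out.println(withConvertToNull("jdbc:mysql://db.example.com/sales"));
  }
}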
@@ -23,8 +23,6 @@
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.lang.reflect.Method;
@@ -32,7 +30,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.db.OracleDataDrivenDBInputFormat;
import org.apache.hadoop.sqoop.SqoopOptions;
@@ -299,7 +296,7 @@ public void importTable(ImportJobContext context)
}

/**
- * Export data stored in HDFS into a table in a database
+ * Export data stored in HDFS into a table in a database.
*/
public void exportTable(ExportJobContext context)
throws IOException, ExportException {
@@ -350,7 +347,7 @@ public String toJavaType(int sqlType) {
}

/**
- * Attempt to map sql type to java type
+ * Attempt to map sql type to java type.
* @param sqlType sql type
* @return java type
*/
@@ -377,7 +374,7 @@ private String dbToJavaType(int sqlType) {
}

/**
- * Attempt to map sql type to hive type
+ * Attempt to map sql type to hive type.
* @param sqlType sql data type
* @return hive data type
*/
@@ -387,7 +384,7 @@ public String toHiveType(int sqlType) {
}

/**
- * Resolve a database-specific type to Hive type
+ * Resolve a database-specific type to Hive type.
* @param sqlType sql type
* @return hive type
*/
@@ -23,12 +23,10 @@
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.SQLException;
import java.util.ArrayList;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.util.ImportException;

@@ -45,7 +45,6 @@

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.util.StringUtils;

@@ -302,24 +301,24 @@ public void importTable(ImportJobContext context)
throws IOException, ImportException {
String tableName = context.getTableName();
String jarFile = context.getJarFile();
- SqoopOptions options = context.getOptions();
+ SqoopOptions opts = context.getOptions();

DataDrivenImportJob importer =
- new DataDrivenImportJob(options, context.getInputFormat());
+ new DataDrivenImportJob(opts, context.getInputFormat());

- String splitCol = getSplitColumn(options, tableName);
- if (null == splitCol && options.getNumMappers() > 1) {
+ String splitCol = getSplitColumn(opts, tableName);
+ if (null == splitCol && opts.getNumMappers() > 1) {
// Can't infer a primary key.
throw new ImportException("No primary key could be found for table "
    + tableName + ". Please specify one with --split-by or perform "
    + "a sequential import with '-m 1'.");
}

- importer.runImport(tableName, jarFile, splitCol, options.getConf());
+ importer.runImport(tableName, jarFile, splitCol, opts.getConf());
}

/**
- * executes an arbitrary SQL statement
+ * Executes an arbitrary SQL statement.
* @param stmt The SQL statement to execute
* @return A ResultSet encapsulating the results or null on error
*/
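The importTable hunk above is purely a rename of the local variable options to opts; the behavior, including the guard that refuses a parallel import without a split column, is untouched. A compact restatement of that guard on its own, with hypothetical names and a generic exception standing in for Sqoop's ImportException:

// Hypothetical, self-contained restatement of the split-column guard above.
public class SplitColumnGuardSketch {

  static void checkSplitColumn(String splitCol, int numMappers, String tableName) {
    if (null == splitCol && numMappers > 1) {
      // A parallel import needs a column to partition the query on; either
      // supply one with --split-by or fall back to a single mapper (-m 1).
      throw new IllegalStateException("No primary key could be found for table "
          + tableName + ". Please specify one with --split-by or perform "
          + "a sequential import with '-m 1'.");
    }
  }

  public static void main(String[] args) {
    checkSplitColumn("id", 4, "employees");   // fine: split column present
    checkSplitColumn(null, 1, "employees");   // fine: single mapper
    checkSplitColumn(null, 4, "employees");   // throws IllegalStateException
  }
}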
@@ -20,7 +20,6 @@

import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.SQLException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -31,25 +30,18 @@
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

import org.apache.hadoop.sqoop.ConnFactory;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.lib.SqoopRecord;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.ExportJobContext;
import org.apache.hadoop.sqoop.orm.TableClassName;
import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;
import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.PerfCounters;

@@ -26,28 +26,18 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

import org.apache.hadoop.sqoop.ConnFactory;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.orm.TableClassName;
import org.apache.hadoop.sqoop.shims.HadoopShim;
import org.apache.hadoop.sqoop.util.ClassLoaderStack;
import org.apache.hadoop.sqoop.util.ImportException;
import org.apache.hadoop.sqoop.util.PerfCounters;

@@ -20,12 +20,8 @@

import java.sql.SQLException;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -34,10 +34,8 @@
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;
import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ImportJobContext;
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.util.ImportException;

/**
* Tool that generates code from a database schema.
@@ -19,8 +19,6 @@
package org.apache.hadoop.sqoop.tool;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.OptionBuilder;
@@ -34,10 +32,6 @@
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;
import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ImportJobContext;
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.util.ImportException;

/**
* Tool that creates a Hive table definition.
@@ -19,34 +19,20 @@
package org.apache.hadoop.sqoop.tool;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;
import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.ExportJobContext;
import org.apache.hadoop.sqoop.manager.ImportJobContext;
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.shims.ShimLoader;
import org.apache.hadoop.sqoop.util.ExportException;
import org.apache.hadoop.sqoop.util.ImportException;

/**
* Tool that performs HDFS exports to databases.
@@ -116,7 +102,7 @@ public int run(SqoopOptions options) {
}

/**
- * Construct the set of options that control exports
+ * Construct the set of options that control exports.
* @return the RelatedOptions that can be used to parse the export
* arguments.
*/
@@ -19,7 +19,6 @@
package org.apache.hadoop.sqoop.tool;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.cli.CommandLine;
@@ -34,8 +33,6 @@
import org.apache.hadoop.sqoop.cli.ToolOptions;
import org.apache.hadoop.sqoop.hive.HiveImport;
import org.apache.hadoop.sqoop.manager.ImportJobContext;
import org.apache.hadoop.sqoop.orm.ClassWriter;
import org.apache.hadoop.sqoop.orm.CompilationManager;
import org.apache.hadoop.sqoop.util.ImportException;

/**
@@ -24,7 +24,6 @@

import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;

/**
@@ -24,7 +24,6 @@

import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.ToolOptions;

/**
@@ -29,13 +29,11 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;

import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.hadoop.sqoop.cli.RelatedOptions;
import org.apache.hadoop.sqoop.cli.SqoopParser;
import org.apache.hadoop.sqoop.cli.ToolOptions;
import org.apache.hadoop.sqoop.shims.ShimLoader;