From 5eaafd32432fecb47f1d6b13615e5f0384f3f230 Mon Sep 17 00:00:00 2001 From: Andrew Bayer Date: Fri, 22 Jul 2011 20:04:39 +0000 Subject: [PATCH] SQOOP-225. Checkstyle module for detecting trailing whitespace. This patch adds a Checkstyle module to detect trailing whitespace. It also removes existing instances of trailing whitespace throughout the code. From: Ahmed Radwan git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1150046 13f79535-47bb-0310-9956-ffa450edef68 --- src/java/com/cloudera/sqoop/ConnFactory.java | 6 +-- src/java/com/cloudera/sqoop/SqoopOptions.java | 4 +- .../com/cloudera/sqoop/cli/ToolOptions.java | 4 +- .../sqoop/hbase/HBasePutProcessor.java | 2 +- .../com/cloudera/sqoop/hive/HiveTypes.java | 2 +- src/java/com/cloudera/sqoop/io/LobFile.java | 40 +++++++++---------- .../com/cloudera/sqoop/io/LobReaderCache.java | 4 +- src/java/com/cloudera/sqoop/io/NamedFifo.java | 2 +- .../com/cloudera/sqoop/lib/DelimiterSet.java | 2 +- .../com/cloudera/sqoop/lib/FieldMappable.java | 2 +- .../cloudera/sqoop/lib/LargeObjectLoader.java | 4 +- .../com/cloudera/sqoop/lib/RecordParser.java | 10 ++--- .../com/cloudera/sqoop/lib/SqoopRecord.java | 4 +- .../sqoop/mapreduce/AsyncSqlOutputFormat.java | 8 ++-- .../sqoop/mapreduce/AsyncSqlRecordWriter.java | 4 +- .../mapreduce/CombineShimRecordReader.java | 2 +- .../mapreduce/DelegatingOutputFormat.java | 8 ++-- .../sqoop/mapreduce/ImportJobBase.java | 6 +-- .../sqoop/mapreduce/MergeMapperBase.java | 4 +- .../cloudera/sqoop/mapreduce/MergeRecord.java | 4 +- .../sqoop/mapreduce/MergeTextMapper.java | 2 +- .../sqoop/mapreduce/MySQLExportJob.java | 2 +- .../sqoop/mapreduce/MySQLExportMapper.java | 2 +- .../mapreduce/MySQLTextExportMapper.java | 2 +- .../mapreduce/OracleExportOutputFormat.java | 4 +- .../sqoop/mapreduce/UpdateOutputFormat.java | 8 ++-- .../db/DataDrivenDBRecordReader.java | 2 +- .../mapreduce/db/OracleDBRecordReader.java | 8 ++-- .../cloudera/sqoop/metastore/JobStorage.java | 2 +- .../metastore/hsqldb/AutoHsqldbStorage.java | 2 +- .../metastore/hsqldb/HsqldbJobStorage.java | 10 ++--- .../com/cloudera/sqoop/orm/ClassWriter.java | 2 +- .../sqoop/orm/CompilationManager.java | 2 +- .../cloudera/sqoop/orm/TableClassName.java | 2 +- .../com/cloudera/sqoop/tool/ImportTool.java | 2 +- src/java/com/cloudera/sqoop/tool/JobTool.java | 8 ++-- .../com/cloudera/sqoop/tool/MergeTool.java | 2 +- .../cloudera/sqoop/tool/MetastoreTool.java | 2 +- .../com/cloudera/sqoop/tool/SqoopTool.java | 2 +- .../com/cloudera/sqoop/tool/ToolDesc.java | 2 +- .../com/cloudera/sqoop/util/AppendUtils.java | 2 +- .../com/cloudera/sqoop/util/AsyncSink.java | 2 +- .../com/cloudera/sqoop/util/Executor.java | 6 +-- src/perftest/ExportStressTest.java | 2 +- src/perftest/LobFileStressTest.java | 8 ++-- src/test/checkstyle.xml | 6 +++ .../com/cloudera/sqoop/TestAppendUtils.java | 6 +-- .../com/cloudera/sqoop/TestCompression.java | 6 +-- .../com/cloudera/sqoop/TestConnFactory.java | 4 +- .../com/cloudera/sqoop/TestExportUpdate.java | 8 ++-- .../com/cloudera/sqoop/TestTargetDir.java | 2 +- .../cloudera/sqoop/hive/TestHiveImport.java | 2 +- .../com/cloudera/sqoop/io/TestLobFile.java | 6 +-- .../io/TestSplittableBufferedWriter.java | 2 +- .../com/cloudera/sqoop/lib/TestClobRef.java | 2 +- .../sqoop/lib/TestFieldFormatter.java | 6 +-- .../cloudera/sqoop/lib/TestRecordParser.java | 4 +- .../sqoop/manager/OracleManagerTest.java | 4 +- .../db/TestDataDrivenDBInputFormat.java | 2 +- .../sqoop/metastore/TestSavedJobs.java | 4 +-
.../sqoop/testutil/ExportJobTestCase.java | 2 +- .../sqoop/testutil/ManagerCompatTestCase.java | 10 ++--- .../sqoop/testutil/MockResultSet.java | 4 +- .../sqoop/testutil/ReparseMapper.java | 2 +- .../sqoop/testutil/SeqFileReader.java | 2 +- .../cloudera/sqoop/tool/TestToolPlugin.java | 6 +-- 66 files changed, 153 insertions(+), 147 deletions(-) diff --git a/src/java/com/cloudera/sqoop/ConnFactory.java b/src/java/com/cloudera/sqoop/ConnFactory.java index 3eaa5a02..4e358235 100644 --- a/src/java/com/cloudera/sqoop/ConnFactory.java +++ b/src/java/com/cloudera/sqoop/ConnFactory.java @@ -60,7 +60,7 @@ public ConnFactory(Configuration conf) { instantiateFactories(conf); } - /** The sqoop-site.xml configuration property used to set the list of + /** The sqoop-site.xml configuration property used to set the list of * available ManagerFactories. */ public static final String FACTORY_CLASS_NAMES_KEY = @@ -69,7 +69,7 @@ public ConnFactory(Configuration conf) { // The default value for sqoop.connection.factories is the // name of the DefaultManagerFactory. static final String DEFAULT_FACTORY_CLASS_NAMES = - DefaultManagerFactory.class.getName(); + DefaultManagerFactory.class.getName(); /** The list of ManagerFactory instances consulted by getManager(). */ @@ -199,7 +199,7 @@ private Configuration loadManagersFromConfDir(Configuration conf) { String confDirName = System.getenv("SQOOP_CONF_DIR"); if (null == confDirName) { LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. " - + "Cannot check for additional configuration."); + + "Cannot check for additional configuration."); return conf; } diff --git a/src/java/com/cloudera/sqoop/SqoopOptions.java b/src/java/com/cloudera/sqoop/SqoopOptions.java index eddae34e..5aa3ec4e 100644 --- a/src/java/com/cloudera/sqoop/SqoopOptions.java +++ b/src/java/com/cloudera/sqoop/SqoopOptions.java @@ -1345,7 +1345,7 @@ public boolean shouldUseCompression() { public void setUseCompression(boolean compress) { this.useCompression = compress; } - + /** * @return the name of the compression codec to use when importing. * E.g. org.apache.hadoop.io.compress.GzipCodec. @@ -1353,7 +1353,7 @@ public void setUseCompression(boolean compress) { public String getCompressionCodec() { return compressionCodec; } - + public void setCompressionCodec(String codec) { this.compressionCodec = codec; } diff --git a/src/java/com/cloudera/sqoop/cli/ToolOptions.java b/src/java/com/cloudera/sqoop/cli/ToolOptions.java index a5891c65..c4535ddb 100644 --- a/src/java/com/cloudera/sqoop/cli/ToolOptions.java +++ b/src/java/com/cloudera/sqoop/cli/ToolOptions.java @@ -86,7 +86,7 @@ public Iterator iterator() { return optGroups.iterator(); } - + /** * Flatten the different sets of related options into a single collection * of options. 
@@ -102,7 +102,7 @@ public Options merge() { totalOpts++; } } - + return mergedOpts; } diff --git a/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java b/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java index aeecba70..38c40f4b 100644 --- a/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java +++ b/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java @@ -70,7 +70,7 @@ public class HBasePutProcessor implements Closeable, Configurable, public HBasePutProcessor() { } - + @Override @SuppressWarnings("unchecked") public void setConf(Configuration config) { diff --git a/src/java/com/cloudera/sqoop/hive/HiveTypes.java b/src/java/com/cloudera/sqoop/hive/HiveTypes.java index 7759bb1d..f8bda617 100644 --- a/src/java/com/cloudera/sqoop/hive/HiveTypes.java +++ b/src/java/com/cloudera/sqoop/hive/HiveTypes.java @@ -86,7 +86,7 @@ public static String toHiveType(int sqlType) { } } - /** + /** * @return true if a sql type can't be translated to a precise match * in Hive, and we have to cast it to something more generic. */ diff --git a/src/java/com/cloudera/sqoop/io/LobFile.java b/src/java/com/cloudera/sqoop/io/LobFile.java index fa6845b1..0c85aff1 100644 --- a/src/java/com/cloudera/sqoop/io/LobFile.java +++ b/src/java/com/cloudera/sqoop/io/LobFile.java @@ -85,14 +85,14 @@ private LobFile() { public static final int LATEST_LOB_VERSION = 0; static final char [] HEADER_ID_STR = { 'L', 'O', 'B' }; - // Value for entryId to write to the beginning of an IndexSegment. + // Value for entryId to write to the beginning of an IndexSegment. static final long SEGMENT_HEADER_ID = -1; // Value for entryId to write before the finale. static final long SEGMENT_OFFSET_ID = -2; // Value for entryID to write before the IndexTable - static final long INDEX_TABLE_ID = -3; + static final long INDEX_TABLE_ID = -3; /** * Represents a header block in a LobFile. Can write a new header @@ -111,7 +111,7 @@ private static class LobFileHeader implements Writable { public LobFileHeader() { this.version = LATEST_LOB_VERSION; this.startMark = new RecordStartMark(); - this.metaBlock = new MetaBlock(); + this.metaBlock = new MetaBlock(); } /** @@ -271,7 +271,7 @@ public MetaBlock(Map map) { entries.put(entry.getKey(), entry.getValue()); } } - + @Override public Set> entrySet() { return entries.entrySet(); @@ -409,7 +409,7 @@ public void addRecordLen(long recordLen) throws IOException { public void write(DataOutput out) throws IOException { // Write the SEGMENT_HEADER_ID to distinguish this from a LobRecord. WritableUtils.writeVLong(out, SEGMENT_HEADER_ID); - + // The length of the main body of the segment is the length of the // data byte array. int segmentBytesLen = recordLenBytes.getLength(); @@ -444,12 +444,12 @@ public void readFields(DataInput in) throws IOException { reset(); // Reset the iterator allowing the user to yield offset/lengths. } - + // The following methods are used by a Reader to walk through the index // segment and get data about the records described in this segment of // the index. - + private DataInputBuffer dataInputBuf; // The following two fields are advanced by the next() method. @@ -559,7 +559,7 @@ public long getCurRecordStart() { * Describes an IndexSegment. This is one entry in the IndexTable. It * holds the physical location of the IndexSegment in the file, as well * as the range of entryIds and byte ranges corresponding to records - * described by the index subset in the IndexSegment. + * described by the index subset in the IndexSegment. 
*/ private static class IndexTableEntry implements Writable { private long segmentOffset; @@ -792,16 +792,16 @@ private static class V0Writer extends Writer { // The LobIndex we are constructing. private LinkedList indexSegments; // Number of entries in the current IndexSegment. - private int entriesInSegment; + private int entriesInSegment; private IndexTable indexTable; // Number of entries that can be written to a single IndexSegment. - private int maxEntriesPerSegment; + private int maxEntriesPerSegment; // By default we write this many entries per IndexSegment. static final int DEFAULT_MAX_SEGMENT_ENTRIES = 4096; - - // Our OutputStream to the underlying file. + + // Our OutputStream to the underlying file. private DataOutputStream out; // 'out' is layered on top of this stream, which gives us a count @@ -848,7 +848,7 @@ private static class V0Writer extends Writer { this.compressor = codec.createCompressor(); } } - + init(); } @@ -1217,7 +1217,7 @@ private static class V0Reader extends Reader { private long claimedRecordLen; // After we've aligned on a record, this contains its entryId. - private long curEntryId; + private long curEntryId; // After we've aligned on a record, this contains the offset of the // beginning of its RSM from the start of the file. @@ -1437,7 +1437,7 @@ private boolean matchesRsm(byte [] buf) { /** * @return the offset in 'buf' where a RecordStartMark begins, or -1 - * if the RecordStartMark is not present in the buffer. + * if the RecordStartMark is not present in the buffer. */ private int findRecordStartMark(byte [] buf) { byte [] rsm = this.header.getStartMark().getBytes(); @@ -1482,7 +1482,7 @@ private void searchForRecord(long start) throws IOException { LOG.debug("Looking for the first record at/after offset " + start); // Scan through the IndexTable until we find the IndexSegment - // that contains the offset. + // that contains the offset. for (int i = 0; i < indexTable.size(); i++) { IndexTableEntry tableEntry = indexTable.get(i); if (LOG.isDebugEnabled()) { @@ -1495,7 +1495,7 @@ private void searchForRecord(long start) throws IOException { // Seek to the IndexSegment associated with this tableEntry. curIndexSegmentId = i; loadIndexSegment(); - + // Use this index segment. The record index iterator // is at the beginning of the IndexSegment, since we just // read it in. @@ -1525,7 +1525,7 @@ private void searchForRecord(long start) throws IOException { } // If we didn't return inside the loop, then we've searched the entire - // file and it's not there. Advance the IndexSegment iterator to + // file and it's not there. Advance the IndexSegment iterator to // the end of the road so that next() returns false. this.curIndexSegmentId = indexTable.size(); loadIndexSegment(); @@ -1614,7 +1614,7 @@ public boolean next() throws IOException { // Nothing left in the last IndexSegment. LOG.debug("Last index segment is finished; false."); this.curIndexSegment = null; - return false; + return false; } // Determine where the next record starts. 
@@ -1761,7 +1761,7 @@ public static Reader open(Path p, Configuration conf) throws IOException { if (version == 0) { return new V0Reader(p, conf, header, dis, fis, stats[0].getLen()); } else { - throw new IOException("No reader available for LobFile version " + throw new IOException("No reader available for LobFile version " + version); } } diff --git a/src/java/com/cloudera/sqoop/io/LobReaderCache.java b/src/java/com/cloudera/sqoop/io/LobReaderCache.java index 5cd5fcb5..788d5546 100644 --- a/src/java/com/cloudera/sqoop/io/LobReaderCache.java +++ b/src/java/com/cloudera/sqoop/io/LobReaderCache.java @@ -34,7 +34,7 @@ * Singleton pattern. While nothing prevents multiple LobReaderCache * instances, it is most useful to have a single global cache. This cache is * internally synchronized; only one thread can insert or retrieve a reader - * from the cache at a time. + * from the cache at a time. */ public final class LobReaderCache { @@ -78,7 +78,7 @@ public static Path qualify(Path path, Configuration conf) } return path.makeQualified(fs); } - + /** * Open a LobFile for read access, returning a cached reader if one is * available, or a new reader otherwise. diff --git a/src/java/com/cloudera/sqoop/io/NamedFifo.java b/src/java/com/cloudera/sqoop/io/NamedFifo.java index 8e3c54d4..38656cba 100644 --- a/src/java/com/cloudera/sqoop/io/NamedFifo.java +++ b/src/java/com/cloudera/sqoop/io/NamedFifo.java @@ -67,7 +67,7 @@ public void create() throws IOException { public void create(int permissions) throws IOException { String filename = fifoFile.toString(); - // Format permissions as a mode string in base 8. + // Format permissions as a mode string in base 8. String modeStr = Integer.toString(permissions, 8); // Create the FIFO itself. diff --git a/src/java/com/cloudera/sqoop/lib/DelimiterSet.java b/src/java/com/cloudera/sqoop/lib/DelimiterSet.java index 3f92401e..6330ebad 100644 --- a/src/java/com/cloudera/sqoop/lib/DelimiterSet.java +++ b/src/java/com/cloudera/sqoop/lib/DelimiterSet.java @@ -29,7 +29,7 @@ public class DelimiterSet implements Cloneable { private char recordDelim; // records terminated by this. // If these next two fields are '\000', then they are ignored. - private char enclosedBy; + private char enclosedBy; private char escapedBy; // If true, then the enclosed-by character is applied to every diff --git a/src/java/com/cloudera/sqoop/lib/FieldMappable.java b/src/java/com/cloudera/sqoop/lib/FieldMappable.java index cd170c8f..de4e4acc 100644 --- a/src/java/com/cloudera/sqoop/lib/FieldMappable.java +++ b/src/java/com/cloudera/sqoop/lib/FieldMappable.java @@ -27,7 +27,7 @@ public interface FieldMappable { /** - * Returns a map containing all fields of this record. + * Returns a map containing all fields of this record. * @return a map from column names to the object-based values for * this record. The map may not be null, though it may be empty. */ diff --git a/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java b/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java index 56ad7a93..4e1b7599 100644 --- a/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java +++ b/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java @@ -58,7 +58,7 @@ public class LargeObjectLoader implements Closeable { private Path workPath; private FileSystem fs; - // Handles to the open BLOB / CLOB file writers. + // Handles to the open BLOB / CLOB file writers. 
private LobFile.Writer curBlobWriter; private LobFile.Writer curClobWriter; @@ -155,7 +155,7 @@ private LobFile.Writer getClobWriter() throws IOException { */ private String getRelativePath(LobFile.Writer w) { Path writerPath = w.getPath(); - + String writerPathStr = writerPath.toString(); String workPathStr = workPath.toString(); if (!workPathStr.endsWith(File.separator)) { diff --git a/src/java/com/cloudera/sqoop/lib/RecordParser.java b/src/java/com/cloudera/sqoop/lib/RecordParser.java index da42be86..f12ff0fa 100644 --- a/src/java/com/cloudera/sqoop/lib/RecordParser.java +++ b/src/java/com/cloudera/sqoop/lib/RecordParser.java @@ -67,7 +67,7 @@ private enum ParseState { /** * An error thrown when parsing fails. - */ + */ public static class ParseError extends Exception { public ParseError() { super("ParseError"); @@ -112,8 +112,8 @@ public List parseRecord(CharSequence input) throws ParseError { * This list is backed by an internal buffer which is cleared by the * next call to parseRecord(). */ - public List parseRecord(Text input) throws ParseError { - if (null == input) { + public List parseRecord(Text input) throws ParseError { + if (null == input) { throw new ParseError("null input string"); } @@ -286,7 +286,7 @@ record sep halts processing. } break; - + case ENCLOSED_ESCAPE: // Treat this character literally, whatever it is, and return to // enclosed field processing. @@ -342,7 +342,7 @@ record sep halts processing. } // CHECKSTYLE:ON - public boolean isEnclosingRequired() { + public boolean isEnclosingRequired() { return delimiters.isEncloseRequired(); } diff --git a/src/java/com/cloudera/sqoop/lib/SqoopRecord.java b/src/java/com/cloudera/sqoop/lib/SqoopRecord.java index 932d6a55..25756c31 100644 --- a/src/java/com/cloudera/sqoop/lib/SqoopRecord.java +++ b/src/java/com/cloudera/sqoop/lib/SqoopRecord.java @@ -49,7 +49,7 @@ public abstract void loadLargeObjects(LargeObjectLoader objLoader) /** * Inserts the data in this object into the PreparedStatement, starting - * at parameter 'offset'. + * at parameter 'offset'. * @return the number of fields written to the statement. */ public abstract int write(PreparedStatement stmt, int offset) @@ -130,7 +130,7 @@ public void delegate(FieldMapProcessor processor) */ public Map getFieldMap() { // Default implementation does not support field iteration. - // ClassWriter should provide an overriding version. + // ClassWriter should provide an overriding version. throw new RuntimeException( "Got null field map from record. Regenerate your record class."); } diff --git a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java index 9b51fbd5..ce615f98 100755 --- a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java @@ -40,7 +40,7 @@ * enough commands are created. * * This supports a configurable "spill threshold" at which - * point intermediate transactions are committed. + * point intermediate transactions are committed. * * Uses DBOutputFormat/DBConfiguration for configuring the output. * This is used in conjunction with the abstract AsyncSqlRecordWriter @@ -49,7 +49,7 @@ * Clients of this OutputFormat must implement getRecordWriter(); the * returned RecordWriter is intended to subclass AsyncSqlRecordWriter. */ -public abstract class AsyncSqlOutputFormat +public abstract class AsyncSqlOutputFormat extends OutputFormat { /** conf key: number of rows to export per INSERT statement. 
*/ @@ -87,13 +87,13 @@ public abstract class AsyncSqlOutputFormat @Override /** {@inheritDoc} */ - public void checkOutputSpecs(JobContext context) + public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { } @Override /** {@inheritDoc} */ - public OutputCommitter getOutputCommitter(TaskAttemptContext context) + public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException { return new NullOutputCommitter(); } diff --git a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java index da23bbc4..193cf413 100755 --- a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java +++ b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java @@ -50,7 +50,7 @@ public abstract class AsyncSqlRecordWriter private Configuration conf; protected final int rowsPerStmt; // rows to insert per statement. - + // Buffer for records to be put into export SQL statements. private List records; @@ -85,7 +85,7 @@ public AsyncSqlRecordWriter(TaskAttemptContext context) * Allow subclasses access to the Connection instance we hold. * This Connection is shared with the asynchronous SQL exec thread. * Any uses of the Connection must be synchronized on it. - * @return the Connection object used for this SQL transaction. + * @return the Connection object used for this SQL transaction. */ protected final Connection getConnection() { return this.connection; diff --git a/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java index 8fb766ed..98b45bc0 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java +++ b/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java @@ -49,7 +49,7 @@ public class CombineShimRecordReader private int index; private RecordReader rr; - /** + /** * Constructor invoked by CombineFileRecordReader that identifies part of a * CombineFileSplit to use. */ diff --git a/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java index d9ddd861..e6447ad7 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java @@ -51,7 +51,7 @@ *
If the FMP implements Configurable, it will be configured * correctly via ReflectionUtils.
*/ -public class DelegatingOutputFormat +public class DelegatingOutputFormat extends OutputFormat { /** conf key: the FieldMapProcessor class to instantiate. */ @@ -60,7 +60,7 @@ public class DelegatingOutputFormat @Override /** {@inheritDoc} */ - public void checkOutputSpecs(JobContext context) + public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); @@ -71,14 +71,14 @@ public void checkOutputSpecs(JobContext context) @Override /** {@inheritDoc} */ - public OutputCommitter getOutputCommitter(TaskAttemptContext context) + public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException { return new NullOutputCommitter(); } @Override /** {@inheritDoc} */ - public RecordWriter getRecordWriter(TaskAttemptContext context) + public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException { try { return new DelegatingRecordWriter(context); diff --git a/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java b/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java index 799c388f..214c569e 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java +++ b/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java @@ -87,7 +87,7 @@ protected void configureOutputFormat(Job job, String tableName, if (options.shouldUseCompression()) { FileOutputFormat.setCompressOutput(job, true); - + String codecName = options.getCompressionCodec(); Class codecClass; if (codecName == null) { @@ -97,13 +97,13 @@ protected void configureOutputFormat(Job job, String tableName, codecClass = CodecMap.getCodec(codecName, conf).getClass(); } FileOutputFormat.setOutputCompressorClass(job, codecClass); - + if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) { SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK); } } - + Path outputPath = context.getDestination(); FileOutputFormat.setOutputPath(job, outputPath); } diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java b/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java index 39226093..387455b6 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java @@ -41,12 +41,12 @@ */ public class MergeMapperBase extends Mapper { - + public static final Log LOG = LogFactory.getLog( MergeMapperBase.class.getName()); private String keyColName; // name of the key column. - private boolean isNew; // true if this split is from the new dataset. + private boolean isNew; // true if this split is from the new dataset. @Override protected void setup(Context context) diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java b/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java index 3ea25636..96065b1c 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java @@ -72,9 +72,9 @@ public Configuration getConf() { /** @return true if this record came from the "new" dataset. */ public boolean isNewRecord() { return isNew; - } + } - /** + /** * Set the isNew field to 'newVal'. 
*/ public void setNewRecord(boolean newVal) { diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java index 1fc818c7..61f8e9b0 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java @@ -40,7 +40,7 @@ public class MergeTextMapper extends MergeMapperBase { protected void setup(Context c) throws IOException, InterruptedException { Configuration conf = c.getConfiguration(); - Class recordClass = + Class recordClass = (Class) conf.getClass( MergeJob.MERGE_SQOOP_RECORD_KEY, SqoopRecord.class); this.record = ReflectionUtils.newInstance(recordClass, conf); diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java index 2780a648..0ba379b2 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java @@ -100,7 +100,7 @@ protected void configureInputFormat(Job job, String tableName, DataDrivenDBInputFormat.setInput(job, DBWritable.class, tableName, null, null, sqlColNames); - // Configure the actual InputFormat to use. + // Configure the actual InputFormat to use. super.configureInputFormat(job, tableName, tableClassName, splitByCol); } diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java index 0802f319..5e6a1aad 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java @@ -42,7 +42,7 @@ /** * Mapper that starts a 'mysqlimport' process and uses that to export rows from - * HDFS to a MySQL database at high speed. + * HDFS to a MySQL database at high speed. * * map() methods are actually provided by subclasses that read from * SequenceFiles (containing existing SqoopRecords) or text files diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java index e244cb2f..cb42975f 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java @@ -33,7 +33,7 @@ public class MySQLTextExportMapper extends MySQLExportMapper { // End-of-record delimiter. - private String recordEndStr; + private String recordEndStr; @Override protected void setup(Context context) { diff --git a/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java index 38fb1ef2..31d6249b 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java @@ -29,12 +29,12 @@ /** * Oracle-specific SQL formatting overrides default ExportOutputFormat's. 
*/ -public class OracleExportOutputFormat +public class OracleExportOutputFormat extends ExportOutputFormat { @Override /** {@inheritDoc} */ - public RecordWriter getRecordWriter(TaskAttemptContext context) + public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException { try { return new OracleExportRecordWriter(context); diff --git a/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java index a649fa9f..d5339d9c 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java @@ -44,14 +44,14 @@ * * Uses DBOutputFormat/DBConfiguration for configuring the output. */ -public class UpdateOutputFormat +public class UpdateOutputFormat extends AsyncSqlOutputFormat { private static final Log LOG = LogFactory.getLog(UpdateOutputFormat.class); @Override /** {@inheritDoc} */ - public void checkOutputSpecs(JobContext context) + public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); DBConfiguration dbConf = new DBConfiguration(conf); @@ -71,7 +71,7 @@ public void checkOutputSpecs(JobContext context) @Override /** {@inheritDoc} */ - public RecordWriter getRecordWriter(TaskAttemptContext context) + public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException { try { return new UpdateRecordWriter(context); @@ -126,7 +126,7 @@ protected final String getTableName() { return Arrays.copyOf(columnNames, columnNames.length); } } - + /** * @return the column we are using to determine the row to update. */ diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java index b632834a..0327b001 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java @@ -45,7 +45,7 @@ public class DataDrivenDBRecordReader // TODO(aaron): Refactor constructor to use fewer arguments. /** * @param split The InputSplit to read data for - * @throws SQLException + * @throws SQLException */ public DataDrivenDBRecordReader(DBInputFormat.DBInputSplit split, Class inputClass, Configuration conf, Connection conn, diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java index 9a948cd5..6e648a23 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java @@ -40,7 +40,7 @@ public class OracleDBRecordReader private static final Log LOG = LogFactory.getLog(OracleDBRecordReader.class); // CHECKSTYLE:OFF - public OracleDBRecordReader(DBInputFormat.DBInputSplit split, + public OracleDBRecordReader(DBInputFormat.DBInputSplit split, Class inputClass, Configuration conf, Connection conn, DBConfiguration dbConfig, String cond, String [] fields, String table) throws SQLException { @@ -60,14 +60,14 @@ protected String getSelectQuery() { // Oracle-specific codepath to use rownum instead of LIMIT/OFFSET. 
if(dbConf.getInputQuery() == null) { query.append("SELECT "); - + for (int i = 0; i < fieldNames.length; i++) { query.append(fieldNames[i]); if (i != fieldNames.length -1) { query.append(", "); } } - + query.append(" FROM ").append(tableName); if (conditions != null && conditions.length() > 0) { query.append(" WHERE ").append(conditions); @@ -80,7 +80,7 @@ protected String getSelectQuery() { //PREBUILT QUERY query.append(dbConf.getInputQuery()); } - + try { DBInputFormat.DBInputSplit split = getSplit(); if (split.getLength() > 0 && split.getStart() > 0) { diff --git a/src/java/com/cloudera/sqoop/metastore/JobStorage.java b/src/java/com/cloudera/sqoop/metastore/JobStorage.java index 74760b24..06697678 100644 --- a/src/java/com/cloudera/sqoop/metastore/JobStorage.java +++ b/src/java/com/cloudera/sqoop/metastore/JobStorage.java @@ -78,7 +78,7 @@ public abstract void create(String jobName, JobData data) */ public abstract void update(String jobName, JobData data) throws IOException; - + /** * Close any resources opened by the JobStorage system. */ diff --git a/src/java/com/cloudera/sqoop/metastore/hsqldb/AutoHsqldbStorage.java b/src/java/com/cloudera/sqoop/metastore/hsqldb/AutoHsqldbStorage.java index fded5558..b6a530bf 100644 --- a/src/java/com/cloudera/sqoop/metastore/hsqldb/AutoHsqldbStorage.java +++ b/src/java/com/cloudera/sqoop/metastore/hsqldb/AutoHsqldbStorage.java @@ -36,7 +36,7 @@ public class AutoHsqldbStorage extends HsqldbJobStorage { public static final Log LOG = LogFactory.getLog( AutoHsqldbStorage.class.getName()); - + /** * Configuration key specifying whether this storage agent is active. * Defaults to "on" to allow zero-conf local users. diff --git a/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobStorage.java b/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobStorage.java index 181a4995..8d015929 100644 --- a/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobStorage.java +++ b/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobStorage.java @@ -51,9 +51,9 @@ public class HsqldbJobStorage extends JobStorage { public static final Log LOG = LogFactory.getLog( HsqldbJobStorage.class.getName()); - + /** descriptor key identifying the connect string for the metastore. */ - public static final String META_CONNECT_KEY = "metastore.connect.string"; + public static final String META_CONNECT_KEY = "metastore.connect.string"; /** descriptor key identifying the username to use when connecting * to the metastore. @@ -94,14 +94,14 @@ public class HsqldbJobStorage extends JobStorage { "sqoop.property.set.id"; /** Current value for PROPERTY_SET_KEY. */ - private static final String CUR_PROPERTY_SET_ID = "0"; + private static final String CUR_PROPERTY_SET_ID = "0"; // The following are values for propClass in the v0 schema which // describe different aspects of the stored metadata. /** Property class for properties about the stored data itself. */ private static final String PROPERTY_CLASS_SCHEMA = "schema"; - + /** Property class for properties that are loaded into SqoopOptions. */ private static final String PROPERTY_CLASS_SQOOP_OPTIONS = "SqoopOptions"; @@ -636,7 +636,7 @@ private void createJobTable() throws SQLException { } else { break; } - } + } // curTableName contains a table name that does not exist. // Create this table. 
diff --git a/src/java/com/cloudera/sqoop/orm/ClassWriter.java b/src/java/com/cloudera/sqoop/orm/ClassWriter.java index 6f067dd0..61a0d693 100644 --- a/src/java/com/cloudera/sqoop/orm/ClassWriter.java +++ b/src/java/com/cloudera/sqoop/orm/ClassWriter.java @@ -713,7 +713,7 @@ private void generateSetField(Map columnTypes, if (!first) { sb.append(" else"); } - + sb.append(" if (\"" + colName + "\".equals(__fieldName)) {\n"); sb.append(" this." + colName + " = (" + javaType + ") __fieldVal;\n"); diff --git a/src/java/com/cloudera/sqoop/orm/CompilationManager.java b/src/java/com/cloudera/sqoop/orm/CompilationManager.java index 9e05058c..7e3ca629 100644 --- a/src/java/com/cloudera/sqoop/orm/CompilationManager.java +++ b/src/java/com/cloudera/sqoop/orm/CompilationManager.java @@ -224,7 +224,7 @@ public void compile() throws IOException { } } try { - FileUtils.moveFile(fOrig, fDest); + FileUtils.moveFile(fOrig, fDest); } catch (IOException e) { LOG.error("Could not rename " + orig + " to " + dest, e); } diff --git a/src/java/com/cloudera/sqoop/orm/TableClassName.java b/src/java/com/cloudera/sqoop/orm/TableClassName.java index acd31323..d141e2fc 100644 --- a/src/java/com/cloudera/sqoop/orm/TableClassName.java +++ b/src/java/com/cloudera/sqoop/orm/TableClassName.java @@ -100,7 +100,7 @@ public String getClassForTable(String tableName) { /** * @return just the last segment of the class name -- all package info - * stripped. + * stripped. */ public String getShortClassForTable(String tableName) { String fullClass = getClassForTable(tableName); diff --git a/src/java/com/cloudera/sqoop/tool/ImportTool.java b/src/java/com/cloudera/sqoop/tool/ImportTool.java index a891dc92..2245d03b 100644 --- a/src/java/com/cloudera/sqoop/tool/ImportTool.java +++ b/src/java/com/cloudera/sqoop/tool/ImportTool.java @@ -706,7 +706,7 @@ public void applyOptions(CommandLine in, SqoopOptions out) if (in.hasOption(COMPRESS_ARG)) { out.setUseCompression(true); } - + if (in.hasOption(COMPRESSION_CODEC_ARG)) { out.setCompressionCodec(in.getOptionValue(COMPRESSION_CODEC_ARG)); } diff --git a/src/java/com/cloudera/sqoop/tool/JobTool.java b/src/java/com/cloudera/sqoop/tool/JobTool.java index 49c49d66..0389df4b 100644 --- a/src/java/com/cloudera/sqoop/tool/JobTool.java +++ b/src/java/com/cloudera/sqoop/tool/JobTool.java @@ -121,7 +121,7 @@ private int configureChildTool(SqoopOptions childOptions, // The '--' and any subsequent args. String [] extraChildArgv = getElementsAfterDoubleDash(childArgv); - + // Now feed the arguments into the tool itself. try { childOptions = childTool.parseArguments(parseableChildArgv, @@ -253,7 +253,7 @@ private int showJob(SqoopOptions opts) throws IOException { // TODO: This does not show entries in the Configuration // (SqoopOptions.getConf()) which were stored as different from the - // default. + // default. 
return 0; } @@ -392,9 +392,9 @@ public void printHelp(ToolOptions opts) { System.out.println("usage: sqoop " + getToolName() + " [GENERIC-ARGS] [JOB-ARGS] [-- [] [TOOL-ARGS]]"); System.out.println(""); - + opts.printHelp(); - + System.out.println(""); System.out.println("Generic Hadoop command-line arguments:"); System.out.println("(must preceed any tool-specific arguments)"); diff --git a/src/java/com/cloudera/sqoop/tool/MergeTool.java b/src/java/com/cloudera/sqoop/tool/MergeTool.java index 796570b4..cbda6b29 100644 --- a/src/java/com/cloudera/sqoop/tool/MergeTool.java +++ b/src/java/com/cloudera/sqoop/tool/MergeTool.java @@ -115,7 +115,7 @@ protected RelatedOptions getMergeOptions() { .hasArg().withDescription("Key column to use to join results") .withLongOpt(MERGE_KEY_ARG) .create()); - + // Since the "common" options aren't used in the merge tool, // add these settings here. mergeOpts.addOption(OptionBuilder diff --git a/src/java/com/cloudera/sqoop/tool/MetastoreTool.java b/src/java/com/cloudera/sqoop/tool/MetastoreTool.java index 95edb90c..ad254b60 100644 --- a/src/java/com/cloudera/sqoop/tool/MetastoreTool.java +++ b/src/java/com/cloudera/sqoop/tool/MetastoreTool.java @@ -41,7 +41,7 @@ public class MetastoreTool extends BaseSqoopTool { private HsqldbMetaStore metastore; // If set to true, shut an existing metastore down. - private boolean shutdown = false; + private boolean shutdown = false; public MetastoreTool() { super("metastore"); diff --git a/src/java/com/cloudera/sqoop/tool/SqoopTool.java b/src/java/com/cloudera/sqoop/tool/SqoopTool.java index 785faa84..a5ae2e18 100644 --- a/src/java/com/cloudera/sqoop/tool/SqoopTool.java +++ b/src/java/com/cloudera/sqoop/tool/SqoopTool.java @@ -172,7 +172,7 @@ private static Configuration loadPluginsFromConfDir(Configuration conf) { String confDirName = System.getenv("SQOOP_CONF_DIR"); if (null == confDirName) { LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. " - + "Cannot check for additional configuration."); + + "Cannot check for additional configuration."); return conf; } diff --git a/src/java/com/cloudera/sqoop/tool/ToolDesc.java b/src/java/com/cloudera/sqoop/tool/ToolDesc.java index dc56cce6..cd0e66ef 100644 --- a/src/java/com/cloudera/sqoop/tool/ToolDesc.java +++ b/src/java/com/cloudera/sqoop/tool/ToolDesc.java @@ -28,7 +28,7 @@ public final class ToolDesc { /** - * Main c'tor; sets all fields that describe a SqoopTool. + * Main c'tor; sets all fields that describe a SqoopTool. */ public ToolDesc(String name, Class cls, String desc) { this.toolName = name; diff --git a/src/java/com/cloudera/sqoop/util/AppendUtils.java b/src/java/com/cloudera/sqoop/util/AppendUtils.java index 8f26cdf7..d365e838 100644 --- a/src/java/com/cloudera/sqoop/util/AppendUtils.java +++ b/src/java/com/cloudera/sqoop/util/AppendUtils.java @@ -218,7 +218,7 @@ private String getFileExtension(String filename) { /** * Creates a unique path object inside the sqoop temporary directory. - * + * * @param tableName * @return a path pointing to the temporary directory */ diff --git a/src/java/com/cloudera/sqoop/util/AsyncSink.java b/src/java/com/cloudera/sqoop/util/AsyncSink.java index 9b748d89..57de8f9f 100644 --- a/src/java/com/cloudera/sqoop/util/AsyncSink.java +++ b/src/java/com/cloudera/sqoop/util/AsyncSink.java @@ -27,7 +27,7 @@ * When the stream is closed, the thread should terminate. */ public abstract class AsyncSink { - + /** * Create and run a thread to handle input from the provided InputStream. 
* When processStream returns, the thread should be running; it should diff --git a/src/java/com/cloudera/sqoop/util/Executor.java b/src/java/com/cloudera/sqoop/util/Executor.java index 46da59af..4342d93d 100644 --- a/src/java/com/cloudera/sqoop/util/Executor.java +++ b/src/java/com/cloudera/sqoop/util/Executor.java @@ -32,7 +32,7 @@ * */ public final class Executor { - + public static final Log LOG = LogFactory.getLog(Executor.class.getName()); private Executor() { @@ -51,7 +51,7 @@ public static int exec(String [] args) throws IOException { /** * Run a command via Runtime.exec(), with its stdout and stderr streams * directed to be handled by threads generated by AsyncSinks. - * Block until the child process terminates. + * Block until the child process terminates. * * @return the exit status of the ran program */ @@ -78,7 +78,7 @@ public static int exec(String [] args, String [] envp, AsyncSink outSink, // dispatch its stdout and stderr to stream sinks if available. if (null != outSink) { outSink.processStream(p.getInputStream()); - } + } if (null != errSink) { errSink.processStream(p.getErrorStream()); diff --git a/src/perftest/ExportStressTest.java b/src/perftest/ExportStressTest.java index 4d4e29ed..6a772306 100644 --- a/src/perftest/ExportStressTest.java +++ b/src/perftest/ExportStressTest.java @@ -118,7 +118,7 @@ public void runExport(String connectStr, String username) throws Exception { options.setLinesTerminatedBy('\n'); options.setFieldsTerminatedBy(','); options.setExplicitDelims(true); - + SqoopTool exportTool = new ExportTool(); Sqoop sqoop = new Sqoop(exportTool, getConf(), options); int ret = Sqoop.runSqoop(sqoop, new String[0]); diff --git a/src/perftest/LobFileStressTest.java b/src/perftest/LobFileStressTest.java index b2330198..f75c3d18 100644 --- a/src/perftest/LobFileStressTest.java +++ b/src/perftest/LobFileStressTest.java @@ -48,7 +48,7 @@ public LobFileStressTest() { private Path getPath(boolean compress) { if (compress) { return new Path("compressed.lob"); - } else { + } else { return new Path("integers.lob"); } } @@ -76,7 +76,7 @@ private int getNumRecords(boolean compress) { return 5000000; // 5 million; the compressor is just too slow for 40M. } } - + private void writeIntegerFile(boolean compress) throws Exception { boolean passed = false; try { @@ -297,7 +297,7 @@ private void checkBigRecord(LobFile.Reader r, long expectedId) throw new Exception("Couldn't read all the data! expected " + expected + " more bytes"); } - + if (is.read() != -1) { throw new Exception("Got an extra byte! Expected no more data."); } @@ -305,7 +305,7 @@ private void checkBigRecord(LobFile.Reader r, long expectedId) private void testBigFile(boolean compress) throws Exception { // Write a file containing 5 GB records. - + final int NUM_RECORDS = 5; boolean passed = false; diff --git a/src/test/checkstyle.xml b/src/test/checkstyle.xml index aa5eae7c..5b81d24e 100644 --- a/src/test/checkstyle.xml +++ b/src/test/checkstyle.xml @@ -131,6 +131,12 @@ + + + + + + diff --git a/src/test/com/cloudera/sqoop/TestAppendUtils.java b/src/test/com/cloudera/sqoop/TestAppendUtils.java index c71fdff6..6a085e27 100644 --- a/src/test/com/cloudera/sqoop/TestAppendUtils.java +++ b/src/test/com/cloudera/sqoop/TestAppendUtils.java @@ -55,7 +55,7 @@ public class TestAppendUtils extends ImportJobTestCase { /** * Create the argv to pass to Sqoop. - * + * * @return the argv as an array of strings. 
*/ protected ArrayList getOutputlessArgv(boolean includeHadoopFlags, @@ -143,7 +143,7 @@ public int compare(FileStatus fs1, FileStatus fs2) { return fs1.getPath().toString().compareTo(fs2.getPath().toString()); } } - + /** @return a concat. string with file-creation dates excluding folders. */ private String getFileCreationTimeImage(FileSystem fs, Path outputPath, int fileCount) throws IOException { @@ -175,7 +175,7 @@ private int getFilePartition(Path file) { /** * Test for ouput path file-count increase, current files untouched and new * correct partition number. - * + * * @throws IOException */ public void runAppendTest(ArrayList args, Path outputPath) diff --git a/src/test/com/cloudera/sqoop/TestCompression.java b/src/test/com/cloudera/sqoop/TestCompression.java index 7ad8c0f7..3ad0b2e2 100644 --- a/src/test/com/cloudera/sqoop/TestCompression.java +++ b/src/test/com/cloudera/sqoop/TestCompression.java @@ -112,7 +112,7 @@ public void runSequenceFileCompressionTest(CompressionCodec codec, getTableName()); reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString()); - + if (codec == null) { codec = new GzipCodec(); } @@ -150,11 +150,11 @@ public void runSequenceFileCompressionTest(CompressionCodec codec, public void runTextCompressionTest(CompressionCodec codec, int expectedNum) throws IOException { - + String [] columns = HsqldbTestServer.getFieldNames(); String [] argv = getArgv(true, columns, codec, "--as-textfile"); runImport(argv); - + Configuration conf = new Configuration(); if (!BaseSqoopTestCase.isOnPhysicalCluster()) { conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS); diff --git a/src/test/com/cloudera/sqoop/TestConnFactory.java b/src/test/com/cloudera/sqoop/TestConnFactory.java index 9a565f4e..d8453ada 100644 --- a/src/test/com/cloudera/sqoop/TestConnFactory.java +++ b/src/test/com/cloudera/sqoop/TestConnFactory.java @@ -27,7 +27,7 @@ import junit.framework.TestCase; -import java.io.IOException; +import java.io.IOException; import java.util.Map; import java.sql.Connection; import java.sql.ResultSet; @@ -111,7 +111,7 @@ public void close() { } public String [] listDatabases() { - return null; + return null; } public String [] listTables() { diff --git a/src/test/com/cloudera/sqoop/TestExportUpdate.java b/src/test/com/cloudera/sqoop/TestExportUpdate.java index ffa7837a..7f76e3ff 100644 --- a/src/test/com/cloudera/sqoop/TestExportUpdate.java +++ b/src/test/com/cloudera/sqoop/TestExportUpdate.java @@ -57,7 +57,7 @@ public void setUp() { } } } - + @Override protected String getTablePrefix() { return "UPDATE_TABLE_"; @@ -107,7 +107,7 @@ private void populateDatabase(int numRows) throws SQLException { * the same. * @param startOffsets is an optional list of row ids/values for a/c * which are the record ids at which the update files begin. - * For instance, if numFiles=3, updatesPerFile=2, and keyCol=0 then + * For instance, if numFiles=3, updatesPerFile=2, and keyCol=0 then * if startOffsets is {5, 10, 12}, files will be generated to update * rows with A=5,6; A=10,11; A=12,13. * @@ -132,7 +132,7 @@ private void createUpdateFiles(int numFiles, int updatesPerFile, // Otherwise, just carry over from the previous file iteration. rowId = startOffsets[i]; } - + for (int j = 0; j < updatesPerFile; j++) { w.write(getUpdateStringForRow(keyCol, rowId++)); } @@ -353,7 +353,7 @@ public void testSubsetUpdate2() throws Exception { // Update only some of the rows in the db. Also include some // updates that do not affect actual rows in the table. // These should just be ignored. 
- + populateDatabase(10); // Create two files that update four rows each. // File0 updates A=-2..1 (-2 and -1 don't exist). diff --git a/src/test/com/cloudera/sqoop/TestTargetDir.java b/src/test/com/cloudera/sqoop/TestTargetDir.java index 2ee5a715..5fe14ea1 100644 --- a/src/test/com/cloudera/sqoop/TestTargetDir.java +++ b/src/test/com/cloudera/sqoop/TestTargetDir.java @@ -43,7 +43,7 @@ public class TestTargetDir extends ImportJobTestCase { /** * Create the argv to pass to Sqoop. - * + * * @return the argv as an array of strings. */ protected ArrayList getOutputArgv(boolean includeHadoopFlags) { diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java index ce48aabe..35de2fdc 100644 --- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java +++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java @@ -158,7 +158,7 @@ private void runImportTest(String tableName, String [] types, // create a table and populate it with a row... createTableWithColTypes(types, values); - + // set up our mock hive shell to compare our generated script // against the correct expected one. SqoopOptions options = getSqoopOptions(args, tool); diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/com/cloudera/sqoop/io/TestLobFile.java index fd626972..8e4c7a26 100644 --- a/src/test/com/cloudera/sqoop/io/TestLobFile.java +++ b/src/test/com/cloudera/sqoop/io/TestLobFile.java @@ -91,7 +91,7 @@ public void setUp() throws Exception { private void verifyClobFile(Path p, String... expectedRecords) throws Exception { - + LobFile.Reader reader = LobFile.open(p, conf); int recNum = 0; @@ -226,7 +226,7 @@ private void runLineAndRecordTest(Path p, String firstLine, String s = new String(chars); assertEquals(records[1], s); - // Close the reader before we consume the entire file. + // Close the reader before we consume the entire file. reader.close(); assertFalse(reader.isRecordAvailable()); } @@ -309,7 +309,7 @@ public void testSeekToRecord() throws Exception { char [] chars = buf.array(); String s = new String(chars); assertEquals(records[2], s); - + r.close(); reader.close(); } diff --git a/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java b/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java index a164ace8..e4479ced 100644 --- a/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java +++ b/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java @@ -248,7 +248,7 @@ public void testSplittingTextFile() throws IOException { try { fis.close(); } catch (IOException ioe) { - // ignored; may be generated because fis closed in verifyFileContents. + // ignored; may be generated because fis closed in verifyFileContents. 
} } diff --git a/src/test/com/cloudera/sqoop/lib/TestClobRef.java b/src/test/com/cloudera/sqoop/lib/TestClobRef.java index 067ab5c6..188fcc10 100644 --- a/src/test/com/cloudera/sqoop/lib/TestClobRef.java +++ b/src/test/com/cloudera/sqoop/lib/TestClobRef.java @@ -134,7 +134,7 @@ private void doExternalTest(final String data, final String filename) w.close(); lw.close(); - String refString = "externalLob(lf," + filename + String refString = "externalLob(lf," + filename + "," + off + "," + len + ")"; ClobRef clob = ClobRef.parse(refString); assertTrue(clob.isExternal()); diff --git a/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java b/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java index a1d524b6..e56e665a 100644 --- a/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java +++ b/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java @@ -25,9 +25,9 @@ * Test that the field formatter works in a variety of configurations. */ public class TestFieldFormatter extends TestCase { - + public void testAllEmpty() { - String result = FieldFormatter.escapeAndEnclose("", + String result = FieldFormatter.escapeAndEnclose("", new DelimiterSet(DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false)); assertEquals("", result); @@ -92,7 +92,7 @@ public void testEmptyCharToEscapeString() { String s = "" + nul; assertEquals("\000", s); } - + public void testEscapeCentralQuote() { String result = FieldFormatter.escapeAndEnclose("foo\"bar", new DelimiterSet(',', '\n', '\"', '\\', false)); diff --git a/src/test/com/cloudera/sqoop/lib/TestRecordParser.java b/src/test/com/cloudera/sqoop/lib/TestRecordParser.java index 721ec416..5f811909 100644 --- a/src/test/com/cloudera/sqoop/lib/TestRecordParser.java +++ b/src/test/com/cloudera/sqoop/lib/TestRecordParser.java @@ -99,7 +99,7 @@ private List list(String [] items) { return asList; } - + public void testEmptyLine() throws RecordParser.ParseError { // an empty line should return no fields. 
@@ -256,7 +256,7 @@ public void testTwoFields5() throws RecordParser.ParseError { assertListsEqual(null, list(strings), parser.parseRecord("field1,\"field2\"")); } - + public void testRequiredQuotes0() throws RecordParser.ParseError { RecordParser parser = new RecordParser( new DelimiterSet(',', '\n', '\"', '\\', true)); diff --git a/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java b/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java index 96f4e1b2..b870ef84 100644 --- a/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java +++ b/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java @@ -89,7 +89,7 @@ public class OracleManagerTest extends ImportJobTestCase { // instance variables populated during setUp, used during tests private OracleManager manager; - + @Override protected boolean useHsqldbTestServer() { return false; @@ -219,7 +219,7 @@ private void runOracleTest(String [] expectedResults) throws IOException { ioe.printStackTrace(); fail(ioe.toString()); } - + File f = new File(filePath.toString()); assertTrue("Could not find imported data file", f.exists()); BufferedReader r = null; diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java index 03e2883b..3d2c5c4c 100644 --- a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java +++ b/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java @@ -44,7 +44,7 @@ public class TestDataDrivenDBInputFormat extends HadoopTestCase { TestDataDrivenDBInputFormat.class); private static final String DB_NAME = "dddbif"; - private static final String DB_URL = + private static final String DB_URL = "jdbc:hsqldb:mem:" + DB_NAME; private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver"; diff --git a/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java b/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java index 8bc039a6..78b2ab11 100644 --- a/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java +++ b/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java @@ -34,7 +34,7 @@ import junit.framework.TestCase; -import java.io.IOException; +import java.io.IOException; import java.sql.Connection; /** @@ -156,7 +156,7 @@ public void testCreateDeleteJob() throws IOException { } catch (IOException ioe) { // This is expected. Continue. } - + // Now delete the job. storage.delete("versionJob"); diff --git a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java b/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java index 8ae4b6dc..5d7811f2 100644 --- a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java +++ b/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java @@ -220,7 +220,7 @@ protected void verifyExport(int expectedNumRecords, Connection conn) ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); int actualNumRecords = 0; ResultSet rs = null; - try { + try { rs = statement.executeQuery(); try { rs.next(); diff --git a/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java b/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java index 29117361..c523b058 100644 --- a/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java +++ b/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java @@ -57,7 +57,7 @@ * void dropTableIfExists(tableName) -- how to drop a table that may not exist. * void createTableWithColTypes() -- how to create a table with a set of cols. 
* Configuration getConf() -- specifies config properties specific to a test. - * SqoopOptions getSqoopOptions(conf) -- Instantiates the SqoopOptions to use. + * SqoopOptions getSqoopOptions(conf) -- Instantiates the SqoopOptions to use. * List<String> getExtraArgs() -- specifies extra argv elements. */ public abstract class ManagerCompatTestCase extends ImportJobTestCase { @@ -229,7 +229,7 @@ protected String getTimestampType() { protected String getClobType() { return "CLOB"; } - + /** * Define a BLOB column that can contain up to 16 MB of data. */ @@ -253,7 +253,7 @@ protected String getTinyIntType() { //////// These methods indicate how databases respond to various datatypes. //////// Since our comparisons are all string-based, these return strings. - + /** @return How we insert the value TRUE represented as an int. */ protected String getTrueBoolNumericSqlInput() { return "1"; @@ -263,7 +263,7 @@ protected String getTrueBoolNumericSqlInput() { protected String getFalseBoolNumericSqlInput() { return "0"; } - + /** @return How we insert the value TRUE represented as a boolean literal. */ protected String getTrueBoolLiteralSqlInput() { return "true"; @@ -545,7 +545,7 @@ protected String getVarBinarySeqOutput(String asInserted) { * octets, in lower case (e.g., 'ab f0 0f 12 38'). * * @param str the input string of hex digits - * @return the input string as space-separated lower-case octets. + * @return the input string as space-separated lower-case octets. */ protected String toLowerHexString(String str) { // The inserted text is a hex string of the form 'ABABABAB'. diff --git a/src/test/com/cloudera/sqoop/testutil/MockResultSet.java b/src/test/com/cloudera/sqoop/testutil/MockResultSet.java index 3bf9d604..57401357 100644 --- a/src/test/com/cloudera/sqoop/testutil/MockResultSet.java +++ b/src/test/com/cloudera/sqoop/testutil/MockResultSet.java @@ -1008,12 +1008,12 @@ public boolean isWrapperFor(Class iface) throws SQLException { public T unwrap(Class iface) throws SQLException { return null; } - + @Override public byte[] getBytes(int columnIndex) throws SQLException { return null; } - + @Override public byte[] getBytes(String columnLabel) throws SQLException { return null; diff --git a/src/test/com/cloudera/sqoop/testutil/ReparseMapper.java b/src/test/com/cloudera/sqoop/testutil/ReparseMapper.java index 3e563a47..7654ee09 100644 --- a/src/test/com/cloudera/sqoop/testutil/ReparseMapper.java +++ b/src/test/com/cloudera/sqoop/testutil/ReparseMapper.java @@ -38,7 +38,7 @@ /** - * Test harness mapper. Instantiate the user's specific type, parse() the input + * Test harness mapper. Instantiate the user's specific type, parse() the input * line of text, and throw an IOException if the output toString() line of text * differs. */ diff --git a/src/test/com/cloudera/sqoop/testutil/SeqFileReader.java b/src/test/com/cloudera/sqoop/testutil/SeqFileReader.java index f8108ac3..2a24138c 100644 --- a/src/test/com/cloudera/sqoop/testutil/SeqFileReader.java +++ b/src/test/com/cloudera/sqoop/testutil/SeqFileReader.java @@ -32,7 +32,7 @@ /** * Utility class to help with test cases. Just reads the first (k, v) pair * from a SequenceFile and returns the value part. 
- * + * * */ public final class SeqFileReader { diff --git a/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java b/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java index 04c44b9e..334190ba 100644 --- a/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java +++ b/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java @@ -63,7 +63,7 @@ public List getTools() { */ public static class FooTool extends BaseSqoopTool { /** Holds the name of the last user we "operated" as. */ - private static String lastUser; + private static String lastUser; static String getLastUser() { return lastUser; } @@ -90,7 +90,7 @@ public void configureOptions(ToolOptions toolOptions) { } @Override - public void applyOptions(CommandLine in, SqoopOptions out) + public void applyOptions(CommandLine in, SqoopOptions out) throws InvalidOptionsException { applyCommonOptions(in, out); } @@ -101,7 +101,7 @@ public void validateOptions(SqoopOptions options) validateCommonOptions(options); } } - + public void testPlugin() { // Register the plugin with SqoopTool. Configuration pluginConf = new Configuration();
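
The src/test/checkstyle.xml hunk above lost its XML element content in transit, so its six added lines appear empty. For reference only, a trailing-whitespace rule of the kind the patch subject describes is conventionally written with Checkstyle's stock RegexpSingleline check. The module name, the format property, and the \s+$ pattern are standard Checkstyle; the message wording below is an assumption, not the verbatim hunk:

    <!-- Sketch of a trailing-whitespace rule; not the verbatim patch content. -->
    <module name="RegexpSingleline">
      <!-- One or more whitespace characters anchored at end of line. -->
      <property name="format" value="\s+$"/>
      <!-- Message text is assumed; the original patch may differ. -->
      <property name="message" value="Line has trailing whitespace."/>
    </module>

RegexpSingleline is a Checker-level check (applied to every line of every audited file) rather than a TreeWalker check, which is why the bulk of this patch is a one-time cleanup: any trailing space left in the tree would be flagged on the next Checkstyle run.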