Mirror of https://github.com/apache/sqoop.git (synced 2025-05-17 09:20:43 +08:00)
Enable findbugs on build and fix all warnings.

Some spurious warnings (and inconsequential warnings in test code) have been disabled by src/test/findbugsExcludeFile.xml.

From: Aaron Kimball <aaron@cloudera.com>
git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149874 13f79535-47bb-0310-9956-ffa450edef68

parent 7214230695
commit 8147d262d8
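Judging from the build.xml changes below, the analysis would presumably be run by invoking the new findbugs Ant target with -Dfindbugs.home pointing at a local FindBugs installation: the warn-findbugs-unset target fails with "You need to set -Dfindbugs.home=/path/to/findbugs" when the property is missing, check-for-findbugs then sets findbugs.present, and the XML and HTML reports are written under ${build.dir}/findbugs.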
build.xml (49 lines changed)

@@ -38,6 +38,7 @@
 <property name="build.dir" location="${basedir}/build" />
 <property name="build.classes" location="${build.dir}/classes"/>
 <property name="build.test" location="${build.dir}/test"/>
+<property name="build.test.classes" location="${build.test}/classes" />
 <property name="test.log.dir" location="${build.dir}/test/logs"/>
 <property name="dist.dir" location="${build.dir}/${artifact.name}" />
 <property name="tar.file" location="${build.dir}/${artifact.name}.tar.gz" />
@@ -52,6 +53,15 @@
 <property name="test.output" value="no"/>
 <property name="test.timeout" value="300000"/>
 
+<!-- static analysis -->
+<property name="findbugs.out.dir" value="${build.dir}/findbugs" />
+<property name="findbugs.output.xml.file"
+    value="${findbugs.out.dir}/report.xml" />
+<property name="findbugs.output.html.file"
+    value="${findbugs.out.dir}/report.html" />
+<property name="findbugs.excludes"
+    location="${test.dir}/findbugsExcludeFile.xml" />
+
 <!-- When testing with non-free JDBC drivers, override this parameter
      to contain the path to the driver library dir.
 -->
@@ -93,7 +103,7 @@
 
 <!-- Classpath for unit tests (superset of compile.classpath) -->
 <path id="test.classpath">
-<pathelement location="${build.test}" />
+<pathelement location="${build.test.classes}" />
 <path refid="${name}.test.classpath"/>
 <path refid="compile.classpath"/>
 </path>
@@ -118,12 +128,12 @@
 
 <target name="compile-test" depends="compile, ivy-retrieve-test"
     description="Compile test classes">
-<mkdir dir="${build.test}" />
+<mkdir dir="${build.test.classes}" />
 <javac
     encoding="${build.encoding}"
     srcdir="${test.dir}"
     includes="**/*.java"
-    destdir="${build.test}"
+    destdir="${build.test.classes}"
     debug="${javac.debug}">
 <classpath>
 <path refid="test.classpath"/>
@@ -228,6 +238,9 @@
     timeout="${test.timeout}"
     dir="${build.test}/data">
 
+<!-- enable asserts in tests -->
+<jvmarg value="-ea" />
+
 <!-- uncomment this if you want to attach a debugger -->
 <!--
 <jvmarg line="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=2601" />
@@ -310,6 +323,36 @@
 <delete dir="${build.dir}"/>
 </target>
 
+<target name="findbugs" depends="check-for-findbugs,jar,compile-test"
+    if="findbugs.present" description="Run FindBugs">
+<taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
+    classpath="${findbugs.home}/lib/findbugs-ant.jar" />
+<mkdir dir="${findbugs.out.dir}"/>
+<findbugs home="${findbugs.home}" output="xml:withMessages"
+    outputFile="${findbugs.output.xml.file}" effort="max"
+    excludeFilter="${findbugs.excludes}">
+<auxClasspath>
+<path refid="compile.classpath"/>
+</auxClasspath>
+<sourcePath path="${src.dir}" />
+<sourcePath path="${test.dir}" />
+<class location="${build.dir}/${dest.jar}" />
+<class location="${build.test.classes}" />
+</findbugs>
+<xslt style="${findbugs.home}/src/xsl/default.xsl"
+    in="${findbugs.output.xml.file}"
+    out="${findbugs.output.html.file}" />
+</target>
+
+<target name="warn-findbugs-unset" unless="findbugs.home">
+<fail message="You need to set -Dfindbugs.home=/path/to/findbugs" />
+</target>
+
+<target name="check-for-findbugs" depends="warn-findbugs-unset">
+<available property="findbugs.present"
+    file="${findbugs.home}/lib/findbugs.jar" />
+</target>
+
 <target name="ivy-probe-antlib" >
 <condition property="ivy.found">
 <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
@@ -61,7 +61,7 @@ public void importTable(ImportJobContext context)
 LOG.warn("Direct-mode import from MySQL does not support column");
 LOG.warn("selection. Falling back to JDBC-based import.");
 // Don't warn them "This could go faster..."
-MySQLManager.warningPrinted = true;
+MySQLManager.markWarningPrinted();
 // Use JDBC-based importTable() method.
 super.importTable(context);
 return;
@@ -281,7 +281,6 @@ public void importTable(ImportJobContext context)
 throws IOException, ImportException {
 
 String tableName = context.getTableName();
-String jarFile = context.getJarFile();
 SqoopOptions options = context.getOptions();
 
 LOG.info("Beginning psql fast path import");
@@ -45,7 +45,7 @@ public class MySQLManager extends GenericJdbcManager {
 private static final String DRIVER_CLASS = "com.mysql.jdbc.Driver";
 
 // set to true after we warn the user that we can use direct fastpath.
-protected static boolean warningPrinted = false;
+private static boolean warningPrinted = false;
 
 private Statement lastStatement;
 
@@ -114,7 +114,7 @@ public void importTable(ImportJobContext context)
 LOG.warn("This transfer can be faster! Use the --direct");
 LOG.warn("option to exercise a MySQL-specific fast path.");
 
-MySQLManager.warningPrinted = true; // don't display this twice.
+MySQLManager.markWarningPrinted(); // don't display this twice.
 }
 }
 
@@ -124,6 +124,13 @@ public void importTable(ImportJobContext context)
 super.importTable(context);
 }
 
+/**
+ * Set a flag to prevent printing the --direct warning twice.
+ */
+protected static void markWarningPrinted() {
+MySQLManager.warningPrinted = true;
+}
+
 /**
 * MySQL allows TIMESTAMP fields to have the value '0000-00-00 00:00:00',
 * which causes errors in import. If the user has not set the
@@ -191,6 +198,7 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
 PreparedStatement statement = null;
 statement = this.getConnection().prepareStatement(stmt,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+this.lastStatement = statement;
 statement.setFetchSize(Integer.MIN_VALUE); // MySQL: read row-at-a-time.
 if (null != args) {
 for (int i = 0; i < args.length; i++) {
@@ -199,7 +207,6 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
 }
 
 LOG.info("Executing SQL statement: " + stmt);
-this.lastStatement = statement;
 return statement.executeQuery();
 }
 
@@ -80,8 +80,6 @@ public void importTable(ImportJobContext context)
 // The user probably should have requested --direct to invoke pg_dump.
 // Display a warning informing them of this fact.
 if (!PostgresqlManager.warningPrinted) {
-String connectString = context.getOptions().getConnectString();
-
 LOG.warn("It looks like you are importing from postgresql.");
 LOG.warn("This transfer can be faster! Use the --direct");
 LOG.warn("option to exercise a postgresql-specific fast path.");
@@ -117,6 +115,7 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
 PreparedStatement statement = null;
 statement = this.getConnection().prepareStatement(stmt,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+this.lastStatement = statement;
 statement.setFetchSize(POSTGRESQL_FETCH_SIZE);
 if (null != args) {
 for (int i = 0; i < args.length; i++) {
@@ -125,7 +124,6 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
 }
 
 LOG.info("Executing SQL statement: " + stmt);
-this.lastStatement = statement;
 return statement.executeQuery();
 }
 
@@ -326,6 +326,7 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
 PreparedStatement statement = null;
 statement = this.getConnection().prepareStatement(stmt,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+this.lastStatement = statement;
 if (null != args) {
 for (int i = 0; i < args.length; i++) {
 statement.setObject(i + 1, args[i]);
@@ -333,7 +334,6 @@ protected ResultSet execute(String stmt, Object... args) throws SQLException {
 }
 
 LOG.info("Executing SQL statement: " + stmt);
-this.lastStatement = statement;
 return statement.executeQuery();
 }
 
@@ -72,8 +72,6 @@ public class MySQLDumpInputFormat extends DataDrivenDBInputFormat {
 public static class MySQLDumpRecordReader
 extends RecordReader<String, NullWritable> {
 
-private InputSplit split;
-
 private boolean delivered;
 private String clause;
 
@@ -109,7 +107,6 @@ public float getProgress() {
 
 @Override
 public void initialize(InputSplit split, TaskAttemptContext context) {
-this.split = split;
 DataDrivenDBInputFormat.DataDrivenDBInputSplit dbSplit =
 (DataDrivenDBInputFormat.DataDrivenDBInputSplit) split;
 
@@ -271,7 +271,9 @@ public void run() {
 try {
 fields = MYSQLDUMP_PARSER.parseRecord(charbuf);
 } catch (RecordParser.ParseError pe) {
-LOG.warn("ParseError reading from mysqldump: " + pe.toString() + "; record skipped");
+LOG.warn("ParseError reading from mysqldump: "
+    + pe.toString() + "; record skipped");
+continue; // Skip emitting this row.
 }
 
 // For all of the output fields, emit them using the delimiters the user chooses.
@@ -49,7 +49,7 @@ public int join() throws InterruptedException {
 * Run a background thread that reads and ignores the
 * contents of the stream.
 */
-private class IgnoringThread extends Thread {
+private static class IgnoringThread extends Thread {
 
 private InputStream stream;
 
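Several of the hunks above replace direct assignments to the static warningPrinted flag, formerly a protected field written from subclass instance methods, with a call to the new static markWarningPrinted() accessor; writing a static field from an instance method is one of the things FindBugs reports. A minimal sketch of the before-and-after shape, using hypothetical class names that only stand in for the managers changed above, might look like this:

    // Sketch only: hypothetical class names, not part of the commit.
    class BaseManager {
      // Before: protected static boolean warningPrinted = false; and subclasses
      // assigned BaseManager.warningPrinted = true; directly from instance methods.
      private static boolean warningPrinted = false;

      /** After: subclasses record the warning through a static accessor instead. */
      protected static void markWarningPrinted() {
        warningPrinted = true;
      }

      protected static boolean isWarningPrinted() { // hypothetical read-side helper
        return warningPrinted;
      }
    }

    class DirectManager extends BaseManager {
      void warnAboutFastPathOnce() {
        if (!isWarningPrinted()) {
          System.err.println("This transfer can be faster with --direct.");
          markWarningPrinted(); // replaces the old direct field write
        }
      }
    }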
src/test/findbugsExcludeFile.xml (new file, 67 lines)

@@ -0,0 +1,67 @@
+<?xml version="1.0"?>
+
+<!--
+ Licensed to Cloudera, Inc. under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+
+<!--
+ This file enumerates all the findbugs warnings that we want to suppress.
+ If you add a spurious warning, you should add it to this file so that it
+ does not generate warnings in the official report.
+
+ For each exception you add, include a comment in your <Match> block
+ explaining why this is not a bug.
+-->
+<FindBugsFilter>
+<Match>
+<!-- SQL db can return null for a boolean column; so can we. -->
+<Class name="org.apache.hadoop.sqoop.lib.JdbcWritableBridge" />
+<Method name="readBoolean" />
+<Bug pattern="NP_BOOLEAN_RETURN_NULL" />
+</Match>
+<Match>
+<!-- This mapper intentially triggers an NPE to cause an exception
+ which the test case much catch. -->
+<Class name="org.apache.hadoop.sqoop.mapreduce.TestImportJob$NullDereferenceMapper" />
+<Method name="map" />
+<Bug pattern="NP_ALWAYS_NULL" />
+</Match>
+
+<!-- The following broad categories suppress warnings in test code that do
+ not need to be rigidly upheld. -->
+<Match>
+<!-- Performance warnings are ignored in test code. -->
+<Class name="~org\.apache\.hadoop\.sqoop\..*Test.*" />
+<Bug category="PERFORMANCE" />
+</Match>
+<Match>
+<!-- More performance warnings to suppress in tests. -->
+<Class name="~org\.apache\.hadoop\.sqoop\..*Test.*" />
+<Bug pattern="SBSC_USE_STRINGBUFFER_CONCATENATION" />
+</Match>
+<Match>
+<!-- Security warnings are ignored in test code. -->
+<Class name="~org\.apache\.hadoop\.sqoop\..*Test.*" />
+<Bug category="SECURITY" />
+</Match>
+<Match>
+<!-- Ok to use methods to generate SQL statements in tests. -->
+<Class name="~org\.apache\.hadoop\.sqoop\..*Test.*" />
+<Bug pattern="SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING" />
+</Match>
+
+</FindBugsFilter>
@@ -52,7 +52,7 @@ public void testExceptionForNoManager() {
 
 ConnFactory factory = new ConnFactory(conf);
 try {
-ConnManager manager = factory.getManager(new SqoopOptions());
+factory.getManager(new SqoopOptions());
 fail("factory.getManager() expected to throw IOException");
 } catch (IOException ioe) {
 // Expected this. Test passes.
@@ -238,9 +238,12 @@ public void createTable(ColumnGenerator... extraColumns) throws SQLException {
 PreparedStatement statement = conn.prepareStatement(
 "DROP TABLE " + getTableName() + " IF EXISTS",
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+try {
 statement.executeUpdate();
 conn.commit();
+} finally {
 statement.close();
+}
 
 StringBuilder sb = new StringBuilder();
 sb.append("CREATE TABLE ");
@@ -254,10 +257,13 @@ public void createTable(ColumnGenerator... extraColumns) throws SQLException {
 
 statement = conn.prepareStatement(sb.toString(),
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+try {
 statement.executeUpdate();
 conn.commit();
+} finally {
 statement.close();
 }
+}
 
 /** Removing an existing table directory from the filesystem */
 private void removeTablePath() throws IOException {
@@ -278,12 +284,18 @@ private void assertColValForRowId(int id, String colName, String expectedVal)
 PreparedStatement statement = conn.prepareStatement(
 "SELECT " + colName + " FROM " + getTableName() + " WHERE id = " + id,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+String actualVal = null;
+try {
 ResultSet rs = statement.executeQuery();
+try {
 rs.next();
-String actualVal = rs.getString(1);
+actualVal = rs.getString(1);
+} finally {
 rs.close();
+}
+} finally {
 statement.close();
+}
 
 assertEquals("Got unexpected column value", expectedVal, actualVal);
 }
@@ -112,6 +112,11 @@ private void verifyFileContents(InputStream is, String [] lines)
 r.readLine());
 } finally {
 r.close();
+try {
+is.close();
+} catch (IOException ioe) {
+// ignore IOE; may be closed by reader.
+}
 }
 }
 
@@ -128,6 +133,7 @@ private void verifyFileDoesNotExist(Path p) throws IOException {
 public void testNonSplittingTextFile() throws IOException {
 SplittingOutputStream os = new SplittingOutputStream(getConf(),
 getWritePath(), "nonsplit-", 0, false);
+try {
 SplittableBufferedWriter w = new SplittableBufferedWriter(os, true);
 try {
 w.allowSplit();
@@ -138,6 +144,13 @@ public void testNonSplittingTextFile() throws IOException {
 } finally {
 w.close();
 }
+} finally {
+try {
+os.close();
+} catch (IOException ioe) {
+// Ignored; may be thrown because w is already closed.
+}
+}
 
 // Ensure we made exactly one file.
 Path writePath = new Path(getWritePath(), "nonsplit-00000");
@@ -150,8 +163,18 @@ public void testNonSplittingTextFile() throws IOException {
 "This is a string!",
 "This is another string!",
 };
-verifyFileContents(new FileInputStream(new File(getWriteDir(),
-"nonsplit-00000")), expectedLines);
+InputStream fis = new FileInputStream(new File(getWriteDir(),
+"nonsplit-00000"));
+try {
+verifyFileContents(fis, expectedLines);
+} finally {
+try {
+fis.close();
+} catch (IOException ioe) {
+// Ignored; may be closed by verifyFileContents().
+}
+}
 }
 
 public void testNonSplittingGzipFile() throws IOException {
@@ -187,6 +210,7 @@ public void testNonSplittingGzipFile() throws IOException {
 public void testSplittingTextFile() throws IOException {
 SplittingOutputStream os = new SplittingOutputStream(getConf(),
 getWritePath(), "split-", 10, false);
+try {
 SplittableBufferedWriter w = new SplittableBufferedWriter(os, true);
 try {
 w.allowSplit();
@@ -196,6 +220,13 @@ public void testSplittingTextFile() throws IOException {
 } finally {
 w.close();
 }
+} finally {
+try {
+os.close();
+} catch (IOException ioe) {
+// Ignored; may be thrown because w is already closed.
+}
+}
 
 // Ensure we made exactly two files.
 Path writePath = new Path(getWritePath(), "split-00000");
@@ -209,14 +240,31 @@ public void testSplittingTextFile() throws IOException {
 String [] expectedLines0 = {
 "This is a string!"
 };
-verifyFileContents(new FileInputStream(new File(getWriteDir(),
-"split-00000")), expectedLines0);
+InputStream fis = new FileInputStream(new File(getWriteDir(),
+"split-00000"));
+try {
+verifyFileContents(fis, expectedLines0);
+} finally {
+try {
+fis.close();
+} catch (IOException ioe) {
+// ignored; may be generated because fis closed in verifyFileContents.
+}
+}
 
 String [] expectedLines1 = {
 "This is another string!",
 };
-verifyFileContents(new FileInputStream(new File(getWriteDir(),
-"split-00001")), expectedLines1);
+fis = new FileInputStream(new File(getWriteDir(), "split-00001"));
+try {
+verifyFileContents(fis, expectedLines1);
+} finally {
+try {
+fis.close();
+} catch (IOException ioe) {
+// Ignored; may be thrown because it's closed in verifyFileContents.
+}
+}
 }
 
 public void testSplittingGzipFile() throws IOException {
@@ -135,9 +135,10 @@ public void testReadBlobRef()
 assertNotNull(blob);
 assertFalse(blob.isExternal());
 byte [] data = blob.getData();
-assertEquals(MockResultSet.BLOB_DATA.length, data.length);
+byte [] blobData = MockResultSet.BLOB_DATA();
+assertEquals(blobData.length, data.length);
 for (int i = 0; i < data.length; i++) {
-assertEquals(MockResultSet.BLOB_DATA[i], data[i]);
+assertEquals(blobData[i], data[i]);
 }
 
 // LOBs bigger than 4 bytes are now external.
@@ -151,9 +152,9 @@ public void testReadBlobRef()
 int bytes = is.read(buf, 0, 4096);
 is.close();
 
-assertEquals(MockResultSet.BLOB_DATA.length, bytes);
+assertEquals(blobData.length, bytes);
 for (int i = 0; i < bytes; i++) {
-assertEquals(MockResultSet.BLOB_DATA[i], buf[i]);
+assertEquals(blobData[i], buf[i]);
 }
 }
 }
@@ -41,18 +41,20 @@ private void assertListsEqual(String msg, List<String> expected, List<String> ac
 }
 
 fail(msg);
-}
-
-if (expected == null && actual == null) {
+} else if (expected == null && actual == null) {
 return; // ok. Both null; nothing to do.
 }
 
+assert(null != expected);
+assert(null != actual);
+
 int expectedLen = expected.size();
 int actualLen = actual.size();
 
 if (expectedLen != actualLen) {
 if (null == msg) {
-msg = "Expected list of length " + expectedLen + "; got " + actualLen;
+msg = "Expected list of length " + expectedLen
+    + "; got " + actualLen;
 }
 
 fail(msg);
@@ -63,17 +65,19 @@ private void assertListsEqual(String msg, List<String> expected, List<String> ac
 String expectedElem = expected.get(i);
 String actualElem = actual.get(i);
 
-if (expectedElem == null && actualElem != null) {
+if (expectedElem == null) {
+if (actualElem != null) {
 if (null == msg) {
-msg = "Expected null element at position " + i + "; got [" + actualElem + "]";
+msg = "Expected null element at position " + i
+    + "; got [" + actualElem + "]";
 }
 
 fail(msg);
 }
-
-if (!expectedElem.equals(actualElem)) {
+} else if (!expectedElem.equals(actualElem)) {
 if (null == msg) {
-msg = "Expected [" + expectedElem + "] at position " + i + "; got [" + actualElem + "]";
+msg = "Expected [" + expectedElem + "] at position " + i
+    + "; got [" + actualElem + "]";
 }
 
 fail(msg);
@@ -235,7 +239,6 @@ public void testRequiredQuotes1() throws RecordParser.ParseError {
 
 public void testRequiredQuotes2() throws RecordParser.ParseError {
 RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
-String [] strings = { "field1", "field2" };
 try {
 parser.parseRecord("\"field1\",field2");
 fail("Expected parse error for required quotes");
@@ -246,7 +249,6 @@ public void testRequiredQuotes2() throws RecordParser.ParseError {
 
 public void testRequiredQuotes3() throws RecordParser.ParseError {
 RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
-String [] strings = { "field1", "field2" };
 try {
 parser.parseRecord("field1,\"field2\"");
 fail("Expected parse error for required quotes");
@@ -257,7 +259,6 @@ public void testRequiredQuotes3() throws RecordParser.ParseError {
 
 public void testRequiredQuotes4() throws RecordParser.ParseError {
 RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
-String [] strings = { "field1", "field2" };
 try {
 parser.parseRecord("field1,\"field2\"\n");
 fail("Expected parse error for required quotes");
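Most of the test-suite churn above and below is the same mechanical fix: JDBC statements and result sets are now closed in finally blocks so they are released even when a query or assertion throws, with the value read from the ResultSet assigned to a variable declared before the try. A minimal, self-contained sketch of that nesting, with a hypothetical helper name that is not part of the commit, might look like this:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    /** Sketch of the close-in-finally nesting applied throughout the tests. */
    final class SingleValueQuery {
      static String readSingleValue(Connection conn, String sql) throws SQLException {
        String value = null;  // declared before the try so it is visible after the finally
        PreparedStatement statement = conn.prepareStatement(sql,
            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        try {
          ResultSet rs = statement.executeQuery();
          try {
            if (rs.next()) {
              value = rs.getString(1);
            }
          } finally {
            rs.close();        // inner finally: the ResultSet is closed even if getString throws
          }
        } finally {
          statement.close();   // outer finally: the Statement is closed even if executeQuery throws
        }
        return value;
      }
    }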
@@ -77,8 +77,14 @@ public class MySQLAuthTest extends ImportJobTestCase {
 // instance variables populated during setUp, used during tests
 private DirectMySQLManager manager;
 
+@Override
+protected boolean useHsqldbTestServer() {
+return false;
+}
+
 @Before
 public void setUp() {
+super.setUp();
 SqoopOptions options = new SqoopOptions(AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
 options.setUsername(AUTH_TEST_USER);
 options.setPassword(AUTH_TEST_PASS);
@@ -123,6 +129,7 @@ public void setUp() {
 
 @After
 public void tearDown() {
+super.tearDown();
 try {
 manager.close();
 } catch (SQLException sqlE) {
@@ -242,6 +249,7 @@ private void dropTimestampTables() throws SQLException {
 connection.setAutoCommit(false);
 st = connection.createStatement();
 
+try {
 st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable0");
 st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable1");
 st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable2");
@@ -249,9 +257,11 @@ private void dropTimestampTables() throws SQLException {
 st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable4");
 st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable5");
 connection.commit();
+} finally {
 st.close();
 connection.close();
 }
+}
 
 public void doZeroTimestampTest(int testNum, boolean expectSuccess,
 String connectString) throws IOException, SQLException {
@@ -66,9 +66,12 @@ protected void dropTableIfExists(String table) throws SQLException {
 PreparedStatement statement = conn.prepareStatement(
 "DROP TABLE IF EXISTS " + table,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+try {
 statement.executeUpdate();
-statement.close();
 conn.commit();
+} finally {
+statement.close();
+}
 }
 
 @Override
@@ -90,8 +90,15 @@ public class OracleManagerTest extends ImportJobTestCase {
 // instance variables populated during setUp, used during tests
 private OracleManager manager;
 
+@Override
+protected boolean useHsqldbTestServer() {
+return false;
+}
+
 @Before
 public void setUp() {
+super.setUp();
+
 SqoopOptions options = new SqoopOptions(OracleUtils.CONNECT_STRING,
 TABLE_NAME);
 OracleUtils.setOracleAuth(options);
@@ -152,6 +159,7 @@ public void setUp() {
 
 @After
 public void tearDown() {
+super.tearDown();
 try {
 manager.close();
 } catch (SQLException sqlE) {
@@ -81,8 +81,15 @@ public class PostgresqlTest extends ImportJobTestCase {
 static final String TABLE_NAME = "EMPLOYEES_PG";
 static final String CONNECT_STRING = HOST_URL + DATABASE_NAME;
 
+@Override
+protected boolean useHsqldbTestServer() {
+return false;
+}
+
 @Before
 public void setUp() {
+super.setUp();
+
 LOG.debug("Setting up another postgresql test...");
 
 SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
@@ -297,11 +297,16 @@ public void testWeirdColumnNames() throws SQLException {
 String tableName = HsqldbTestServer.getTableName();
 Connection connection = testServer.getConnection();
 Statement st = connection.createStatement();
+try {
 st.executeUpdate("DROP TABLE " + tableName + " IF EXISTS");
-st.executeUpdate("CREATE TABLE " + tableName + " (class INT, \"9field\" INT)");
+st.executeUpdate("CREATE TABLE " + tableName
+    + " (class INT, \"9field\" INT)");
 st.executeUpdate("INSERT INTO " + tableName + " VALUES(42, 41)");
 connection.commit();
+} finally {
+st.close();
 connection.close();
+}
 
 String [] argv = {
 "--bindir",
@@ -217,9 +217,12 @@ protected void dropTableIfExists(String table) throws SQLException {
 PreparedStatement statement = conn.prepareStatement(
 "DROP TABLE " + table + " IF EXISTS",
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+try {
 statement.executeUpdate();
-statement.close();
 conn.commit();
+} finally {
+statement.close();
+}
 }
 
 /**
@@ -91,11 +91,18 @@ protected int getMinRowId() throws SQLException {
 PreparedStatement statement = conn.prepareStatement(
 "SELECT MIN(id) FROM " + getTableName(),
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+int minVal = 0;
+try {
 ResultSet rs = statement.executeQuery();
+try {
 rs.next();
-int minVal = rs.getInt(1);
+minVal = rs.getInt(1);
+} finally {
 rs.close();
+}
+} finally {
 statement.close();
+}
 
 return minVal;
 }
@@ -106,11 +113,18 @@ protected int getMaxRowId() throws SQLException {
 PreparedStatement statement = conn.prepareStatement(
 "SELECT MAX(id) FROM " + getTableName(),
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+int maxVal = 0;
+try {
 ResultSet rs = statement.executeQuery();
+try {
 rs.next();
-int maxVal = rs.getInt(1);
+maxVal = rs.getInt(1);
+} finally {
 rs.close();
+}
+} finally {
 statement.close();
+}
 
 return maxVal;
 }
@@ -128,11 +142,19 @@ protected void verifyExport(int expectedNumRecords) throws IOException, SQLExcep
 PreparedStatement statement = conn.prepareStatement(
 "SELECT COUNT(*) FROM " + getTableName(),
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-ResultSet rs = statement.executeQuery();
+int actualNumRecords = 0;
+ResultSet rs = null;
+try {
+rs = statement.executeQuery();
+try {
 rs.next();
-int actualNumRecords = rs.getInt(1);
+actualNumRecords = rs.getInt(1);
+} finally {
 rs.close();
+}
+} finally {
 statement.close();
+}
 
 assertEquals("Got back unexpected row count", expectedNumRecords,
 actualNumRecords);
@@ -149,22 +171,36 @@ protected void verifyExport(int expectedNumRecords) throws IOException, SQLExcep
 statement = conn.prepareStatement("SELECT msg FROM " + getTableName()
 + " WHERE id = " + minVal,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+String minMsg = "";
+try {
 rs = statement.executeQuery();
+try {
 rs.next();
-String minMsg = rs.getString(1);
+minMsg = rs.getString(1);
+} finally {
 rs.close();
+}
+} finally {
 statement.close();
+}
 
 assertEquals("Invalid msg field for min value", getMsgPrefix() + minVal, minMsg);
 
 statement = conn.prepareStatement("SELECT msg FROM " + getTableName()
 + " WHERE id = " + maxVal,
 ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+String maxMsg = "";
+try {
 rs = statement.executeQuery();
+try {
 rs.next();
-String maxMsg = rs.getString(1);
+maxMsg = rs.getString(1);
+} finally {
 rs.close();
+}
+} finally {
 statement.close();
+}
 
 assertEquals("Invalid msg field for min value", getMsgPrefix() + maxVal, maxMsg);
 }
@@ -209,12 +209,15 @@ public void dropExistingSchema() throws SQLException {
 Connection conn = mgr.getConnection();
 for (String table : tables) {
 Statement s = conn.createStatement();
+try {
 s.executeUpdate("DROP TABLE " + table);
 conn.commit();
+} finally {
 s.close();
 }
 }
 }
+}
 
 /**
 * Creates an hsqldb server, fills it with tables and data.
@@ -50,8 +50,10 @@
 */
 public class MockResultSet implements ResultSet {
 
-public static final byte [] BLOB_DATA = { 0x0, 0x1, 0x2, 0x3,
-0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF };
+public static final byte [] BLOB_DATA() {
+return new byte[] { 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9,
+    0xA, 0xB, 0xC, 0xD, 0xE, 0xF };
+}
 
 public static final String CLOB_DATA = "This is the mock clob data!";
 
@@ -60,7 +62,7 @@ public class MockResultSet implements ResultSet {
 */
 public static class MockBlob implements Blob {
 public InputStream getBinaryStream() {
-return new ByteArrayInputStream(BLOB_DATA);
+return new ByteArrayInputStream(BLOB_DATA());
 }
 
 public InputStream getBinaryStream(long pos, long len) {
@@ -71,14 +73,15 @@ public InputStream getBinaryStream(long pos, long len) {
 byte [] bytes = new byte[length];
 
 int start = (int) pos - 1; // SQL uses 1-based arrays!!
+byte [] blobData = BLOB_DATA();
 for (int i = 0; i < length; i++) {
-bytes[i] = BLOB_DATA[i + start];
+bytes[i] = blobData[i + start];
 }
 return bytes;
 }
 
 public long length() {
-return BLOB_DATA.length;
+return BLOB_DATA().length;
 }
 
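The last hunks above turn MockResultSet's public static byte[] BLOB_DATA constant into a BLOB_DATA() method that builds a fresh array on every call; a public mutable static array can be modified in place by any caller, which FindBugs flags with its mutable-static-array warnings. A small sketch of the same idea, using a hypothetical fixture class and a defensive copy rather than rebuilding the literal each time:

    import java.util.Arrays;

    // Sketch only: hypothetical fixture class, not part of the commit.
    final class BlobFixture {
      // The canonical copy stays private so callers cannot mutate shared test data.
      private static final byte[] DATA = {
          0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF };

      /** Each caller gets its own copy, analogous to MockResultSet.BLOB_DATA() above. */
      static byte[] blobData() {
        return Arrays.copyOf(DATA, DATA.length);
      }
    }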