diff --git a/COMPILING.txt b/COMPILING.txt index 4b61b11b..835ba33b 100644 --- a/COMPILING.txt +++ b/COMPILING.txt @@ -61,16 +61,23 @@ downloads the necessary binaries. == Testing Sqoop -Sqoop has several unit tests which can be run with +ant test+ or +./gradlew test+. This command +Sqoop has three main test categories: unit, third party and Amazon S3 tests. + +Sqoop unit tests can be run with +ant test+. This command will run all the "basic" checks against an in-memory database, HSQLDB. -Sqoop also has compatibility tests that check its ability to work with +Sqoop's third party tests are compatibility tests that check its ability to work with several third-party databases. To enable these tests, you will need to install and configure the databases or run them in Docker containers, and download the JDBC drivers for each one. -=== Installing the necessary databases +Sqoop's Amazon S3 test suite tests the "RDBMS to Amazon S3" use case with an in-memory database, HSQLDB. +To enable these tests, you will need to have either permanent or temporary Amazon S3 security credentials. -==== MySQL +=== Third party tests + +==== Installing the necessary databases + +===== MySQL Install MySQL server and client 5.0. Download MySQL Connector/J 5.0.8 for JDBC. Instructions for configuring the MySQL database are in MySQLAuthTest @@ -89,7 +96,7 @@ sqoop.test.mysql.password=MYPWD If not specified, the default value used for this property is: jdbc:mysql://localhost/ -==== Oracle +===== Oracle Install Oracle Enterprise Edition 10.2.0+. Instructions for configuring the database are in OracleManagerTest. Download the ojdbc6_g jar. @@ -111,7 +118,7 @@ jdbc:oracle:thin:@//localhost/xe Users sqooptest and sqooptest2 should be created prior to running the tests. SQL script is available in src/test/oraoop/create_users.sql -==== PostgreSQL +===== PostgreSQL Install PostgreSQL 8.3.9. Download the postgresql 8.4 jdbc driver. Instructions for configuring the database are in PostgresqlTest. @@ -130,7 +137,7 @@ sqoop.test.postgresql.password=MYPWD If not specified, the default value used for this property is: jdbc:postgresql://localhost/ -==== SQL Server +===== SQL Server Install SQL Server Express 2012 and create a database instance and download the appropriate JDBC driver. Instructions for configuring the @@ -153,7 +160,7 @@ jdbc:sqlserver://sqlserverhost:1433 This can be useful if you have the hostname sqlserverhost mapped to the IP address of the SQL Server instance. -==== Cubrid +===== Cubrid Install Cubrid 9.2.2.0003 and create a database instance and download the appropriate JDBC driver. Instructions for configuring the database are in @@ -174,7 +181,7 @@ sqoop.test.cubrid.connectstring.password=MYPWD If not specified, the default value used for this property is: jdbc:cubrid:localhost -==== DB2 +===== DB2 Install DB2 9.74 Express C and download the appropriate JDBC driver. Instructions for configuring the server can be found in @@ -197,7 +204,7 @@ jdbc:db2://db2host:50000 This can be useful if you have the hostname db2host mapped to the IP address of the DB2 Server instance. -=== Running the Third-party Tests on native database servers +==== Running the Third-party Tests on native database servers After the third-party databases are installed and configured, run: @@ -218,18 +225,18 @@ ant test -Dmanual=true -Dsqoop.thirdparty.lib.dir=/path/to/jdbc/drivers/ Note that +sqoop.thirdparty.lib.dir+ can also be specified in +build.properties+. 
-=== Setting up and executing third-party tests with databases running in Docker containers
+==== Setting up and executing third-party tests with databases running in Docker containers

The easiest way to run the Sqoop third party test pack is to start all the necessary database servers in Docker containers.
This eliminates the need to install and set up 6 different RDBMSs on the development machines and provides a clean
database environment every time the tests are executed.

-==== Installing docker
+===== Installing Docker

The first step is to install a recent version (1.13.0+) of Docker and Docker Compose on your development machine.
Please refer to the Docker documentation for the installation instructions for your OS environment:
https://docs.docker.com/engine/installation/
https://docs.docker.com/compose/install/

-==== Downloading docker images
+===== Downloading Docker images

MySQL, PostgreSQL, MSSQL, DB2 and Cubrid images are freely available on Docker Hub, so they will be pulled automatically
by the startup command specified below; however, the Oracle EE image has to be built manually. Please refer to the
README.md file of the GitHub project below for building instructions:
@@ -237,7 +244,7 @@ https://github.com/oracle/docker-images/tree/master/OracleDatabase

Please note that Sqoop third party tests require Oracle Enterprise Edition and the startup command assumes version 12.2.0.1.

-==== Starting the Docker containers
+===== Starting the Docker containers

A startup script has been added to the Sqoop project to make the Docker container initialization easier:
@@ -274,7 +281,7 @@ After the startup script is executed the containers need some time to initialize
Most of the containers need less than 1 minute to start up but DB2 needs ~5 minutes and Oracle needs ~15 minutes.
The Docker images need ~17GB free disk space and Docker requires ~5GB of memory to start all of them at the same time.

-==== Stopping the Docker containers
+===== Stopping the Docker containers

You can stop and remove the Docker containers using the following command:
@@ -282,7 +289,7 @@ You can stop and remove the Docker containers using the following command:

----
/src/scripts/thirdpartytest/stop-thirdpartytest-db-containers.sh
----

-==== Running the third party tests using docker containers
+===== Running the third party tests using Docker containers

You can execute the third party tests against the DBs running in Docker containers using the following command
(replace <path-to-jdbc-drivers> with the path to the necessary JDBC drivers):
@@ -296,6 +303,22 @@ Please note that even if you do not need to install RDBMSs to run Sqoop third pa
* mysqlimport
* psql

+=== Amazon S3 tests
+
+To enable Amazon S3 tests you need to have Amazon S3 security credentials. To pass these credentials to Sqoop during
+test execution you need a generator command that writes the Amazon S3 credentials to the first line of standard
+output, separated by spaces, in the following order: access key, secret key and session token (the session token is
+required only in the case of temporary credentials).
+
+You can then pass the bucket URL and the generator command to the tests via system properties as follows:
+
+----
+ant clean test -Ds3.bucket.url=<bucket-url> -Ds3.generator.command=<generator-command>
+or
+./gradlew test -Ds3.bucket.url=<bucket-url> -Ds3.generator.command=<generator-command>
+----
+
+
== Code Quality Analysis

We have three tools which can be used to analyze Sqoop's code quality.
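For illustration, a minimal credential generator command could be a one-line shell script that simply prints credentials
already exported in the standard AWS environment variables. This sketch assumes +AWS_ACCESS_KEY_ID+, +AWS_SECRET_ACCESS_KEY+
and, for temporary credentials, +AWS_SESSION_TOKEN+ are set in the shell beforehand; the script itself is an example,
not part of this patch:

----
#!/bin/sh
# Emits "access-key secret-key [session-token]" space-separated on a single
# line of stdout, which is the format the S3 test suite expects.
printf '%s %s %s\n' "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$AWS_SESSION_TOKEN"
----

Saved for example as print-s3-credentials.sh (a hypothetical name), it would be passed to the tests via
+-Ds3.generator.command=/path/to/print-s3-credentials.sh+.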
diff --git a/build.gradle b/build.gradle
index 99a4e24f..709172cc 100644
--- a/build.gradle
+++ b/build.gradle
@@ -89,6 +89,7 @@ def sqoopThirdPartyLib = System.getProperty("sqoop.thirdparty.lib.dir")
dependencies {
  if (sqoopThirdPartyLib != null) runtime fileTree(dir: sqoopThirdPartyLib, include: '*.jar')
+  compile group: 'org.apache.hadoop', name: 'hadoop-aws', version: hadoopVersion
  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: hadoopVersion
  compile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: hadoopVersion
  compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: hadoopVersion
diff --git a/build.xml b/build.xml
index 8115b026..084823cf 100644
--- a/build.xml
+++ b/build.xml
@@ -254,6 +254,9 @@
+
+
+
@@ -888,6 +891,9 @@
+
+
+
diff --git a/ivy.xml b/ivy.xml
index e55172e1..18448602 100644
--- a/ivy.xml
+++ b/ivy.xml
@@ -76,6 +76,8 @@ under the License.
      rev="${hadoop.version}" conf="common->default"/>
+    <dependency org="org.apache.hadoop" name="hadoop-aws"
+      rev="${hadoop.version}" conf="common->default"/>
diff --git a/src/test/org/apache/sqoop/testutil/DefaultS3CredentialGenerator.java b/src/test/org/apache/sqoop/testutil/DefaultS3CredentialGenerator.java
new file mode 100644
--- /dev/null
+++ b/src/test/org/apache/sqoop/testutil/DefaultS3CredentialGenerator.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.testutil;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+import java.util.HashMap;
+import java.util.Map;
+
+public class DefaultS3CredentialGenerator extends S3CredentialGenerator {
+
+  public static final Log LOG = LogFactory.getLog(DefaultS3CredentialGenerator.class.getName());
+
+  public DefaultS3CredentialGenerator(String generatorCommand) throws IOException {
+    super(generatorCommand);
+  }
+
+  @Override
+  protected Map<String, String> generateS3Credentials(String generatorCommand) throws IOException {
+    ProcessBuilder processBuilder = new ProcessBuilder("/bin/sh", "-c", generatorCommand);
+    Process process = processBuilder.start();
+    String output;
+    Map<String, String> credentials = new HashMap<>();
+
+    try (
+        InputStreamReader inputStreamReader = new InputStreamReader(process.getInputStream(), Charset.forName("UTF-8"));
+        BufferedReader bufferedReader = new BufferedReader(inputStreamReader)
+    ) {
+      output = bufferedReader.readLine();
+
+      if (output != null) {
+        String[] splitOutput = output.split(" ");
+
+        credentials.put(S3_ACCESS_KEY, splitOutput[0]);
+        credentials.put(S3_SECRET_KEY, splitOutput[1]);
+
+        if (splitOutput.length > 2) {
+          credentials.put(S3_SESSION_TOKEN, splitOutput[2]);
+        }
+      } else {
+        LOG.info("No S3 credential generator command was given or the output of the command is null, thus S3 tests are being skipped.");
+      }
+    } catch (IOException ioE) {
+      LOG.error("Issue with generating S3 credentials", ioE);
+      throw new RuntimeException(ioE);
+    }
+
+    return credentials;
+  }
+}
diff --git a/src/test/org/apache/sqoop/testutil/S3CredentialGenerator.java b/src/test/org/apache/sqoop/testutil/S3CredentialGenerator.java
new file mode 100644
index 00000000..d7a1acf5
--- /dev/null
+++ b/src/test/org/apache/sqoop/testutil/S3CredentialGenerator.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.testutil;
+
+import java.io.IOException;
+import java.util.Map;
+
+public abstract class S3CredentialGenerator {
+
+  protected static final String S3_ACCESS_KEY = "S3_ACCESS_KEY";
+  protected static final String S3_SECRET_KEY = "S3_SECRET_KEY";
+  protected static final String S3_SESSION_TOKEN = "S3_SESSION_TOKEN";
+
+  protected String s3AccessKey;
+  protected String s3SecretKey;
+  protected String s3SessionToken;
+
+  /**
+   * This constructor invokes the {@link #generateS3Credentials(String)} method and assigns the values of the
+   * returned map to the corresponding fields of the instantiated class.
+   *
+   * @param generatorCommand String containing the command to generate S3 credentials
+   * @throws IOException
+   */
+  public S3CredentialGenerator(String generatorCommand) throws IOException {
+    Map<String, String> s3Credentials = generateS3Credentials(generatorCommand);
+    if (s3Credentials != null) {
+      s3AccessKey = s3Credentials.get(S3_ACCESS_KEY);
+      s3SecretKey = s3Credentials.get(S3_SECRET_KEY);
+      s3SessionToken = s3Credentials.get(S3_SESSION_TOKEN);
+    }
+  }
+
+  /**
+   * Executes the given S3 credential generator command and builds a map containing the credentials.
+   *
+   * @param generatorCommand String containing the command to execute
+   * @return Map containing S3 credentials by keys {@link #S3_ACCESS_KEY}, {@link #S3_SECRET_KEY} and {@link #S3_SESSION_TOKEN}
+   * @throws IOException
+   */
+  protected abstract Map<String, String> generateS3Credentials(String generatorCommand) throws IOException;
+
+  public String getS3AccessKey() {
+    return s3AccessKey;
+  }
+
+  public String getS3SecretKey() {
+    return s3SecretKey;
+  }
+
+  public String getS3SessionToken() {
+    return s3SessionToken;
+  }
+
+}
diff --git a/src/test/org/apache/sqoop/testutil/S3TestUtils.java b/src/test/org/apache/sqoop/testutil/S3TestUtils.java
new file mode 100644
index 00000000..ceaff3b3
--- /dev/null
+++ b/src/test/org/apache/sqoop/testutil/S3TestUtils.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.sqoop.testutil; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.s3a.Constants; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import static org.junit.Assume.assumeNotNull; + +public class S3TestUtils { + + private static final String PROPERTY_GENERATOR_COMMAND = "s3.generator.command"; + private static final String PROPERTY_BUCKET_URL = "s3.bucket.url"; + + private static final String TEMPORARY_CREDENTIALS_PROVIDER_CLASS = "org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider"; + + private static final String BUCKET_TEMP_DIR = "/tmp/"; + + private static final String TARGET_DIR_NAME_PREFIX = "testdir"; + + private static String targetDirName = TARGET_DIR_NAME_PREFIX; + + public static final Log LOG = LogFactory.getLog( + S3TestUtils.class.getName()); + + public static String getGeneratorCommand() { + return System.getProperty(PROPERTY_GENERATOR_COMMAND); + } + + private static String getPropertyBucketUrl() { + return System.getProperty(PROPERTY_BUCKET_URL); + } + + private static String getTemporaryCredentialsProviderClass() { + return TEMPORARY_CREDENTIALS_PROVIDER_CLASS; + } + + private static void setUniqueTargetDirName() { + String uuid = UUID.randomUUID().toString(); + targetDirName = targetDirName + "-" + uuid; + } + + public static void resetTargetDirName() { + targetDirName = TARGET_DIR_NAME_PREFIX; + } + + private static String getTargetDirName() { + return targetDirName; + } + + public static Path getTargetDirPath() { + String targetPathString = getPropertyBucketUrl() + BUCKET_TEMP_DIR + getTargetDirName(); + return new Path(targetPathString); + } + + public static void runTestCaseOnlyIfS3CredentialsAreSet(S3CredentialGenerator s3CredentialGenerator) { + assumeNotNull(s3CredentialGenerator); + assumeNotNull(s3CredentialGenerator.getS3AccessKey()); + assumeNotNull(s3CredentialGenerator.getS3SecretKey()); + } + + public static FileSystem setupS3ImportTestCase(S3CredentialGenerator s3CredentialGenerator, + BaseSqoopTestCase testCase) throws IOException { + createTestTableFromInputData(testCase); + + Configuration hadoopConf = new Configuration(); + S3TestUtils.setS3CredentialsInHadoopConf(hadoopConf, s3CredentialGenerator); + FileSystem s3Client = FileSystem.get(hadoopConf); + + setUniqueTargetDirName(); + + clearTargetDir(s3Client); + + return s3Client; + } + + private static void setS3CredentialsInHadoopConf(Configuration hadoopConf, + S3CredentialGenerator s3CredentialGenerator) { + hadoopConf.set("fs.defaultFS", getPropertyBucketUrl()); + hadoopConf.set(Constants.ACCESS_KEY, s3CredentialGenerator.getS3AccessKey()); + hadoopConf.set(Constants.SECRET_KEY, s3CredentialGenerator.getS3SecretKey()); + + if (s3CredentialGenerator.getS3SessionToken() != null) { + hadoopConf.set(Constants.SESSION_TOKEN, s3CredentialGenerator.getS3SessionToken()); + hadoopConf.set(Constants.AWS_CREDENTIALS_PROVIDER, TEMPORARY_CREDENTIALS_PROVIDER_CLASS); + } + } + + public static ArgumentArrayBuilder getArgumentArrayBuilderForUnitTests(BaseSqoopTestCase testCase, + S3CredentialGenerator s3CredentialGenerator) { + ArgumentArrayBuilder builder = new ArgumentArrayBuilder(); + return builder.withCommonHadoopFlags() + .withProperty(Constants.ACCESS_KEY, s3CredentialGenerator.getS3AccessKey()) + 
.withProperty(Constants.SECRET_KEY, s3CredentialGenerator.getS3SecretKey())
+        .withProperty(Constants.SESSION_TOKEN, s3CredentialGenerator.getS3SessionToken())
+        .withProperty(Constants.AWS_CREDENTIALS_PROVIDER, getTemporaryCredentialsProviderClass())
+        .withOption("connect", testCase.getConnectString())
+        .withOption("num-mappers", "1")
+        .withOption("table", testCase.getTableName())
+        .withOption("target-dir", getTargetDirPath().toString());
+  }
+
+  private static List<String[]> getInputData() {
+    List<String[]> data = new ArrayList<>();
+    data.add(new String[]{"1", "'Ironman'", "'Marvel'", "1963"});
+    data.add(new String[]{"2", "'Wonder Woman'", "'DC'", "1941"});
+    data.add(new String[]{"3", "'Batman'", "'DC'", "1939"});
+    data.add(new String[]{"4", "'Hulk'", "'Marvel'", "1962"});
+    return data;
+  }
+
+  private static void createTestTableFromInputData(BaseSqoopTestCase testCase) {
+    String[] names = {"ID", "SUPERHERO", "COMICS", "DEBUT"};
+    String[] types = {"INT", "VARCHAR(25)", "VARCHAR(25)", "INT"};
+    List<String[]> inputData = getInputData();
+    testCase.createTableWithColTypesAndNames(names, types, new String[0]);
+    testCase.insertIntoTable(names, types, inputData.get(0));
+    testCase.insertIntoTable(names, types, inputData.get(1));
+    testCase.insertIntoTable(names, types, inputData.get(2));
+    testCase.insertIntoTable(names, types, inputData.get(3));
+  }
+
+  public static String[] getExpectedTextOutput() {
+    return new String[] {
+        "1,Ironman,Marvel,1963",
+        "2,Wonder Woman,DC,1941",
+        "3,Batman,DC,1939",
+        "4,Hulk,Marvel,1962"
+    };
+  }
+
+  public static String[] getExpectedSequenceFileOutput() {
+    return new String[] {
+        "1,Ironman,Marvel,1963\n",
+        "2,Wonder Woman,DC,1941\n",
+        "3,Batman,DC,1939\n",
+        "4,Hulk,Marvel,1962\n"
+    };
+  }
+
+  public static String[] getExpectedAvroOutput() {
+    return new String[] {
+        "{\"ID\": 1, \"SUPERHERO\": \"Ironman\", \"COMICS\": \"Marvel\", \"DEBUT\": 1963}",
+        "{\"ID\": 2, \"SUPERHERO\": \"Wonder Woman\", \"COMICS\": \"DC\", \"DEBUT\": 1941}",
+        "{\"ID\": 3, \"SUPERHERO\": \"Batman\", \"COMICS\": \"DC\", \"DEBUT\": 1939}",
+        "{\"ID\": 4, \"SUPERHERO\": \"Hulk\", \"COMICS\": \"Marvel\", \"DEBUT\": 1962}"
+    };
+  }
+
+  public static void clearTargetDir(FileSystem s3Client) throws IOException {
+
+    try {
+      if (s3Client.exists(getTargetDirPath())) {
+        s3Client.delete(getTargetDirPath(), true);
+      }
+    } catch (Exception e) {
+      LOG.error("Issue with cleaning up output directory", e);
+    }
+  }
+}
diff --git a/src/test/org/apache/sqoop/testutil/SequenceFileTestUtils.java b/src/test/org/apache/sqoop/testutil/SequenceFileTestUtils.java
new file mode 100644
index 00000000..ad7576db
--- /dev/null
+++ b/src/test/org/apache/sqoop/testutil/SequenceFileTestUtils.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.testutil;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ClassLoaderStack;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class SequenceFileTestUtils {
+
+  private static final String OUTPUT_FILE_NAME = "/part-m-00000";
+
+  public static final Log LOG = LogFactory.getLog(
+      SequenceFileTestUtils.class.getName());
+
+  /**
+   * Verify results at the given tablePath.
+   * @param testCase current instance of BaseSqoopTestCase
+   * @param expectedResults string array of expected results
+   * @param fileSystem current file system
+   * @param tablePath path of the output table
+   */
+  public static void verify(BaseSqoopTestCase testCase, String[] expectedResults, FileSystem fileSystem, Path tablePath) throws Exception {
+    String outputFilePathString = tablePath.toString() + OUTPUT_FILE_NAME;
+    Path outputFilePath = new Path(outputFilePathString);
+
+    Configuration conf = fileSystem.getConf();
+
+    ClassLoader prevClassLoader = ClassLoaderStack.addJarFile(
+        new Path(new Path(new SqoopOptions().getJarOutputDir()), testCase.getTableName() + ".jar").toString(),
+        testCase.getTableName());
+
+    // The class loader has to be set on the Configuration object, otherwise SequenceFile cannot load custom classes
+    conf.setClassLoader(Thread.currentThread().getContextClassLoader());
+
+    try (SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(outputFilePath))) {
+      WritableComparable key = (WritableComparable) reader.getKeyClass().newInstance();
+      Writable value = (Writable) reader.getValueClass().newInstance();
+      boolean hasNextRecord = reader.next(key, value);
+      int i = 0;
+
+      if (!hasNextRecord && expectedResults != null && expectedResults.length > 0) {
+        fail("Empty output file was not expected");
+      }
+
+      while (hasNextRecord) {
+        assertEquals(expectedResults[i++], value.toString());
+        hasNextRecord = reader.next(key, value);
+      }
+    } catch (IOException ioe) {
+      LOG.error("Issue with verifying the output", ioe);
+      throw new RuntimeException(ioe);
+    } finally {
+      ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+    }
+  }
+}
diff --git a/src/test/org/apache/sqoop/testutil/TextFileTestUtils.java b/src/test/org/apache/sqoop/testutil/TextFileTestUtils.java
new file mode 100644
index 00000000..df19cb8b
--- /dev/null
+++ b/src/test/org/apache/sqoop/testutil/TextFileTestUtils.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sqoop.testutil; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.Charset; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +public class TextFileTestUtils { + + private static final String OUTPUT_FILE_NAME = "/part-m-00000"; + + public static final Log LOG = LogFactory.getLog( + TextFileTestUtils.class.getName()); + + /** + * Verify results at the given tablePath. + * @param expectedResults string array of expected results + * @param fileSystem current filesystem + * @param tablePath path of the output table + */ + public static void verify(String[] expectedResults, FileSystem fileSystem, Path tablePath) throws IOException { + String outputFilePathString = tablePath.toString() + OUTPUT_FILE_NAME; + Path outputFilePath = new Path(outputFilePathString); + + try (BufferedReader br = new BufferedReader(new InputStreamReader(fileSystem.open(outputFilePath), Charset.forName("UTF-8")))) { + String line = br.readLine(); + int i = 0; + + if (line == null && expectedResults != null && expectedResults.length > 0) { + fail("Empty output file was not expected"); + } + + while (line != null) { + assertEquals(expectedResults[i++], line); + line = br.readLine(); + } + + } catch (IOException ioe) { + LOG.error("Issue with verifying the output", ioe); + throw new RuntimeException(ioe); + } + } +}
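For orientation, here is a sketch (not part of the patch) of how a text-format S3 import test could wire the utilities
above together. It assumes the existing Sqoop test infrastructure, namely +ImportJobTestCase+ with its protected
+runImport+ helper and an +ArgumentArrayBuilder.build()+ method, as well as the concrete +DefaultS3CredentialGenerator+
shown earlier; the class and test names are illustrative:

----
package org.apache.sqoop;

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.sqoop.testutil.DefaultS3CredentialGenerator;
import org.apache.sqoop.testutil.ImportJobTestCase;
import org.apache.sqoop.testutil.S3CredentialGenerator;
import org.apache.sqoop.testutil.S3TestUtils;
import org.apache.sqoop.testutil.TextFileTestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

// Illustrative wiring of the utilities introduced by this patch.
public class TestS3TextImportExample extends ImportJobTestCase {

  private static S3CredentialGenerator s3CredentialGenerator;

  private FileSystem s3Client;

  @BeforeClass
  public static void setupS3Credentials() throws IOException {
    // Resolve credentials once per class; stays null when no generator
    // command was passed, in which case every test case below is skipped.
    String generatorCommand = S3TestUtils.getGeneratorCommand();
    if (generatorCommand != null) {
      s3CredentialGenerator = new DefaultS3CredentialGenerator(generatorCommand);
    }
  }

  @Before
  public void setup() throws IOException {
    // Skips the test via JUnit assumptions when no credentials are available.
    S3TestUtils.runTestCaseOnlyIfS3CredentialsAreSet(s3CredentialGenerator);
    s3Client = S3TestUtils.setupS3ImportTestCase(s3CredentialGenerator, this);
  }

  @After
  public void cleanUpTargetDir() throws IOException {
    // @After also runs for tests skipped by the assumption, hence the guard.
    if (s3Client != null) {
      S3TestUtils.clearTargetDir(s3Client);
    }
    S3TestUtils.resetTargetDirName();
  }

  @Test
  public void testS3ImportAsTextFile() throws Exception {
    String[] args = S3TestUtils
        .getArgumentArrayBuilderForUnitTests(this, s3CredentialGenerator)
        .build();
    runImport(args);
    TextFileTestUtils.verify(
        S3TestUtils.getExpectedTextOutput(), s3Client, S3TestUtils.getTargetDirPath());
  }
}
----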