5
0
mirror of https://github.com/apache/sqoop.git synced 2025-05-04 20:11:54 +08:00

SQOOP-1087: Sqoop2: Integration: Abstract common functionality into src module

(Jarek Jarcec Cecho via Kate Ting)
This commit is contained in:
Kate Ting 2013-06-23 18:43:50 -04:00
parent fe54d473af
commit f980e90fc9
11 changed files with 367 additions and 106 deletions

View File

@ -33,7 +33,6 @@ limitations under the License.
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -0,0 +1,83 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.asserts;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.sqoop.test.utils.HdfsUtils;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.fail;
/**
* Assert methods suitable for checking HDFS files and directories.
*
* TODO: This module will require clean up to work on MiniCluster/Real cluster.
*/
public class HdfsAsserts {

  private static final Logger LOG = Logger.getLogger(HdfsAsserts.class);

  /**
   * Verify that mapreduce output (across all files) is as expected.
   *
   * Every expected line must appear exactly once somewhere in the output
   * files, and the output files must contain no other lines.
   *
   * @param directory Mapreduce output directory
   * @param lines Expected lines
   * @throws IOException If any of the output files can not be read
   */
  public static void assertMapreduceOutput(String directory, String... lines) throws IOException {
    Set<String> setLines = new HashSet<String>(Arrays.asList(lines));
    List<String> notFound = new LinkedList<String>();

    String[] files = HdfsUtils.getOutputMapreduceFiles(directory);
    for(String file : files) {
      String filePath = directory + "/" + file;
      BufferedReader br = new BufferedReader(new FileReader(filePath));
      // Close in a finally block so that an IOException from readLine()
      // does not leak the underlying file handle.
      try {
        String line;
        // Each produced line consumes one expected line; anything that does
        // not match lands in notFound, anything left in setLines at the end
        // was expected but never produced.
        while ((line = br.readLine()) != null) {
          if (!setLines.remove(line)) {
            notFound.add(line);
          }
        }
      } finally {
        br.close();
      }
    }

    if(!setLines.isEmpty() || !notFound.isEmpty()) {
      LOG.error("Expected lines that weren't present in the files:");
      LOG.error("\t" + StringUtils.join(setLines, "\n\t"));
      LOG.error("Extra lines in files that weren't expected:");
      LOG.error("\t" + StringUtils.join(notFound, "\n\t"));
      fail("Output do not match expectations.");
    }
  }

  private HdfsAsserts() {
    // Instantiation is prohibited
  }
}

View File

@ -0,0 +1,74 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.asserts;
import org.apache.sqoop.test.db.DatabaseProvider;
import org.apache.log4j.Logger;
import java.sql.ResultSet;
import java.sql.SQLException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
* Database provider related asserts.
*/
public class ProviderAsserts {

  private static final Logger LOG = Logger.getLogger(ProviderAsserts.class);

  /**
   * Assert that exactly one row matching the given conditions exists in the
   * table and that its columns positionally equal the given values.
   *
   * @param provider Provider that should be used to query the database
   * @param tableName Table name
   * @param conditions Conditions for identifying the row
   * @param values Values that should be present in the table
   */
  public static void assertRow(DatabaseProvider provider, String tableName, Object []conditions, Object ...values) {
    ResultSet resultSet = null;
    try {
      resultSet = provider.getRows(tableName, conditions);

      if(!resultSet.next()) {
        fail("No rows found.");
      }

      // Compare column by column; JDBC column indexes are 1-based.
      for(int column = 0; column < values.length; column++) {
        Object actual = resultSet.getObject(column + 1);
        assertEquals("Columns do not match on position: " + (column + 1), values[column], actual);
      }

      if(resultSet.next()) {
        fail("Found more than one row.");
      }
    } catch (SQLException e) {
      LOG.error("Unexpected SQLException", e);
      fail("Unexpected SQLException: " + e);
    } finally {
      // The provider owns the statement backing this result set
      provider.closeResultSetWithStatement(resultSet);
    }
  }

  private ProviderAsserts() {
    // Instantiation is prohibited
  }
}

View File

@ -0,0 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.data;
import org.apache.sqoop.test.db.DatabaseProvider;
/**
 * Simple listing of a few of the world's cities, used for basic sanity tests.
 */
public class Cities extends DataSet {

  public Cities(DatabaseProvider provider, String tableBaseName) {
    super(provider, tableBaseName);
  }

  /**
   * Create the single cities table with an int "id" column plus varchar
   * "country" and "city" columns.
   */
  // NOTE(review): the second argument ("id") is presumably the primary-key
  // column name — confirm against DatabaseProvider.createTable.
  @Override
  public DataSet createTables() {
    provider.createTable(
      tableBaseName,
      "id",
      "id", "int",
      "country", "varchar(50)",
      "city", "varchar(50)"
    );
    return this;
  }

  /**
   * Insert four sample rows of (id, country, city) data.
   */
  @Override
  public DataSet loadBasicData() {
    provider.insertRow(tableBaseName, 1, "USA", "San Francisco");
    provider.insertRow(tableBaseName, 2, "USA", "Sunnyvale");
    provider.insertRow(tableBaseName, 3, "Czech Republic", "Brno");
    provider.insertRow(tableBaseName, 4, "USA", "Palo Alto");
    return this;
  }
}

View File

@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.data;
import org.apache.sqoop.test.db.DatabaseProvider;
/**
* Abstract class for basic testing data sets.
*
* Each data set provides couple of generic methods that can be used to set up
* the tables and load example data.
*/
public abstract class DataSet {

  /**
   * Database provider that will be used to populate the data.
   */
  protected DatabaseProvider provider;

  /**
   * Base name for created tables.
   */
  protected String tableBaseName;

  public DataSet(DatabaseProvider provider, String tableBaseName) {
    setProvider(provider);
    setTableBaseName(tableBaseName);
  }

  /**
   * Set the database provider used to create tables and load data.
   *
   * @param provider Provider to use
   * @return This instance, for call chaining
   */
  public DataSet setProvider(DatabaseProvider provider) {
    this.provider = provider;
    return this;
  }

  /**
   * Set the base name used for tables created by this data set.
   *
   * @param tableBaseName New base table name
   * @return This instance, for call chaining
   */
  public DataSet setTableBaseName(String tableBaseName) {
    this.tableBaseName = tableBaseName;
    return this;
  }

  /**
   * Create all tables that this testing data set might need.
   *
   * @return This instance, for call chaining
   */
  public abstract DataSet createTables();

  /**
   * Load basic data.
   *
   * Basic data set should be small (around 10 rows) without any specialities.
   *
   * @return This instance, for call chaining
   */
  public abstract DataSet loadBasicData();
}

View File

@ -15,33 +15,31 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.integration.connector;
package org.apache.sqoop.test.testcases;
import org.apache.log4j.Logger;
import org.apache.sqoop.framework.configuration.OutputFormat;
import org.apache.sqoop.framework.configuration.StorageType;
import org.apache.sqoop.integration.TomcatTestCase;
import org.apache.sqoop.model.MConnection;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MPersistableEntity;
import org.apache.sqoop.test.asserts.ProviderAsserts;
import org.apache.sqoop.test.data.Cities;
import org.apache.sqoop.test.db.DatabaseProvider;
import org.apache.sqoop.test.db.DatabaseProviderFactory;
import org.apache.sqoop.validation.Status;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.sql.ResultSet;
import java.sql.SQLException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.fail;
/**
* Base test case for connector testing.
* Base test case suitable for connector testing.
*
* It will create and initialize database provider prior every test execution.
* In addition to pure Tomcat based test case it will also create and initialize
* the database provider prior every test execution.
*/
abstract public class ConnectorTestCase extends TomcatTestCase {
@ -124,22 +122,14 @@ protected void fillInputForm(MJob job) {
* Create table cities.
*/
protected void createTableCities() {
createTable("id",
"id", "int",
"country", "varchar(50)",
"city", "varchar(50)"
);
new Cities(provider, getTableName()).createTables();
}
/**
* Create table cities and load few rows.
*/
protected void createAndLoadTableCities() {
createTableCities();
insertRow(1, "USA", "San Francisco");
insertRow(2, "USA", "Sunnyvale");
insertRow(3, "Czech Republic", "Brno");
insertRow(4, "USA", "Palo Alto");
new Cities(provider, getTableName()).createTables().loadBasicData();
}
/**
@ -149,29 +139,7 @@ protected void createAndLoadTableCities() {
* @param values Values that are expected in the table (with corresponding types)
*/
protected void assertRow(Object []conditions, Object ...values) {
ResultSet rs = provider.getRows(getTableName(), conditions);
try {
if(! rs.next()) {
fail("No rows found.");
}
int i = 1;
for(Object expectedValue : values) {
Object actualValue = rs.getObject(i);
assertEquals("Columns do not match on position: " + i, expectedValue, actualValue);
i++;
}
if(rs.next()) {
fail("Found more than one row.");
}
} catch (SQLException e) {
LOG.error("Unexpected SQLException", e);
fail("Unexpected SQLException: " + e);
} finally {
provider.closeResultSetWithStatement(rs);
}
ProviderAsserts.assertRow(provider, getTableName(), conditions, values);
}
/**
@ -179,7 +147,7 @@ protected void assertRow(Object []conditions, Object ...values) {
*
* @param values Values that are expected
*/
protected void assertRowInCitiesTable(Object ... values) {
protected void assertRowInCities(Object... values) {
assertRow(new Object[]{"id", values[0]}, values);
}

View File

@ -15,30 +15,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.integration;
package org.apache.sqoop.test.testcases;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.test.asserts.HdfsAsserts;
import org.apache.sqoop.test.minicluster.TomcatSqoopMiniCluster;
import org.apache.sqoop.test.utils.HdfsUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.fail;
/**
* Basic test case that will bootstrap Sqoop server running in external Tomcat
@ -131,21 +120,6 @@ public String getMapreduceDirectory() {
return getTemporaryPath() + "/mapreduce-job-io";
}
/**
* Return list of file names that are outputs of mapreduce job.
*
* @return
*/
public String[] getOutputFilesMapreduce() {
File dir = new File(getMapreduceDirectory());
return dir.list(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith("part-");
}
});
}
/**
* Assert that mapreduce has generated following lines.
*
@ -156,31 +130,7 @@ public boolean accept(File dir, String name) {
* @throws IOException
*/
protected void assertMapreduceOutput(String... lines) throws IOException {
Set<String> setLines = new HashSet<String>(Arrays.asList(lines));
List<String> notFound = new LinkedList<String>();
String []files = getOutputFilesMapreduce();
for(String file : files) {
String filePath = getMapreduceDirectory() + "/" + file;
BufferedReader br = new BufferedReader(new FileReader((filePath)));
String line;
while ((line = br.readLine()) != null) {
if (!setLines.remove(line)) {
notFound.add(line);
}
}
br.close();
}
if(!setLines.isEmpty() || !notFound.isEmpty()) {
LOG.error("Expected lines that weren't present in the files:");
LOG.error("\t" + StringUtils.join(setLines, "\n\t"));
LOG.error("Extra lines in files that weren't expected:");
LOG.error("\t" + StringUtils.join(notFound, "\n\t"));
fail("Output do not match expectations.");
}
HdfsAsserts.assertMapreduceOutput(getMapreduceDirectory(), lines);
}
/**
@ -191,7 +141,6 @@ protected void assertMapreduceOutput(String... lines) throws IOException {
* @throws IOException
*/
protected void createInputMapreduceFile(String filename, String...lines) throws IOException {
File outputFile = new File(getMapreduceDirectory(), filename);
FileUtils.writeLines(outputFile, Arrays.asList(lines));
HdfsUtils.createFile(getMapreduceDirectory(), filename, lines);
}
}

View File

@ -0,0 +1,69 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.utils;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Arrays;
/**
* Handy utilities to work with HDFS
*
* TODO: This module will require clean up to work on MiniCluster/Real cluster.
*/
public class HdfsUtils {

  private static final Logger LOG = Logger.getLogger(HdfsUtils.class);

  /**
   * Get list of mapreduce output files from given directory.
   *
   * Only files following the usual "part-*" naming convention are returned.
   *
   * @param directory Directory to be searched for files generated by MR
   * @return Names of matching files; empty array when the directory does not
   *         exist or can not be listed
   */
  public static String [] getOutputMapreduceFiles(String directory) {
    File dir = new File(directory);
    String[] files = dir.list(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        return name.startsWith("part-");
      }
    });
    // File.list() returns null when the path is not an existing directory or
    // an I/O error occurs; normalize to an empty array so that callers
    // iterating over the result do not hit a NullPointerException.
    if (files == null) {
      LOG.warn("Unable to list mapreduce output files in: " + directory);
      return new String[0];
    }
    return files;
  }

  /**
   * Create HDFS file with given content.
   *
   * @param directory Directory where the file should be created
   * @param filename File name
   * @param lines Individual lines that should be written into the file
   * @throws IOException If the file can not be written
   */
  public static void createFile(String directory, String filename, String ...lines) throws IOException {
    File outputFile = new File(directory, filename);
    FileUtils.writeLines(outputFile, Arrays.asList(lines));
  }

  private HdfsUtils() {
    // Instantiation is not allowed
  }
}

View File

@ -18,7 +18,7 @@
package org.apache.sqoop.integration.connector.jdbc.generic;
import org.apache.log4j.Logger;
import org.apache.sqoop.integration.connector.ConnectorTestCase;
import org.apache.sqoop.test.testcases.ConnectorTestCase;
import org.apache.sqoop.model.MConnection;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
@ -70,10 +70,10 @@ public void testBasicImport() throws Exception {
} while(submission.getStatus().isRunning());
assertEquals(4L, rowCount());
assertRowInCitiesTable(1, "USA", "San Francisco");
assertRowInCitiesTable(2, "USA", "Sunnyvale");
assertRowInCitiesTable(3, "Czech Republic", "Brno");
assertRowInCitiesTable(4, "USA", "Palo Alto");
assertRowInCities(1, "USA", "San Francisco");
assertRowInCities(2, "USA", "Sunnyvale");
assertRowInCities(3, "Czech Republic", "Brno");
assertRowInCities(4, "USA", "Palo Alto");
// Clean up testing table
dropTable();

View File

@ -20,7 +20,7 @@
import org.apache.log4j.Logger;
import org.apache.sqoop.framework.configuration.OutputFormat;
import org.apache.sqoop.framework.configuration.StorageType;
import org.apache.sqoop.integration.connector.ConnectorTestCase;
import org.apache.sqoop.test.testcases.ConnectorTestCase;
import org.apache.sqoop.model.MConnection;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;

View File

@ -19,7 +19,7 @@
import org.apache.sqoop.client.request.VersionRequest;
import org.apache.sqoop.common.VersionInfo;
import org.apache.sqoop.integration.TomcatTestCase;
import org.apache.sqoop.test.testcases.TomcatTestCase;
import org.apache.sqoop.json.VersionBean;
import org.junit.Test;