
SQOOP-2. MySQLExportMapper does not send username.

If a username is specified, pass the --user argument to mysqlimport in MySQLExportMapper (sketched below the commit metadata).

From: Aaron Kimball <aaron@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149930 13f79535-47bb-0310-9956-ffa450edef68
Andrew Bayer committed 2011-07-22 20:04:01 +00:00
parent 35e35bc328
commit d215cbac6e
4 changed files with 100 additions and 9 deletions
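
The fix itself is small: mysqlimport already accepts a --user flag, so the mapper only has to append one when a username is actually present in the job configuration. The following is a minimal, self-contained sketch of that optional-argument handling, written to mirror the first hunk below; the class name, helper method, and sample values are illustrative and are not the real MySQLExportMapper/MySQLUtils code.

// Illustrative sketch only: shows how an optional --user flag is added to a
// mysqlimport argument list when (and only when) a username is configured.
import java.util.ArrayList;
import java.util.List;

public class MysqlImportArgsSketch {

  /** Build a mysqlimport argument list; username and port are optional. */
  static List<String> buildArgs(String passwordFile, String username,
      String hostname, int port) {
    List<String> args = new ArrayList<String>();
    args.add("mysqlimport");
    if (null != passwordFile) {
      args.add("--defaults-file=" + passwordFile);
    }
    if (null != username) {
      // The behavior this commit adds: forward the configured username.
      args.add("--user=" + username);
    }
    args.add("--host=" + hostname);
    if (-1 != port) {
      args.add("--port=" + Integer.toString(port));
    }
    return args;
  }

  public static void main(String[] argv) {
    // Prints [..., --user=sqooptest, --host=localhost, --port=3306]
    System.out.println(buildArgs(null, "sqooptest", "localhost", 3306));
    // No username configured: no --user flag is emitted at all.
    System.out.println(buildArgs(null, null, "localhost", -1));
  }
}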


@@ -148,6 +148,11 @@ private void initMySQLImportProcess() throws IOException {
args.add("--defaults-file=" + passwordFile);
}
String username = conf.get(MySQLUtils.USERNAME_KEY);
if (null != username) {
args.add("--user=" + username);
}
args.add("--host=" + hostname);
if (-1 != port) {
args.add("--port=" + Integer.toString(port));


@@ -96,7 +96,7 @@ public void setUp() {
}
private String getRecordLine(int recordNum, ColumnGenerator... extraCols) {
protected String getRecordLine(int recordNum, ColumnGenerator... extraCols) {
String idStr = Integer.toString(recordNum);
StringBuilder sb = new StringBuilder();
@@ -345,8 +345,9 @@ private void assertColValForRowId(int id, String colName, String expectedVal)
*/
private void assertColMinAndMax(String colName, ColumnGenerator generator)
throws SQLException {
int minId = getMinRowId();
int maxId = getMaxRowId();
Connection conn = getConnection();
int minId = getMinRowId(conn);
int maxId = getMaxRowId(conn);
LOG.info("Checking min/max for column " + colName + " with type "
+ generator.getType());


@@ -18,12 +18,19 @@
package com.cloudera.sqoop.manager;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.SQLException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
@@ -145,4 +152,74 @@ public void testMultiTxExport() throws IOException, SQLException {
multiFileTest(1, 20, 1,
"-D", MySQLExportMapper.MYSQL_CHECKPOINT_BYTES_KEY + "=10");
}
/**
* Test an authenticated export using mysqlimport.
*/
public void testAuthExport() throws IOException, SQLException {
SqoopOptions options = new SqoopOptions(MySQLAuthTest.AUTH_CONNECT_STRING,
getTableName());
options.setUsername(MySQLAuthTest.AUTH_TEST_USER);
options.setPassword(MySQLAuthTest.AUTH_TEST_PASS);
manager = new DirectMySQLManager(options);
Connection connection = null;
Statement st = null;
String tableName = getTableName();
try {
connection = manager.getConnection();
connection.setAutoCommit(false);
st = connection.createStatement();
// create a target database table.
st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
st.executeUpdate("CREATE TABLE " + tableName + " ("
+ "id INT NOT NULL PRIMARY KEY, "
+ "msg VARCHAR(24) NOT NULL)");
connection.commit();
// Write a file containing a record to export.
Path tablePath = getTablePath();
Path filePath = new Path(tablePath, "datafile");
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
FileSystem fs = FileSystem.get(conf);
fs.mkdirs(tablePath);
OutputStream os = fs.create(filePath);
BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
w.write(getRecordLine(0));
w.write(getRecordLine(1));
w.write(getRecordLine(2));
w.close();
os.close();
// run the export and verify that the results are good.
runExport(getArgv(true, 10, 10,
"--username", MySQLAuthTest.AUTH_TEST_USER,
"--password", MySQLAuthTest.AUTH_TEST_PASS,
"--connect", MySQLAuthTest.AUTH_CONNECT_STRING));
verifyExport(3, connection);
} catch (SQLException sqlE) {
LOG.error("Encountered SQL Exception: " + sqlE);
sqlE.printStackTrace();
fail("SQLException when accessing target table. " + sqlE);
} finally {
try {
if (null != st) {
st.close();
}
if (null != connection) {
connection.close();
}
} catch (SQLException sqlE) {
LOG.warn("Got SQLException when closing connection: " + sqlE);
}
}
}
}


@@ -153,8 +153,7 @@ protected String getMsgPrefix() {
/** @return the minimum 'id' value in the table */
protected int getMinRowId() throws SQLException {
Connection conn = getConnection();
protected int getMinRowId(Connection conn) throws SQLException {
PreparedStatement statement = conn.prepareStatement(
"SELECT MIN(id) FROM " + getTableName(),
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
@@ -175,8 +174,7 @@ protected int getMinRowId() throws SQLException {
}
/** @return the maximum 'id' value in the table */
protected int getMaxRowId() throws SQLException {
Connection conn = getConnection();
protected int getMaxRowId(Connection conn) throws SQLException {
PreparedStatement statement = conn.prepareStatement(
"SELECT MAX(id) FROM " + getTableName(),
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
@@ -204,7 +202,17 @@ protected int getMaxRowId() throws SQLException {
protected void verifyExport(int expectedNumRecords)
throws IOException, SQLException {
Connection conn = getConnection();
verifyExport(expectedNumRecords, conn);
}
/**
* Check that we got back the expected row set.
* @param expectedNumRecords The number of records we expected to load
* into the database.
* @param conn the db connection to use.
*/
protected void verifyExport(int expectedNumRecords, Connection conn)
throws IOException, SQLException {
LOG.info("Verifying export: " + getTableName());
// Check that we got back the correct number of records.
PreparedStatement statement = conn.prepareStatement(
@@ -232,11 +240,11 @@ protected void verifyExport(int expectedNumRecords)
}
// Check that we start with row 0.
int minVal = getMinRowId();
int minVal = getMinRowId(conn);
assertEquals("Minimum row was not zero", 0, minVal);
// Check that the last row we loaded is numRows - 1
int maxVal = getMaxRowId();
int maxVal = getMaxRowId(conn);
assertEquals("Maximum row had invalid id", expectedNumRecords - 1, maxVal);
// Check that the string values associated with these points match up.