Mirror of https://github.com/apache/sqoop.git (synced 2025-05-06 10:01:30 +08:00)
SQOOP-937: Don't generate ORM files for Direct mode connectors
(Venkat Ranganathan via Jarek Jarcec Cecho)
commit f2ba7eaae2
parent 86812b853f
@@ -639,5 +639,20 @@ public String datetimeToQueryString(String datetime, int columnType) {
   public String getInputBoundsQuery(String splitByCol, String sanitizedQuery) {
     return null;
   }
 
+  /**
+   * This method allows the ConnManager to override the generation of ORM
+   * classes if the SQOOP generated classes are not used by it.
+   * A return value of false from this method means that the SQOOP ORM
+   * classes are needed for use with the connector.
+   * A return value of true indicates that the connection manager does not
+   * use the SQOOP ORM classes. For example, in the Direct mode of some of
+   * the connectors, the text files are directly processed by DB specific
+   * facilities without even being passed through the SQOOP process and
+   * in those circumstances, it makes sense to disable the ORM generation.
+   */
+  public boolean isORMFacilitySelfManaged() {
+    return false;
+  }
 }
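For context, a connector opts out of ORM generation by overriding this new hook. A minimal sketch of a hypothetical direct-mode manager (ExampleDirectManager is illustrative and not part of this commit; it assumes the concrete GenericJdbcManager base class):

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.manager.GenericJdbcManager;

// Hypothetical connector, for illustration only: a direct-mode manager
// whose data path never touches generated SqoopRecord classes.
public class ExampleDirectManager extends GenericJdbcManager {

  public ExampleDirectManager(final String driverClass,
      final SqoopOptions opts) {
    super(driverClass, opts);
  }

  @Override
  public boolean isORMFacilitySelfManaged() {
    // Direct mode hands records to DB-specific tooling, so Sqoop's
    // generated ORM classes are never used; skip generating them.
    return true;
  }
}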
@@ -246,4 +246,9 @@ private void handleNetezzaExtraArgs(SqoopOptions opts)
   public boolean supportsStagingForExport() {
     return false;
   }
+
+  @Override
+  public boolean isORMFacilitySelfManaged() {
+    return true;
+  }
 }
@@ -18,6 +18,9 @@
 
 package org.apache.sqoop.manager;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sqoop.util.Jars;
+
 import com.cloudera.sqoop.SqoopOptions;
 
 /**
@@ -35,6 +38,10 @@ public ExportJobContext(final String table, final String jar,
       final SqoopOptions opts) {
     this.tableName = table;
     this.jarFile = jar;
+    if (this.jarFile == null) {
+      // Set the jarFile to the hadoop core jar file.
+      this.jarFile = Jars.getJarPathForClass(Configuration.class);
+    }
     this.options = opts;
   }
 
@@ -21,7 +21,10 @@
 import org.apache.hadoop.mapreduce.InputFormat;
 import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 import com.cloudera.sqoop.SqoopOptions;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.sqoop.util.Jars;
 
 /**
  * A set of parameters describing an import operation; this is passed to
@@ -40,6 +43,10 @@ public ImportJobContext(final String table, final String jar,
       final SqoopOptions opts, final Path destination) {
     this.tableName = table;
     this.jarFile = jar;
+    if (this.jarFile == null) {
+      // Set the jarFile to the hadoop core jar file.
+      this.jarFile = Jars.getJarPathForClass(Configuration.class);
+    }
     this.options = opts;
     this.inputFormatClass = DataDrivenDBInputFormat.class;
     this.destination = destination;
@@ -321,8 +321,14 @@ public void runExport() throws ExportException, IOException {
       }
     }
 
-    String tableClassName =
-        new TableClassName(options).getClassForTable(outputTableName);
+    String tableClassName = null;
+    if (!cmgr.isORMFacilitySelfManaged()) {
+      tableClassName =
+          new TableClassName(options).getClassForTable(outputTableName);
+    }
+    // For self-managed ORM, leave tableClassName null so that
+    // we don't check for non-existent classes.
     String ormJarFile = context.getJarFile();
 
     LOG.info("Beginning export of " + outputTableName);
@@ -196,9 +196,14 @@ public void runImport(String tableName, String ormJarFile, String splitByCol,
     } else {
       LOG.info("Beginning query import.");
     }
-
-    String tableClassName =
-        new TableClassName(options).getClassForTable(tableName);
+    String tableClassName = null;
+    if (!getContext().getConnManager().isORMFacilitySelfManaged()) {
+      tableClassName =
+          new TableClassName(options).getClassForTable(tableName);
+    }
+    // For self-managed ORM, leave tableClassName null so that
+    // we don't check for non-existent classes.
+
     loadJars(conf, ormJarFile, tableClassName);
 
     Job job = new Job(conf);
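Both call sites above apply the same guard; condensed into one self-contained helper for clarity (a paraphrase of the commit's logic, not code from either file; the helper name is invented):

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.orm.TableClassName;

// Paraphrase of the guard added to runExport() and runImport().
final class OrmClassNameSketch {
  static String resolveTableClassName(ConnManager manager,
      SqoopOptions options, String tableName) {
    if (manager.isORMFacilitySelfManaged()) {
      // Self-managed mapping: return null so later steps skip the
      // existence check for a class that was never generated.
      return null;
    }
    return new TableClassName(options).getClassForTable(tableName);
  }
}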
@@ -220,6 +220,7 @@ private void addDirToCache(File dir, FileSystem fs, Set<String> localUrls) {
    */
   protected void loadJars(Configuration conf, String ormJarFile,
       String tableClassName) throws IOException {
+
     boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address"))
         || "local".equals(conf.get("mapred.job.tracker"));
     if (isLocal) {
@@ -1064,13 +1064,24 @@ private void generateHadoopWrite(Map<String, Integer> columnTypes,
     return cleanedColNames;
   }
 
+  /**
+   * Made this a separate method to overcome the 150 line limit of checkstyle.
+   */
+  private void logORMSelfGenerationMessage() {
+    LOG.info("The connection manager declares that it self manages mapping"
+        + " between records & fields and rows & columns. No class will"
+        + " be generated.");
+  }
+
   /**
    * Generate the ORM code for the class.
    */
   public void generate() throws IOException {
     Map<String, Integer> columnTypes = getColumnTypes();
+    if (connManager.isORMFacilitySelfManaged()) {
+      logORMSelfGenerationMessage();
+      return;
+    }
     if (columnTypes == null) {
       throw new IOException("No columns to generate for ClassWriter");
     }
@@ -1110,7 +1121,6 @@ public void generate() throws IOException {
       }
       columnTypes.put(identifier, type);
     }
-
     // Check that all explicitly mapped columns are present in result set
     Properties mapping = options.getMapColumnJava();
     if (mapping != null && !mapping.isEmpty()) {
@@ -1207,7 +1217,6 @@ public void generate() throws IOException {
         // ignored because we're closing.
       }
     }
-
     if (null != ostream) {
       try {
         ostream.close();
@@ -71,10 +71,24 @@ public String generateORM(SqoopOptions options, String tableName)
       // This code generator is being invoked as part of an import or export
       // process, and the user has pre-specified a jar and class to use.
       // Don't generate.
+      if (manager.isORMFacilitySelfManaged()) {
+        // No need to generate any ORM. Ignore any jar file given on
+        // the command line as well.
+        LOG.info("The connection manager declares that it self manages mapping"
+            + " between records & fields and rows & columns. The jar file"
+            + " provided will have no effect.");
+      }
       LOG.info("Using existing jar: " + existingJar);
       return existingJar;
     }
+    if (manager.isORMFacilitySelfManaged()) {
+      // No need to generate any ORM. Ignore any jar file given on
+      // the command line as well.
+      LOG.info("The connection manager declares that it self manages mapping"
+          + " between records & fields and rows & columns. No class will"
+          + " be generated.");
+      return null;
+    }
     LOG.info("Beginning code generation");
     CompilationManager compileMgr = new CompilationManager(options);
     ClassWriter classWriter = new ClassWriter(options, manager, tableName,
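Note the changed contract: generateORM() can now return null when the ORM is self-managed, so callers that previously treated the result as a guaranteed jar path must branch. An illustrative caller-side check (sketch only; the surrounding class is invented, the null semantics come from the hunk above):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Illustrative handling of generateORM()'s new nullable result.
final class OrmJarHandlingSketch {
  private static final Log LOG =
      LogFactory.getLog(OrmJarHandlingSketch.class);

  static void handleOrmJar(String ormJar) {
    if (ormJar == null) {
      // Self-managed ORM: nothing was generated; skip jar loading and
      // class-existence checks entirely.
      LOG.info("No ORM jar; connection manager manages its own mapping.");
    } else {
      LOG.info("Using ORM jar: " + ormJar);
    }
  }
}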
@@ -38,6 +38,7 @@
 import org.junit.Test;
 
 import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.TestConnFactory.DummyManager;
 import com.cloudera.sqoop.manager.ConnManager;
 import com.cloudera.sqoop.testutil.DirUtil;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
@@ -485,4 +486,51 @@ public void testBrokenUserMapping() throws Exception {
     }
     fail("we shouldn't successfully generate code");
   }
+
+  private void runFailedGenerationTest(String [] argv,
+      String classNameToCheck) {
+    File codeGenDirFile = new File(CODE_GEN_DIR);
+    File classGenDirFile = new File(JAR_GEN_DIR);
+
+    try {
+      options = new ImportTool().parseArguments(argv,
+          null, options, true);
+    } catch (Exception e) {
+      LOG.error("Could not parse options: " + e.toString());
+    }
+
+    CompilationManager compileMgr = new CompilationManager(options);
+    ClassWriter writer = new ClassWriter(options, manager,
+        HsqldbTestServer.getTableName(), compileMgr);
+
+    try {
+      writer.generate();
+      compileMgr.compile();
+      fail("ORM class file generation succeeded when it was expected to fail");
+    } catch (IOException ioe) {
+      LOG.error("Got IOException from ORM generation as expected: "
+          + ioe.toString());
+    }
+  }
+  /**
+   * A dummy manager that declares that its ORM is self managed.
+   */
+  public static class DummyDirectManager extends DummyManager {
+    @Override
+    public boolean isORMFacilitySelfManaged() {
+      return true;
+    }
+  }
+
+  @Test
+  public void testNoClassGeneration() throws Exception {
+    manager = new DummyDirectManager();
+    String [] argv = {
+      "--bindir",
+      JAR_GEN_DIR,
+      "--outdir",
+      CODE_GEN_DIR,
+    };
+    runFailedGenerationTest(argv, HsqldbTestServer.getTableName());
+  }
 }