SQOOP-205. Add getJob() to JobBase
(Garrett Wu via Aaron Kimball)

From: Aaron Kimball <akimball83@gmail.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1150032 13f79535-47bb-0310-9956-ffa450edef68
commit f9f45cf55d
parent 0efc5a4d55
@@ -254,7 +254,6 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
     return success;
   }
 
-
   /**
    * Run an export job to dump a table from HDFS to a database. If a staging
    * table is specified and the connection manager supports staging of data,
@@ -333,7 +332,7 @@ public void runExport() throws ExportException, IOException {
     configureMapper(job, tableName, tableClassName);
     configureNumTasks(job);
     cacheJars(job, context.getConnManager());
-
+    setJob(job);
     boolean success = runJob(job);
     if (!success) {
       throw new ExportException("Export job failed!");
@@ -163,6 +163,7 @@ public void runImport(String tableName, String ormJarFile, String splitByCol,
     cacheJars(job, getContext().getConnManager());
 
     jobSetup(job);
+    setJob(job);
     boolean success = runJob(job);
     if (!success) {
       throw new ImportException("Import job failed!");
@@ -61,6 +61,8 @@ public class JobBase {
   protected Class<? extends InputFormat> inputFormatClass;
   protected Class<? extends OutputFormat> outputFormatClass;
 
+  private Job mrJob;
+
   private ClassLoader prevClassLoader = null;
 
   public JobBase() {
@@ -292,6 +294,19 @@ protected int configureNumTasks(Job job) throws IOException {
     return numMapTasks;
   }
 
+  /** Set the main job that will be run. */
+  protected void setJob(Job job) {
+    mrJob = job;
+  }
+
+  /**
+   * @return the main MapReduce job that is being run, or null if no
+   * job has started.
+   */
+  public Job getJob() {
+    return mrJob;
+  }
+
   /**
    * Actually run the MapReduce job.
    */
@@ -132,6 +132,7 @@ public boolean runMergeJob() throws IOException {
 
       // Make sure Sqoop and anything else we need is on the classpath.
       cacheJars(job, null);
+      setJob(job);
       return this.runJob(job);
     } catch (InterruptedException ie) {
       throw new IOException(ie);
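For orientation, a minimal, hypothetical sketch of what the new accessor enables: once runImport(), runExport(), or runMergeJob() has called setJob(), a caller that holds the JobBase instance can fetch the underlying MapReduce Job and inspect it. Before this patch the Job object stayed local to those methods, so there was no way to reach it afterward. Only setJob()/getJob() come from the patch above; the helper class below, its name, and the assumed package path for JobBase are illustrative, and the Job methods are standard Hadoop MapReduce API.

import java.io.IOException;

import org.apache.hadoop.mapreduce.Job;
// Assumed package path for JobBase at the time of this commit.
import com.cloudera.sqoop.mapreduce.JobBase;

// Hypothetical helper, for illustration only.
public class JobStatusReporter {

  /** Print basic status for the MapReduce job a JobBase instance holds. */
  public static void report(JobBase jobBase) throws IOException {
    Job job = jobBase.getJob();  // null until setJob() has been called
    if (job == null) {
      System.out.println("No MapReduce job has been started yet.");
      return;
    }
    System.out.println("Job id:       " + job.getJobID());
    System.out.println("Map progress: " + job.mapProgress());
    System.out.println("Successful:   " + job.isSuccessful());
  }
}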