Show imported row count after job completion.
From: Aaron Kimball <aaron@cloudera.com>

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149867 13f79535-47bb-0310-9956-ffa450edef68
commit b72a134b52
parent bb29ce9492
@@ -36,6 +36,7 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
@@ -225,11 +226,15 @@ protected void runJob(Job job) throws ClassNotFoundException, IOException,
     PerfCounters counters = new PerfCounters();
     counters.startClock();
 
-    boolean success = job.waitForCompletion(false);
+    boolean success = job.waitForCompletion(true);
     counters.stopClock();
     counters.addBytes(job.getCounters().getGroup("FileSystemCounters")
         .findCounter("HDFS_BYTES_WRITTEN").getValue());
     LOG.info("Transferred " + counters.toString());
+    long numRecords = job.getCounters()
+        .findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue();
+    LOG.info("Retrieved " + numRecords + " records.");
+
     if (!success) {
       throw new ImportException("Import job failed!");
     }
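For readers following the patch, here is a minimal, self-contained sketch of the same counter-based technique: after a MapReduce job finishes, the driver reads TaskCounter.MAP_OUTPUT_RECORDS to report how many records the map phase emitted. The class name RowCountExample, the runAndCountRecords helper, and the bare Job setup in main are illustrative assumptions, not part of the Sqoop code.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskCounter;

public class RowCountExample {

  // Waits for the job to finish, then reports how many records the map
  // phase emitted (for a map-only import this equals the imported row count).
  public static long runAndCountRecords(Job job)
      throws IOException, InterruptedException, ClassNotFoundException {
    // Block until completion; 'true' also prints job progress to stdout.
    boolean success = job.waitForCompletion(true);
    if (!success) {
      throw new IOException("Job failed: " + job.getJobName());
    }

    Counters counters = job.getCounters();
    long numRecords =
        counters.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue();
    System.out.println("Retrieved " + numRecords + " records.");
    return numRecords;
  }

  public static void main(String[] args) throws Exception {
    // Hypothetical driver setup: a real job would also configure the mapper,
    // input/output formats, and paths before submission.
    Job job = Job.getInstance(new Configuration(), "row-count-example");
    runAndCountRecords(job);
  }
}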