From b72a134b52e8638da9ecdd914de4b276d16e37ec Mon Sep 17 00:00:00 2001
From: Andrew Bayer
Date: Fri, 22 Jul 2011 20:03:36 +0000
Subject: [PATCH] Show imported row count after job completion.

From: Aaron Kimball

git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149867 13f79535-47bb-0310-9956-ffa450edef68
---
 .../org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java b/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
index 9eeedb6d..0aabad6c 100644
--- a/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
+++ b/src/java/org/apache/hadoop/sqoop/mapreduce/ImportJobBase.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
@@ -225,11 +226,15 @@ protected void runJob(Job job) throws ClassNotFoundException, IOException,
     PerfCounters counters = new PerfCounters();
     counters.startClock();
 
-    boolean success = job.waitForCompletion(false);
+    boolean success = job.waitForCompletion(true);
     counters.stopClock();
     counters.addBytes(job.getCounters().getGroup("FileSystemCounters")
         .findCounter("HDFS_BYTES_WRITTEN").getValue());
     LOG.info("Transferred " + counters.toString());
+    long numRecords = job.getCounters()
+        .findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue();
+    LOG.info("Retrieved " + numRecords + " records.");
+
    if (!success) {
       throw new ImportException("Import job failed!");
     }
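
To try the same counter read outside of Sqoop, the following is a minimal, self-contained sketch against the stock Hadoop MapReduce Job API. It assumes a Hadoop 2.x or later client; the class name, job name, and command-line paths are placeholders introduced for illustration, not anything defined by the patch.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class CounterReportExample {
  public static void main(String[] args)
      throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    // "counter-report-example" is a placeholder job name; the job uses the
    // default identity mapper/reducer, which is enough to exercise the counter.
    Job job = Job.getInstance(conf, "counter-report-example");
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // waitForCompletion(true) blocks until the job finishes and logs progress,
    // mirroring the change the patch makes in runJob().
    boolean success = job.waitForCompletion(true);

    // Counters are only meaningful after completion; for a map-only import job
    // the map output record count corresponds to the number of rows written.
    long numRecords = job.getCounters()
        .findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue();
    System.out.println("Retrieved " + numRecords + " records."
        + (success ? "" : " (job failed)"));
  }
}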