5
0
mirror of https://github.com/apache/sqoop.git synced 2025-05-21 11:21:39 +08:00

SQOOP-436 Enable verbose logging for MapReduce jobs

git-svn-id: https://svn.apache.org/repos/asf/sqoop/trunk@1351503 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Bilung Lee 2012-06-18 21:49:42 +00:00
parent 78d107f171
commit cf8602eb77
12 changed files with 100 additions and 12 deletions

View File

@ -38,6 +38,7 @@
import com.cloudera.sqoop.tool.SqoopTool;
import com.cloudera.sqoop.util.RandomHash;
import com.cloudera.sqoop.util.StoredAsProperty;
import org.apache.sqoop.util.LoggingUtils;
/**
* Configurable state used by Sqoop tools.
@ -85,6 +86,7 @@ public String toString() {
// arguments in the appropriate tools. The names of all command-line args
// are stored as constants in BaseSqoopTool.
@StoredAsProperty("verbose") private boolean verbose;
@StoredAsProperty("db.connect.string") private String connectString;
@StoredAsProperty("db.table") private String tableName;
private String [] columns; // Array stored as db.column.list.
@ -560,6 +562,11 @@ public void loadProperties(Properties props) {
// Delimiters were previously memoized; don't let the tool override
// them with defaults.
this.areDelimsManuallySet = true;
// If we loaded true verbose flag, we need to apply it
if (this.verbose) {
LoggingUtils.setDebugLevel();
}
}
/**
@ -806,6 +813,9 @@ private void initDefaults(Configuration baseConfiguration) {
// Creating instances for user specific mapping
this.mapColumnHive = new Properties();
this.mapColumnJava = new Properties();
// We do not want to be verbose too much if not explicitly needed
this.verbose = false;
}
/**
@ -893,6 +903,14 @@ public static char toChar(String charish) throws InvalidOptionsException {
}
}
/**
 * @return true if verbose (DEBUG-level) logging was requested, either
 *         on the command line or restored from a saved job's properties.
 */
public boolean getVerbose() {
return verbose;
}
/**
 * Set whether verbose (DEBUG-level) logging should be used for this run.
 * The flag is persisted via the "verbose" stored property and later
 * propagated into MapReduce jobs.
 *
 * @param beVerbose true to enable verbose logging
 */
public void setVerbose(boolean beVerbose) {
this.verbose = beVerbose;
}
/**
* Get the temporary directory; guaranteed to end in File.separator
* (e.g., '/').

View File

@ -22,13 +22,12 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Mapper;
/**
* Identity mapper that continuously reports progress via a background thread.
*/
public class AutoProgressMapper<KEYIN, VALIN, KEYOUT, VALOUT>
extends Mapper<KEYIN, VALIN, KEYOUT, VALOUT> {
extends SqoopMapper<KEYIN, VALIN, KEYOUT, VALOUT> {
public static final Log LOG = LogFactory.getLog(
AutoProgressMapper.class.getName());

View File

@ -351,6 +351,7 @@ public void runExport() throws ExportException, IOException {
// Set the external jar to use for the job.
job.getConfiguration().set("mapred.jar", ormJarFile);
propagateOptionsToJob(job);
configureInputFormat(job, tableName, tableClassName, null);
configureOutputFormat(job, tableName, tableClassName);
configureMapper(job, tableName, tableClassName);

View File

@ -190,6 +190,7 @@ public void runImport(String tableName, String ormJarFile, String splitByCol,
// Set the external jar to use for the job.
job.getConfiguration().set("mapred.jar", ormJarFile);
propagateOptionsToJob(job);
configureInputFormat(job, tableName, tableClassName, splitByCol);
configureOutputFormat(job, tableName, tableClassName);
configureMapper(job, tableName, tableClassName);

View File

@ -57,6 +57,8 @@ public class JobBase {
private ClassLoader prevClassLoader = null;
public static final String PROPERTY_VERBOSE = "sqoop.verbose";
/**
 * Default constructor; delegates to the single-argument constructor
 * with null options (which that constructor must tolerate).
 */
public JobBase() {
this(null);
}
@ -322,4 +324,18 @@ protected void displayRetiredJobNotice(Log log) {
log.info("A jobtracker restart is required for these settings");
log.info("to take effect.");
}
/**
 * Copy selected global options into the job's Configuration so that
 * task-side code can read them at runtime. Only options of general
 * interest are forwarded; currently that is just the verbose flag,
 * stored under PROPERTY_VERBOSE ("sqoop.verbose").
 *
 * @param job destination job whose configuration will be updated
 */
protected void propagateOptionsToJob(Job job) {
  // For now the verbose flag is the only option worth forwarding.
  job.getConfiguration().setBoolean(PROPERTY_VERBOSE, options.getVerbose());
}
}

View File

@ -102,6 +102,8 @@ public boolean runMergeJob() throws IOException {
oldPath = oldPath.makeQualified(fs);
newPath = newPath.makeQualified(fs);
propagateOptionsToJob(job);
FileInputFormat.addInputPath(job, oldPath);
FileInputFormat.addInputPath(job, newPath);

View File

@ -30,7 +30,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.sqoop.util.AsyncSink;
import org.apache.sqoop.util.JdbcUrl;
import org.apache.sqoop.util.PerfCounters;
@ -46,7 +45,7 @@
* Mapper that opens up a pipe to mysqldump and pulls data directly.
*/
public class MySQLDumpMapper
extends Mapper<String, NullWritable, String, NullWritable> {
extends SqoopMapper<String, NullWritable, String, NullWritable> {
public static final Log LOG = LogFactory.getLog(
MySQLDumpMapper.class.getName());
@ -496,7 +495,9 @@ public void map(String splitConditions, NullWritable val, Context context)
// CHECKSTYLE:ON
@Override
protected void setup(Context context) {
protected void setup(Context context)
throws IOException, InterruptedException {
super.setup(context);
this.conf = context.getConfiguration();
}
}

View File

@ -28,7 +28,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.sqoop.util.AsyncSink;
import org.apache.sqoop.util.JdbcUrl;
import org.apache.sqoop.util.LoggingAsyncSink;
@ -48,7 +47,7 @@
* used to interface with mysqlimport.
*/
public class MySQLExportMapper<KEYIN, VALIN>
extends Mapper<KEYIN, VALIN, NullWritable, NullWritable> {
extends SqoopMapper<KEYIN, VALIN, NullWritable, NullWritable> {
public static final Log LOG = LogFactory.getLog(
MySQLExportMapper.class.getName());

View File

@ -22,7 +22,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import com.cloudera.sqoop.manager.MySQLUtils;
import com.cloudera.sqoop.mapreduce.MySQLExportMapper;;
import com.cloudera.sqoop.mapreduce.MySQLExportMapper;
/**
* mysqlimport-based exporter which accepts lines of text from files

View File

@ -0,0 +1,46 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.mapreduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.sqoop.util.LoggingUtils;
import java.io.IOException;
/**
 * Common ancestor for Sqoop mappers, providing shared task-side setup.
 * Concrete Sqoop mappers are strongly encouraged to extend this class
 * rather than Hadoop's Mapper directly.
 */
public abstract class SqoopMapper<KI, VI, KO, VO>
    extends Mapper<KI, VI, KO, VO> {

  @Override
  protected void setup(Context context)
      throws IOException, InterruptedException {
    super.setup(context);

    // Honor the verbose flag that the client side stored in the job
    // configuration (see JobBase.PROPERTY_VERBOSE): when present and
    // true, switch this task's logging to DEBUG level.
    Configuration conf = context.getConfiguration();
    boolean beVerbose = conf.getBoolean(JobBase.PROPERTY_VERBOSE, false);
    if (beVerbose) {
      LoggingUtils.setDebugLevel();
    }
  }
}

View File

@ -33,8 +33,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import com.cloudera.sqoop.ConnFactory;
import com.cloudera.sqoop.Sqoop;
@ -45,6 +43,7 @@
import com.cloudera.sqoop.lib.DelimiterSet;
import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.metastore.JobData;
import org.apache.sqoop.util.LoggingUtils;
/**
* Layer on top of SqoopTool that provides some basic common code
@ -630,8 +629,8 @@ protected void applyCommonOptions(CommandLine in, SqoopOptions out)
// common options.
if (in.hasOption(VERBOSE_ARG)) {
// Immediately switch into DEBUG logging.
Logger.getLogger("org.apache.sqoop").setLevel(Level.DEBUG);
Logger.getLogger("com.cloudera.apache").setLevel(Level.DEBUG);
out.setVerbose(true);
LoggingUtils.setDebugLevel();
LOG.debug("Enabled debug logging.");
}

View File

@ -21,6 +21,8 @@
import java.sql.SQLException;
import org.apache.commons.logging.Log;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
/**
* A helper class for logging.
@ -44,5 +46,9 @@ public static void logAll(Log log, SQLException e) {
}
}
/**
 * Switch Sqoop's loggers to DEBUG level. Covers both package hierarchies
 * Sqoop classes live under: the Apache namespace and the legacy
 * com.cloudera.sqoop namespace.
 */
public static void setDebugLevel() {
  Logger.getLogger("org.apache.sqoop").setLevel(Level.DEBUG);
  // Bug fix: the logger name was "com.cloudera.apache", which matches no
  // Sqoop package (see the com.cloudera.sqoop.* imports throughout this
  // codebase), making the call a no-op. Use the real package prefix.
  Logger.getLogger("com.cloudera.sqoop").setLevel(Level.DEBUG);
}
}