mirror of https://github.com/apache/sqoop.git
SQOOP-1664: Sqoop2: Remove the unused SqoopFileOutputFormat class
(Veena Basavaraj via Jarek Jarcec Cecho)
This commit is contained in:
parent bb52e54eea
commit f43878bc75
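
For context, a MapReduce job that still used the deleted class would have wired it in like any other Hadoop output format. A minimal sketch, assuming a standard Job setup; the driver class name, job name, and output path below are illustrative, not taken from this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.sqoop.job.io.SqoopWritable;
    import org.apache.sqoop.job.mr.SqoopFileOutputFormat;

    public class SubmitSketch {
      public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "sqoop-job"); // illustrative name
        job.setOutputFormatClass(SqoopFileOutputFormat.class); // the class removed below
        // Key/value types match FileOutputFormat<SqoopWritable, NullWritable> below.
        job.setOutputKeyClass(SqoopWritable.class);
        job.setOutputValueClass(NullWritable.class);
        FileOutputFormat.setOutputPath(job, new Path("/tmp/sqoop-out")); // illustrative path
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }

Per the commit message, nothing in the tree wires the class up this way any more, which is what makes the removal safe.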
SqoopFileOutputFormat.java
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.job.mr;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.DefaultCodec;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobStatus;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.log4j.Logger;
-import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.job.MRJobConstants;
-import org.apache.sqoop.job.io.SqoopWritable;
-
-/**
- * An output format for MapReduce job.
- */
-public class SqoopFileOutputFormat
-    extends FileOutputFormat<SqoopWritable, NullWritable> {
-
-  public static final Logger LOG =
-      Logger.getLogger(SqoopFileOutputFormat.class);
-
-  public static final Class<? extends CompressionCodec> DEFAULT_CODEC =
-      DefaultCodec.class;
-
-  @Override
-  public RecordWriter<SqoopWritable, NullWritable> getRecordWriter(
-      TaskAttemptContext context) throws IOException {
-    Configuration conf = context.getConfiguration();
-
-    Path filepath = getDefaultWorkFile(context, "");
-    String filename = filepath.toString();
-    conf.set(MRJobConstants.JOB_MR_OUTPUT_FILE, filename);
-
-    boolean isCompressed = getCompressOutput(context);
-    if (isCompressed) {
-      String codecname =
-          conf.get(MRJobConstants.HADOOP_COMPRESS_CODEC, DEFAULT_CODEC.getName());
-      conf.set(MRJobConstants.JOB_MR_OUTPUT_CODEC, codecname);
-    }
-
-    return new SqoopOutputFormatLoadExecutor(context).getRecordWriter();
-  }
-
-  @Override
-  public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
-    Path output = getOutputPath(context);
-    return new DestroyerFileOutputCommitter(output, context);
-  }
-
-  public class DestroyerFileOutputCommitter extends FileOutputCommitter {
-
-    public DestroyerFileOutputCommitter(Path outputPath, TaskAttemptContext context) throws IOException {
-      super(outputPath, context);
-    }
-
-    @Override
-    public void commitJob(JobContext context) throws IOException {
-      super.commitJob(context);
-
-      Configuration config = context.getConfiguration();
-      SqoopDestroyerExecutor.executeDestroyer(true, config, Direction.FROM);
-      SqoopDestroyerExecutor.executeDestroyer(true, config, Direction.TO);
-    }
-
-    @Override
-    public void abortJob(JobContext context, JobStatus.State state) throws IOException {
-      super.abortJob(context, state);
-
-      Configuration config = context.getConfiguration();
-      SqoopDestroyerExecutor.executeDestroyer(false, config, Direction.FROM);
-      SqoopDestroyerExecutor.executeDestroyer(false, config, Direction.TO);
-    }
-  }
-}
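
The DestroyerFileOutputCommitter above is the one piece of behavior worth noting in this removal: it subclasses FileOutputCommitter, lets the superclass finish the normal commit or abort, and only then fires a success or failure hook. A generic sketch of that pattern; HookedCommitter and runHook are illustrative stand-ins for the removed class and its SqoopDestroyerExecutor calls:

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.JobStatus;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;

    public class HookedCommitter extends FileOutputCommitter {

      public HookedCommitter(Path outputPath, TaskAttemptContext context) throws IOException {
        super(outputPath, context);
      }

      @Override
      public void commitJob(JobContext context) throws IOException {
        super.commitJob(context); // finalize the job's output files first
        runHook(true, context);   // then report success to the cleanup hook
      }

      @Override
      public void abortJob(JobContext context, JobStatus.State state) throws IOException {
        super.abortJob(context, state); // discard partial output first
        runHook(false, context);        // then report failure
      }

      private void runHook(boolean success, JobContext context) {
        // Hypothetical cleanup hook. The removed class called
        // SqoopDestroyerExecutor.executeDestroyer(success, conf, direction)
        // here for both Direction.FROM and Direction.TO.
      }
    }

Calling the hook after the super call is deliberate: the destroyers only run once Hadoop has finished committing or discarding the job's output.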
JobUtils.java
@@ -26,12 +26,7 @@
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.sqoop.job.io.SqoopWritable;
-import org.apache.sqoop.job.mr.SqoopFileOutputFormat;
-import org.apache.sqoop.job.mr.SqoopInputFormat;
-import org.apache.sqoop.job.mr.SqoopMapper;
-import org.apache.sqoop.job.mr.SqoopNullOutputFormat;
 import org.apache.sqoop.job.mr.SqoopSplit;
-import org.junit.Assert;
 
 public class JobUtils {
 
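The imports this hunk keeps (Mapper, OutputFormat, SqoopWritable, SqoopSplit) suggest JobUtils now takes the mapper and output format as parameters instead of hard-coding the org.apache.sqoop.job.mr classes it used to import. A hedged sketch of what such a helper could look like; the class name JobUtilsSketch, the method name runJob, and the exact generic bounds are assumptions, since the hunk shows only the import block:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.OutputFormat;
    import org.apache.sqoop.job.io.SqoopWritable;
    import org.apache.sqoop.job.mr.SqoopSplit;

    public class JobUtilsSketch {
      // Hypothetical parameterized helper; not code from this commit.
      public static boolean runJob(Configuration conf,
          Class<? extends Mapper<SqoopSplit, NullWritable, SqoopWritable, NullWritable>> mapper,
          Class<? extends OutputFormat<SqoopWritable, NullWritable>> output) throws Exception {
        Job job = Job.getInstance(conf);
        job.setMapperClass(mapper);
        job.setMapOutputKeyClass(SqoopWritable.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setOutputFormatClass(output);
        return job.waitForCompletion(true);
      }
    }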