SQOOP-1682: Test cases *LobAvroImportTest are failing
(Jarek Jarcec Cecho via Abraham Elmahrek)
parent 69203028b2
commit 63d1fc2214
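
The change below makes LargeObjectLoader's work directory an explicit constructor argument instead of a hard-coded temp directory, and updates every caller to pass one. For orientation, here is a minimal sketch of the new calling pattern in a MapReduce mapper; the mapper class itself is illustrative, and only LargeObjectLoader, FileOutputFormat.getWorkOutputPath and the two-argument constructor come from the diff:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.sqoop.lib.LargeObjectLoader;

// Illustrative mapper, not part of the commit: it only demonstrates the
// two-argument LargeObjectLoader constructor introduced below.
public class ExampleLobImportMapper
    extends Mapper<LongWritable, Text, Text, NullWritable> {

  private LargeObjectLoader lobLoader;

  @Override
  protected void setup(Context context)
      throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    // Before this commit the loader picked its own work directory
    // (java.io.tmpdir/SQOOP); callers now pass one explicitly, typically
    // the task's work output path so external LOB files land next to the
    // task output.
    Path workPath = FileOutputFormat.getWorkOutputPath(context);
    this.lobLoader = new LargeObjectLoader(conf, workPath);
  }

  @Override
  protected void cleanup(Context context) throws IOException {
    // LargeObjectLoader implements Closeable, so release it with the task.
    if (this.lobLoader != null) {
      this.lobLoader.close();
    }
  }
}
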
@@ -48,8 +48,8 @@ public class LargeObjectLoader extends org.apache.sqoop.lib.LargeObjectLoader {
    * Create a new LargeObjectLoader.
    * @param conf the Configuration to use
    */
-  public LargeObjectLoader(Configuration conf)
+  public LargeObjectLoader(Configuration conf, Path workPath)
       throws IOException {
-    super(conf);
+    super(conf, workPath);
   }
 }
@@ -70,10 +70,10 @@ public class LargeObjectLoader implements Closeable {
    * Create a new LargeObjectLoader.
    * @param conf the Configuration to use
    */
-  public LargeObjectLoader(Configuration conf)
+  public LargeObjectLoader(Configuration conf, Path workPath)
       throws IOException {
     this.conf = conf;
-    this.workPath = new Path(System.getProperty("java.io.tmpdir"), "SQOOP");
+    this.workPath = workPath;
     this.fs = FileSystem.get(conf);
     this.curBlobWriter = null;
     this.curClobWriter = null;
@@ -51,7 +51,7 @@ protected void setup(Context context)
       throws IOException, InterruptedException {
     Configuration conf = context.getConfiguration();
     schema = AvroJob.getMapOutputSchema(conf);
-    lobLoader = new LargeObjectLoader(conf);
+    lobLoader = new LargeObjectLoader(conf, FileOutputFormat.getWorkOutputPath(context));
     bigDecimalFormatString = conf.getBoolean(
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
@@ -54,7 +54,7 @@ public class HBaseBulkImportMapper
   protected void setup(Context context)
       throws IOException, InterruptedException {
     this.conf = context.getConfiguration();
-    this.lobLoader = new LargeObjectLoader(this.conf);
+    this.lobLoader = new LargeObjectLoader(this.conf, new Path( this.conf.get("sqoop.hbase.lob.extern.dir", "/tmp/sqoop-hbase-" + context.getTaskAttemptID())));
 
     // Get the implementation of PutTransformer to use.
     // By default, we call toString() on every non-null field.
@@ -24,8 +24,10 @@
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.sqoop.avro.AvroUtil;
 
 import java.io.IOException;
@@ -50,7 +52,7 @@ protected void setup(Context context)
     bigDecimalFormatString = conf.getBoolean(
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
-    lobLoader = new LargeObjectLoader(conf);
+    lobLoader = new LargeObjectLoader(conf, new Path(conf.get("sqoop.kite.lob.extern.dir", "/tmp/sqoop-parquet-" + context.getTaskAttemptID())));
   }
 
   @Override
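
The HBase and Kite hunks above have no FileOutputFormat work directory to lean on, so they resolve the external LOB directory from a configuration key, falling back to a per-task-attempt path under /tmp. A small sketch of that lookup, assuming only the key names and default prefixes visible in the diff; the helper class is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskAttemptID;

// Illustrative helper, not part of the commit: it restates the fallback used
// in the HBase and Kite hunks above so the precedence is explicit.
public final class ExternalLobDirExample {

  private ExternalLobDirExample() { }

  // Returns the directory named by the configuration key if it is set,
  // otherwise a per-task-attempt default such as
  // /tmp/sqoop-hbase-attempt_... or /tmp/sqoop-parquet-attempt_...
  public static Path externalLobDir(Configuration conf, String key,
      String defaultPrefix, TaskAttemptID attempt) {
    return new Path(conf.get(key, defaultPrefix + attempt));
  }
}

For the HBase path the key is sqoop.hbase.lob.extern.dir with prefix /tmp/sqoop-hbase-; for the Kite/Parquet path it is sqoop.kite.lob.extern.dir with /tmp/sqoop-parquet-.
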
@@ -38,7 +38,7 @@ public class SequenceFileImportMapper
   @Override
   protected void setup(Context context)
       throws IOException, InterruptedException {
-    this.lobLoader = new LargeObjectLoader(context.getConfiguration());
+    this.lobLoader = new LargeObjectLoader(context.getConfiguration(), FileOutputFormat.getWorkOutputPath(context));
   }
 
   @Override
@@ -45,7 +45,7 @@ public TextImportMapper() {
   @Override
   protected void setup(Context context)
       throws IOException, InterruptedException {
-    this.lobLoader = new LargeObjectLoader(context.getConfiguration());
+    this.lobLoader = new LargeObjectLoader(context.getConfiguration(), FileOutputFormat.getWorkOutputPath(context));
   }
 
   @Override
@@ -112,7 +112,7 @@ public SqoopHCatImportHelper(Configuration conf) throws IOException,
       hCatFullTableSchema.append(hfs);
     }
     fieldCount = hCatFullTableSchema.size();
-    lobLoader = new LargeObjectLoader(conf);
+    lobLoader = new LargeObjectLoader(conf, new Path(jobInfo.getTableInfo().getTableLocation()));
     bigDecimalFormatString = conf.getBoolean(
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
@@ -56,7 +56,7 @@ public void setUp() throws IOException, InterruptedException {
     }
     fs.mkdirs(outDir);
 
-    loader = new LargeObjectLoader(conf);
+    loader = new LargeObjectLoader(conf, outDir);
   }
 
   public void testReadClobRef()
@@ -207,8 +207,9 @@ public void testBlobAvroImportExternal() throws IOException, SQLException {
     String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
       + data.length() + ")";
 
-    assertTrue(returnVal.startsWith(expectedStart));
-    assertTrue(returnVal.endsWith(expectedEnd));
+    assertNotNull(returnVal);
+    assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal, returnVal.startsWith(expectedStart));
+    assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal, returnVal.endsWith(expectedEnd));
 
     // Verify that blob data stored in the external lob file is correct.
     BlobRef br = BlobRef.parse(returnVal);
@@ -295,8 +296,9 @@ public void testBlobCompressedAvroImportExternal()
     String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
       + data.length() + ")";
 
-    assertTrue(returnVal.startsWith(expectedStart));
-    assertTrue(returnVal.endsWith(expectedEnd));
+    assertNotNull(returnVal);
+    assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal, returnVal.startsWith(expectedStart));
+    assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal, returnVal.endsWith(expectedEnd));
 
     // Verify that blob data stored in the external lob file is correct.
     BlobRef br = BlobRef.parse(returnVal);