
SQOOP-1682: Test cases *LobAvroImportTest are failing

(Jarek Jarcec Cecho via Abraham Elmahrek)
Abraham Elmahrek 2014-11-05 16:12:06 -08:00
parent 69203028b2
commit 63d1fc2214
10 changed files with 19 additions and 15 deletions


@@ -48,8 +48,8 @@ public class LargeObjectLoader extends org.apache.sqoop.lib.LargeObjectLoader {
    * Create a new LargeObjectLoader.
    * @param conf the Configuration to use
    */
-  public LargeObjectLoader(Configuration conf)
+  public LargeObjectLoader(Configuration conf, Path workPath)
       throws IOException {
-    super(conf);
+    super(conf, workPath);
   }
 }


@@ -70,10 +70,10 @@ public class LargeObjectLoader implements Closeable {
    * Create a new LargeObjectLoader.
    * @param conf the Configuration to use
    */
-  public LargeObjectLoader(Configuration conf)
+  public LargeObjectLoader(Configuration conf, Path workPath)
       throws IOException {
     this.conf = conf;
-    this.workPath = new Path(System.getProperty("java.io.tmpdir"), "SQOOP");
+    this.workPath = workPath;
     this.fs = FileSystem.get(conf);
     this.curBlobWriter = null;
     this.curClobWriter = null;
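
The heart of the fix is above: LargeObjectLoader no longer invents its work directory from java.io.tmpdir but takes it from the caller, so each call site can pick a location that its job actually manages and cleans up. A minimal sketch of the new call site follows; the path chosen here only mimics the removed default and is not part of this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.sqoop.lib.LargeObjectLoader;

    public class LoaderConstruction {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The caller now decides where LOB temp files land; this value
        // mirrors the removed default purely for illustration.
        Path workPath = new Path(System.getProperty("java.io.tmpdir"), "SQOOP");
        // LargeObjectLoader implements Closeable, so try-with-resources
        // guarantees any open LOB writers are closed.
        try (LargeObjectLoader loader = new LargeObjectLoader(conf, workPath)) {
          // ... materialize BLOB/CLOB columns through the loader ...
        }
      }
    }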


@@ -51,7 +51,7 @@ protected void setup(Context context)
       throws IOException, InterruptedException {
     Configuration conf = context.getConfiguration();
     schema = AvroJob.getMapOutputSchema(conf);
-    lobLoader = new LargeObjectLoader(conf);
+    lobLoader = new LargeObjectLoader(conf, FileOutputFormat.getWorkOutputPath(context));
     bigDecimalFormatString = conf.getBoolean(
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
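
For the file-based mappers, the work path now comes from FileOutputFormat.getWorkOutputPath(context), i.e. the task attempt's scratch directory under the job output: side files from failed or speculative attempts are discarded with the attempt instead of accumulating in a shared temp dir. A sketch of the pattern under that assumption, with a hypothetical mapper:

    import java.io.IOException;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.sqoop.lib.LargeObjectLoader;

    // Hypothetical mapper showing the life cycle this commit relies on.
    public class LobAwareMapper
        extends Mapper<LongWritable, Text, Text, NullWritable> {
      private LargeObjectLoader lobLoader;

      @Override
      protected void setup(Context context)
          throws IOException, InterruptedException {
        // Per-attempt directory managed by the output committer.
        lobLoader = new LargeObjectLoader(context.getConfiguration(),
            FileOutputFormat.getWorkOutputPath(context));
      }

      @Override
      protected void cleanup(Context context) throws IOException {
        if (lobLoader != null) {
          lobLoader.close();  // flush and close any open LOB writers
        }
      }
    }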


@@ -54,7 +54,7 @@ public class HBaseBulkImportMapper
   protected void setup(Context context)
       throws IOException, InterruptedException {
     this.conf = context.getConfiguration();
-    this.lobLoader = new LargeObjectLoader(this.conf);
+    this.lobLoader = new LargeObjectLoader(this.conf, new Path(this.conf.get("sqoop.hbase.lob.extern.dir", "/tmp/sqoop-hbase-" + context.getTaskAttemptID())));
     // Get the implementation of PutTransformer to use.
     // By default, we call toString() on every non-null field.
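
The HBase bulk path has no FileOutputFormat work directory to borrow, so the loader falls back to a configurable location with a per-task-attempt default under /tmp; the Parquet mapper below does the same with sqoop.kite.lob.extern.dir. A small sketch of that lookup in isolation (the explicit value and the literal attempt id are assumptions for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class LobDirLookup {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Operators may pin the directory explicitly...
        conf.set("sqoop.hbase.lob.extern.dir", "/user/example/lob-work");
        // ...otherwise a per-attempt default under /tmp is derived
        // (attempt id shown as a literal placeholder here).
        Path lobDir = new Path(conf.get("sqoop.hbase.lob.extern.dir",
            "/tmp/sqoop-hbase-attempt_0000"));
        System.out.println("LOB work dir: " + lobDir);
      }
    }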


@@ -24,8 +24,10 @@
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.sqoop.avro.AvroUtil;
 
 import java.io.IOException;
@@ -50,7 +52,7 @@ protected void setup(Context context)
     bigDecimalFormatString = conf.getBoolean(
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
-    lobLoader = new LargeObjectLoader(conf);
+    lobLoader = new LargeObjectLoader(conf, new Path(conf.get("sqoop.kite.lob.extern.dir", "/tmp/sqoop-parquet-" + context.getTaskAttemptID())));
   }
 
   @Override


@@ -38,7 +38,7 @@ public class SequenceFileImportMapper
   @Override
   protected void setup(Context context)
       throws IOException, InterruptedException {
-    this.lobLoader = new LargeObjectLoader(context.getConfiguration());
+    this.lobLoader = new LargeObjectLoader(context.getConfiguration(), FileOutputFormat.getWorkOutputPath(context));
   }
 
   @Override


@@ -45,7 +45,7 @@ public TextImportMapper() {
   @Override
   protected void setup(Context context)
       throws IOException, InterruptedException {
-    this.lobLoader = new LargeObjectLoader(context.getConfiguration());
+    this.lobLoader = new LargeObjectLoader(context.getConfiguration(), FileOutputFormat.getWorkOutputPath(context));
   }
 
   @Override


@@ -112,7 +112,7 @@ public SqoopHCatImportHelper(Configuration conf) throws IOException,
       hCatFullTableSchema.append(hfs);
     }
     fieldCount = hCatFullTableSchema.size();
-    lobLoader = new LargeObjectLoader(conf);
+    lobLoader = new LargeObjectLoader(conf, new Path(jobInfo.getTableInfo().getTableLocation()));
     bigDecimalFormatString = conf.getBoolean(
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
         ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);


@@ -56,7 +56,7 @@ public void setUp() throws IOException, InterruptedException {
     }
     fs.mkdirs(outDir);
-    loader = new LargeObjectLoader(conf);
+    loader = new LargeObjectLoader(conf, outDir);
   }
 
   public void testReadClobRef()


@@ -207,8 +207,9 @@ public void testBlobAvroImportExternal() throws IOException, SQLException {
     String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
       + data.length() + ")";
 
-    assertTrue(returnVal.startsWith(expectedStart));
-    assertTrue(returnVal.endsWith(expectedEnd));
+    assertNotNull(returnVal);
+    assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal, returnVal.startsWith(expectedStart));
+    assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal, returnVal.endsWith(expectedEnd));
 
     // Verify that blob data stored in the external lob file is correct.
     BlobRef br = BlobRef.parse(returnVal);
@@ -295,8 +296,9 @@ public void testBlobCompressedAvroImportExternal()
     String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
       + data.length() + ")";
 
-    assertTrue(returnVal.startsWith(expectedStart));
-    assertTrue(returnVal.endsWith(expectedEnd));
+    assertNotNull(returnVal);
+    assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal, returnVal.startsWith(expectedStart));
+    assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal, returnVal.endsWith(expectedEnd));
 
     // Verify that blob data stored in the external lob file is correct.
     BlobRef br = BlobRef.parse(returnVal);
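
Besides the null guard, the test fix switches to the assertTrue(String message, boolean) overload, so a failing run now reports both the expectation and the offending value instead of a bare assertion error. A self-contained illustration with made-up values:

    import static org.junit.Assert.assertNotNull;
    import static org.junit.Assert.assertTrue;

    public class AssertMessageExample {
      public static void main(String[] args) {
        // Made-up stand-ins for the import's return value and expectations.
        String returnVal = "prefix...payload...suffix";
        String expectedStart = "prefix";
        String expectedEnd = "suffix";

        // Fail fast with a clear trace if the import produced nothing.
        assertNotNull(returnVal);
        // On failure the message echoes expectation and actual value.
        assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal,
            returnVal.startsWith(expectedStart));
        assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal,
            returnVal.endsWith(expectedEnd));
      }
    }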