
SQOOP-3014: Sqoop with HCatalog import loses precision for large numbers that do not fit into double

(Zoltan Toth via Boglarka Egyed)
commit f378328421 (parent e13dd21209)
Boglarka Egyed, 2017-09-12 16:12:56 +02:00
3 changed files with 111 additions and 3 deletions

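Before the file-by-file diff, a minimal standalone sketch of the precision loss being fixed: the old code routed every Number through doubleValue() before building a BigDecimal, and a 64-bit double carries only about 15-17 significant decimal digits. The class name PrecisionLossDemo is ours; the input value is taken from the new testDecimalTypes below.

    import java.math.BigDecimal;
    import java.math.MathContext;

    public class PrecisionLossDemo {
      public static void main(String[] args) {
        // One of the inputs from the new testDecimalTypes below.
        BigDecimal original = new BigDecimal("87658675864540185.123456789123456789");

        // Old path: n.doubleValue() squeezes the value through a 64-bit double
        // before the BigDecimal is built, so digits beyond double precision are lost.
        BigDecimal viaDouble = new BigDecimal(original.doubleValue(), MathContext.DECIMAL128);
        System.out.println(viaDouble);  // integer part already off, fraction gone

        // New path: the BigDecimal is passed through untouched.
        System.out.println(original);   // 87658675864540185.123456789123456789
      }
    }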
src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java

@@ -149,6 +149,13 @@ public SqoopHCatImportHelper(Configuration conf) throws IOException,
     LOG.debug("Static partition key used : " + partKeysString);
   }
 
+  /* This construct is only for testing and avoiding static method
+   * usage
+   */
+  SqoopHCatImportHelper() {
+  }
+
   public HCatRecord convertToHCatRecord(SqoopRecord sqr) throws IOException,
       InterruptedException {
     try {
@@ -441,13 +448,24 @@ private Object convertNumberTypes(Object val, HCatFieldSchema hfs) {
       HiveChar hChar = new HiveChar(val.toString(), cti.getLength());
       return hChar;
     } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
-      BigDecimal bd = new BigDecimal(n.doubleValue(),
-        MathContext.DECIMAL128);
-      return HiveDecimal.create(bd);
+      return convertNumberIntoHiveDecimal(n);
     }
     return null;
   }
+
+  HiveDecimal convertNumberIntoHiveDecimal(Number number) {
+    BigDecimal bigDecimal = null;
+    if (number instanceof BigDecimal) {
+      bigDecimal = (BigDecimal) number;
+    } else if (number instanceof Long) {
+      bigDecimal = BigDecimal.valueOf((Long) number);
+    } else if (number instanceof Double) {
+      bigDecimal = BigDecimal.valueOf((Double) number);
+    }
+    return HiveDecimal.create(bigDecimal);
+  }
+
   public void cleanup() throws IOException {
     if (null != lobLoader) {
       lobLoader.close();

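A side note on the Double branch above: BigDecimal.valueOf(double) goes through Double.toString(), which produces the shortest decimal string that round-trips to the same double, whereas the BigDecimal(double) constructor expands the exact binary value. That is why the new unit test below can expect 0.12345678912345678 verbatim. A JDK-only sketch (class name ValueOfVsConstructor is ours):

    import java.math.BigDecimal;

    public class ValueOfVsConstructor {
      public static void main(String[] args) {
        double d = 0.12345678912345678; // input of the Double unit test below

        // Shortest round-tripping decimal, via Double.toString():
        System.out.println(BigDecimal.valueOf(d)); // 0.12345678912345678

        // Exact binary expansion of the nearest double -- many more digits:
        System.out.println(new BigDecimal(d));
      }
    }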
src/test/org/apache/sqoop/hcat/HCatalogImportTest.java

@@ -42,6 +42,7 @@
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
@@ -400,6 +401,36 @@ public void testFloatTypes() throws Exception {
     runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
   }
 
+  @Test
+  public void testDecimalTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String hcatTable = getTableName().toUpperCase();
+    boolean allowRounding = false;
+    String dbTypeNumeric = "numeric";
+    String dbTypeDecimal = "decimal";
+    int sqlTypeNumeric = Types.NUMERIC;
+    int sqlTypeDecimal = Types.DECIMAL;
+    HCatFieldSchema.Type hcatTypeDecimal = HCatFieldSchema.Type.DECIMAL;
+    BigDecimal inputValue1 = new BigDecimal("454018528782.42006329");
+    HiveDecimal expectedValue1 = HiveDecimal.create(new BigDecimal("454018528782.42006"), allowRounding);
+    BigDecimal inputValue2 = new BigDecimal("87658675864540185.123456789123456789");
+    HiveDecimal expectedValue2 = HiveDecimal.create(new BigDecimal("87658675864540185.12346"), allowRounding);
+    int precision = 22;
+    int scale = 5;
+    ColumnGenerator[] hcatColumns = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0), dbTypeNumeric, sqlTypeNumeric,
+        hcatTypeDecimal, precision, scale, expectedValue1, inputValue1, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1), dbTypeDecimal, sqlTypeDecimal,
+        hcatTypeDecimal, precision, scale, expectedValue2, inputValue2, KeyType.NOT_A_KEY)
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, hcatTable, hcatColumns, null);
+  }
+
   @Test
   public void testNumberTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;

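How the expected values in testDecimalTypes relate to the inputs: the columns are declared with precision 22 and scale 5, i.e. DECIMAL(22,5), so on import the fractional part is cut back to five digits with half-up rounding; 454018528782.42006329 becomes 454018528782.42006 and 87658675864540185.123456789123456789 becomes 87658675864540185.12346. A JDK-only sketch of the same adjustment (class name ScaleSketch is ours; the real enforcement happens inside Hive/HCatalog, not through this call):

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class ScaleSketch {
      public static void main(String[] args) {
        BigDecimal in1 = new BigDecimal("454018528782.42006329");
        BigDecimal in2 = new BigDecimal("87658675864540185.123456789123456789");

        // Reduce both to the declared scale of 5, rounding half-up:
        System.out.println(in1.setScale(5, RoundingMode.HALF_UP)); // 454018528782.42006
        System.out.println(in2.setScale(5, RoundingMode.HALF_UP)); // 87658675864540185.12346
      }
    }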
src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java

@@ -0,0 +1,59 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.mapreduce.hcat;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.junit.Before;
import org.junit.Test;

import java.math.BigDecimal;

import static org.junit.Assert.assertEquals;

public class TestSqoopHCatImportHelper {

  private SqoopHCatImportHelper importHelper;

  @Before
  public void init() {
    importHelper = new SqoopHCatImportHelper();
  }

  @Test
  public void convertLongNumberIntoBigDecimalWithoutRounding() {
    Long input = new Long("20160523112914897");
    HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input);
    assertEquals(new BigDecimal("20160523112914897"), actual.bigDecimalValue());
  }

  @Test
  public void convertDoubleNumberIntoBigDecimalWithoutRounding() {
    Double input = new Double("0.12345678912345678");
    HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input);
    assertEquals(new BigDecimal("0.12345678912345678"), actual.bigDecimalValue());
  }

  @Test
  public void keepBigDecimalNumberIfInputIsBigDecimal() {
    BigDecimal input = new BigDecimal("87658675864540185.123456789123456789");
    HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input);
    assertEquals(new BigDecimal("87658675864540185.123456789123456789"), actual.bigDecimalValue());
  }
}
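Why 20160523112914897 is a good regression value for the Long test: doubles represent integers exactly only up to 2^53 = 9007199254740992 (about 9.0e15), and this 17-digit value lies above that, so the old double round trip could not have preserved it. A quick check (class name DoubleExactnessCheck is ours):

    public class DoubleExactnessCheck {
      public static void main(String[] args) {
        long input = 20160523112914897L;   // value from the Long test above
        double viaDouble = (double) input; // nearest double is ...896, not ...897
        System.out.println((long) viaDouble == input); // false
      }
    }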