From 5dd8c8aad1c7732754fae190eb5424371ed6fef4 Mon Sep 17 00:00:00 2001
From: Szabolcs Vasas
Date: Wed, 17 Oct 2018 10:16:40 +0200
Subject: [PATCH] SQOOP-3381: Upgrade the Parquet library from 1.6.0 to 1.9.0
 (Fero Szabo via Szabolcs Vasas)

---
 build.gradle                                  | 26 +++--
 gradle.properties                             | 14 +--
 gradle/sqoop-package.gradle                   |  2 +-
 ivy.xml                                       | 26 +++--
 ivy/libraries.properties                      | 14 ++-
 src/java/org/apache/sqoop/avro/AvroUtil.java  | 10 +-
 .../org/apache/sqoop/hive/HiveImport.java     |  6 ++
 .../sqoop/mapreduce/hcat/DerbyPolicy.java     | 96 +++++++++++++++++++
 .../mapreduce/hcat/SqoopHCatUtilities.java    |  4 +
 .../HadoopParquetExportJobConfigurator.java   |  2 +-
 .../HadoopParquetImportJobConfigurator.java   |  6 +-
 .../HadoopParquetMergeJobConfigurator.java    |  2 +-
 .../org/apache/sqoop/TestParquetExport.java   |  8 +-
 .../org/apache/sqoop/TestParquetImport.java   |  5 +-
 .../TestParquetIncrementalImportMerge.java    |  4 +-
 .../hive/TestHiveServer2ParquetImport.java    |  2 +-
 .../hive/minicluster/HiveMiniCluster.java     |  2 +
 .../org/apache/sqoop/util/ParquetReader.java  | 16 ++--
 testdata/hcatalog/conf/hive-site.xml          |  8 ++
 19 files changed, 199 insertions(+), 54 deletions(-)
 create mode 100644 src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java

diff --git a/build.gradle b/build.gradle
index fc7fc0c4..7a0712e3 100644
--- a/build.gradle
+++ b/build.gradle
@@ -81,6 +81,7 @@ configurations.all {
     force group: 'org.apache.avro', name: 'avro', version: avroVersion
     force group: 'org.apache.avro', name: 'avro-mapred', version: avroVersion
     force group: 'com.google.guava', name: 'guava', version: guavaVersion
+    force group: 'com.google.protobuf', name: 'protobuf-java', version: hiveProtobufVersion
   }
   exclude group: 'org.apache.hadoop', module: 'avro'
 }
@@ -102,33 +103,40 @@ dependencies {
   common group: 'org.apache.accumulo', name: 'accumulo-core', version: accumuloVersion
   common group: 'org.apache.accumulo', name: 'accumulo-minicluster', version: accumuloVersion
+
+  common group: 'org.eclipse.jetty', name: 'jetty-runner', version: jettyVersion
+
   common group: 'org.apache.hbase', name: 'hbase-hadoop2-compat', version: hbaseVersion
   common group: 'org.apache.hbase', name: 'hbase-server', version: hbaseVersion
   common group: 'org.apache.hbase', name: 'hbase-client', version: hbaseVersion
   common group: 'org.apache.hbase', name: 'hbase-common', version: hbaseVersion
-  common (group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: hcatalogVersion) {
-    exclude group: 'org.apache.avro', module: 'avro'
-    exclude group: 'org.apache.hive', module: 'hive-exec'
+  common(group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: hcatalogVersion) {
+    exclude group: 'org.apache.avro', module: 'avro'
+    exclude group: 'org.apache.hive', module: 'hive-exec'
+  }
+  common(group: 'org.apache.hive', name: 'hive-exec', version: hcatalogVersion, classifier: 'core') {
+    exclude group : 'org.apache.calcite', module :'calcite-core'
   }
-  common group: 'org.apache.hive', name: 'hive-exec', version: hcatalogVersion, classifier: 'core'
   // Kryo and calcite are dependencies of hive-exec:core
-  common group: 'com.esotericsoftware.kryo', name: 'kryo', version: kryoVersion
+  common group: 'com.esotericsoftware', name: 'kryo', version: kryoVersion
   common group: 'org.apache.calcite', name: 'calcite-core', version: calciteVersion
-  common (group: 'org.apache.hive', name: 'hive-jdbc', version: hcatalogVersion) {
-    exclude group: 'org.apache.avro', module: 'avro'
+  common(group: 'org.apache.hive', name: 'hive-jdbc', version: hcatalogVersion) {
+    exclude group: 'org.apache.avro', module: 'avro'
+    exclude group: 'asm', module: 'asm'
   }
   common group: 'commons-cli', name: 'commons-cli', version: commonscliVersion
   common group: 'commons-logging', name: 'commons-logging', version: commonsloggingVersion
   common group: 'commons-net', name: 'commons-net', version: commonsnetVersion
   common group: 'log4j', name: 'log4j', version: log4jVersion
   common group: 'org.postgresql', name: 'postgresql', version: postgresqlVersion
+  common group: 'org.apache.parquet', name: 'parquet-hadoop-bundle', version: parquetVersion
 
   testCompile group: 'com.h2database', name: 'h2', version: h2Version
   testCompile group: 'org.apache.hbase', name: 'hbase-server', version: hbaseVersion, classifier: 'tests'
   testCompile group: 'org.apache.hbase', name: 'hbase-hadoop2-compat', version: hbaseVersion, classifier: 'tests'
   testCompile group: 'org.apache.hbase', name: 'hbase-hadoop-compat', version: hbaseVersion, classifier: 'tests'
-  testCompile( group: 'org.apache.hadoop', name: 'hadoop-minikdc', version: hadoopVersion) {
-    exclude group: 'org.apache.directory.api', module: 'api-ldap-schema-data'
+  testCompile (group: 'org.apache.hadoop', name: 'hadoop-minikdc', version: hadoopVersion) {
+    exclude group: 'org.apache.directory.api', module: 'api-ldap-schema-data'
   }
   testCompile group: 'junit', name: 'junit', version: junitVersion
   testCompile group: 'org.assertj', name: 'assertj-core', version: assertjVersion
diff --git a/gradle.properties b/gradle.properties
index 0d30378d..4808ec7d 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -20,14 +20,16 @@ javaSourceCompatibilityVersion=1.8
 
 avroVersion=1.8.1
-parquetVersion=1.6.0
+parquetVersion=1.9.0
 hadoopVersion=2.8.0
 aspectjVersion=1.7.4
 zookeeperVersion=3.4.6
 hbaseVersion=1.2.4
-hcatalogVersion=1.2.1
-kryoVersion=2.22
-calciteVersion=1.2.0-incubating
+hcatalogVersion=2.1.1
+kryoVersion=3.0.3
+calciteVersion=1.6.0
+# Hive 2.1.1 transitively depends on protobuf 2.5.0
+hiveProtobufVersion=2.5.0
 guavaVersion=14.0.1
 accumuloVersion=1.6.2
@@ -54,7 +56,7 @@ version=1.5.0-SNAPSHOT
 postgresqlVersion=9.2-1003-jdbc4
 
+jettyVersion=9.3.20.v20170531
+
 oldHash=b0f391e75154be86f95378ab141f6dd1b3b59475
 oldVersion=1.4.7
-
-org.gradle.daemon=true
diff --git a/gradle/sqoop-package.gradle b/gradle/sqoop-package.gradle
index 1a8d994d..c7465e9d 100644
--- a/gradle/sqoop-package.gradle
+++ b/gradle/sqoop-package.gradle
@@ -39,7 +39,7 @@ dependencies {
   }
   redist group: 'hsqldb', name: 'hsqldb', version: hsqldbVersion
   redist group: 'org.apache.commons', name: 'commons-lang3', version: commonslang3Version
-  redist group: 'com.twitter', name: 'parquet-avro', version: parquetVersion
+  redist group: 'org.apache.parquet', name: 'parquet-avro', version: parquetVersion
 }
 
 //Jar tasks
diff --git a/ivy.xml b/ivy.xml
index 670cb32d..91157ca7 100644
--- a/ivy.xml
+++ b/ivy.xml
@@ -61,7 +61,7 @@ under the License.
-
+
@@ -116,7 +116,7 @@ under the License.
              conf="common->default;redist->default"/>
-
+
@@ -135,7 +135,6 @@ under the License.
-
@@ -143,7 +142,6 @@ under the License.
       conf="common->default">
-
@@ -200,19 +198,33 @@ under the License.
-
+
+
+
+
+
+
+
+
-
-
+
+
+
+
+
+
+
+
+
diff --git a/ivy/libraries.properties b/ivy/libraries.properties
index 8f3dab2b..2ca95ee9 100644
--- a/ivy/libraries.properties
+++ b/ivy/libraries.properties
@@ -19,6 +19,8 @@
 # It drives ivy and the generation of a maven POM
 avro.version=1.8.1
+parquet.version=1.9.0
+
 checkstyle.version=5.0
@@ -43,6 +45,7 @@ mockito-all.version=1.9.5
 h2.version=1.3.170
 log4j.version=1.2.16
+log4j-2.version=2.8.2
 mvn.version=2.0.10
@@ -57,9 +60,12 @@ slf4j.version=1.7.7
 hadoop.version=2.8.0
 hbase.version=1.2.4
-hcatalog.version=1.2.1
-kryo.version=2.22
-calcite.version=1.2.0-incubating
+hcatalog.version=2.1.1
+kryo.version=3.0.3
+calcite.version=1.6.0
+hive.protobuf.version=2.5.0
+
+jetty.version=9.3.20.v20170531
+jersey.version=1.19.4
 jackson-databind.version=2.9.5
-parquet.version=1.6.0
diff --git a/src/java/org/apache/sqoop/avro/AvroUtil.java b/src/java/org/apache/sqoop/avro/AvroUtil.java
index 1663b1d1..428920ef 100644
--- a/src/java/org/apache/sqoop/avro/AvroUtil.java
+++ b/src/java/org/apache/sqoop/avro/AvroUtil.java
@@ -40,11 +40,11 @@
 import org.apache.sqoop.lib.BlobRef;
 import org.apache.sqoop.lib.ClobRef;
 import org.apache.sqoop.orm.ClassWriter;
-import parquet.avro.AvroSchemaConverter;
-import parquet.format.converter.ParquetMetadataConverter;
-import parquet.hadoop.ParquetFileReader;
-import parquet.hadoop.metadata.ParquetMetadata;
-import parquet.schema.MessageType;
+import org.apache.parquet.avro.AvroSchemaConverter;
+import org.apache.parquet.format.converter.ParquetMetadataConverter;
+import org.apache.parquet.hadoop.ParquetFileReader;
+import org.apache.parquet.hadoop.metadata.ParquetMetadata;
+import org.apache.parquet.schema.MessageType;
 
 import java.io.IOException;
 import java.math.BigDecimal;
diff --git a/src/java/org/apache/sqoop/hive/HiveImport.java b/src/java/org/apache/sqoop/hive/HiveImport.java
index 48800366..b2af5c78 100644
--- a/src/java/org/apache/sqoop/hive/HiveImport.java
+++ b/src/java/org/apache/sqoop/hive/HiveImport.java
@@ -25,6 +25,7 @@
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.security.Policy;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
@@ -34,6 +35,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
+import org.apache.sqoop.mapreduce.hcat.DerbyPolicy;
 import org.apache.sqoop.util.Executor;
 import org.apache.sqoop.util.LoggingAsyncSink;
 import org.apache.sqoop.util.SubprocessSecurityManager;
@@ -239,6 +241,7 @@ public void importTable(String inputTableName, String outputTableName,
   private void executeScript(String filename, List<String> env)
       throws IOException {
     SubprocessSecurityManager subprocessSM = null;
+    Policy originalPolicy = Policy.getPolicy();
 
     if (testMode) {
       // We use external mock hive process for test mode as
@@ -263,6 +266,8 @@ private void executeScript(String filename, List<String> env)
       subprocessSM = new SubprocessSecurityManager();
       subprocessSM.install();
 
+      Policy.setPolicy(new DerbyPolicy());
+
       String[] argv = getHiveArgs("-f", filename);
 
       // And invoke the static method on this array.
@@ -300,6 +305,7 @@ private void executeScript(String filename, List<String> env)
       if (null != subprocessSM) {
         // Uninstall the SecurityManager used to trap System.exit().
         subprocessSM.uninstall();
+        Policy.setPolicy(originalPolicy);
       }
     }
   }
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java b/src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java
new file mode 100644
index 00000000..ce1ac65c
--- /dev/null
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.mapreduce.hcat;
+
+import org.apache.derby.security.SystemPermission;
+
+import java.security.CodeSource;
+import java.security.Permission;
+import java.security.PermissionCollection;
+import java.security.Policy;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.List;
+
+/**
+ *
+ * Initially copied from Hive.
+ *
+ * A security policy that grants usederbyinternals
+ *
+ * <p>
+ * HCatalog tests use Security Manager to handle exits. With Derby version 10.14.1, if a
+ * security manager is configured, embedded Derby requires usederbyinternals permission, and
+ * that is checked directly using AccessController.checkPermission. This class will be used to
+ * setup a security policy to grant usederbyinternals, in tests that use NoExitSecurityManager.
+ * </p>
+ */
+public class DerbyPolicy extends Policy {
+
+  private static PermissionCollection perms;
+
+  public DerbyPolicy() {
+    super();
+    if (perms == null) {
+      perms = new DerbyPermissionCollection();
+      addPermissions();
+    }
+  }
+
+  @Override
+  public PermissionCollection getPermissions(CodeSource codesource) {
+    return perms;
+  }
+
+  private void addPermissions() {
+    SystemPermission systemPermission = new SystemPermission("engine", "usederbyinternals");
+    perms.add(systemPermission);
+  }
+
+  class DerbyPermissionCollection extends PermissionCollection {
+
+    List<Permission> perms = new ArrayList<>();
+
+    @Override
+    public void add(Permission p) {
+      perms.add(p);
+    }
+
+    @Override
+    public boolean implies(Permission p) {
+      for (Permission perm : perms) {
+        if (perm.implies(p)) {
+          return true;
+        }
+      }
+      return false;
+    }
+
+    @Override
+    public Enumeration<Permission> elements() {
+      return Collections.enumeration(perms);
+    }
+
+    @Override
+    public boolean isReadOnly() {
+      return false;
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
index 784b5f2a..234b7a87 100644
--- a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
@@ -25,6 +25,7 @@
 import java.io.OutputStreamWriter;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.security.Policy;
 import java.sql.Types;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -1199,12 +1200,14 @@ public void executeExternalHCatProgram(List<String> env, String[] cmdLine)
   void executeHCatProgramInProcess(String[] argv) throws IOException {
     SubprocessSecurityManager subprocessSM = null;
+    Policy originalPolicy = Policy.getPolicy();
     final ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
 
     try {
       Class cliDriverClass = Class.forName(HCAT_CLI_MAIN_CLASS);
       subprocessSM = new SubprocessSecurityManager();
       subprocessSM.install();
+      Policy.setPolicy(new DerbyPolicy());
       Method mainMethod = cliDriverClass.getMethod("main", argv.getClass());
       mainMethod.invoke(null, (Object) argv);
     } catch (ClassNotFoundException cnfe) {
@@ -1230,6 +1233,7 @@ void executeHCatProgramInProcess(String[] argv) throws IOException {
       if (null != subprocessSM) {
        subprocessSM.uninstall();
       }
+      Policy.setPolicy(originalPolicy);
       Thread.currentThread().setContextClassLoader(originalClassLoader);
     }
   }
diff --git a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java
index 2180cc20..1fd3d057 100644
--- a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java
+++ b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java
@@ -23,7 +23,7 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.sqoop.mapreduce.parquet.ParquetExportJobConfigurator;
-import parquet.avro.AvroParquetInputFormat;
+import org.apache.parquet.avro.AvroParquetInputFormat;
 
 import java.io.IOException;
diff --git a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java
index 90b910a3..e8215430 100644
--- a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java
+++ b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java
@@ -27,9 +27,9 @@
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.mapreduce.parquet.ParquetImportJobConfigurator;
-import parquet.avro.AvroParquetOutputFormat;
-import parquet.hadoop.ParquetOutputFormat;
-import parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.avro.AvroParquetOutputFormat;
+import org.apache.parquet.hadoop.ParquetOutputFormat;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 
 import java.io.IOException;
diff --git a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java
index 66ebc5b8..d3be2283 100644
--- a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java
+++ b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.sqoop.mapreduce.MergeParquetMapper;
 import org.apache.sqoop.mapreduce.parquet.ParquetMergeJobConfigurator;
-import parquet.avro.AvroParquetInputFormat;
+import org.apache.parquet.avro.AvroParquetInputFormat;
 
 import java.io.IOException;
diff --git a/src/test/org/apache/sqoop/TestParquetExport.java b/src/test/org/apache/sqoop/TestParquetExport.java
index be1d8164..662111e2 100644
--- a/src/test/org/apache/sqoop/TestParquetExport.java
+++ b/src/test/org/apache/sqoop/TestParquetExport.java
@@ -29,7 +29,7 @@
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import parquet.avro.AvroParquetWriter;
+import org.apache.parquet.avro.AvroParquetWriter;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -44,9 +44,9 @@
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
-import static parquet.hadoop.ParquetWriter.DEFAULT_BLOCK_SIZE;
-import static parquet.hadoop.ParquetWriter.DEFAULT_PAGE_SIZE;
-import static parquet.hadoop.metadata.CompressionCodecName.SNAPPY;
+import static org.apache.parquet.hadoop.ParquetWriter.DEFAULT_BLOCK_SIZE;
+import static org.apache.parquet.hadoop.ParquetWriter.DEFAULT_PAGE_SIZE;
+import static org.apache.parquet.hadoop.metadata.CompressionCodecName.SNAPPY;
 
 /**
diff --git a/src/test/org/apache/sqoop/TestParquetImport.java b/src/test/org/apache/sqoop/TestParquetImport.java
index 2810e318..93dbca73 100644
--- a/src/test/org/apache/sqoop/TestParquetImport.java
+++ b/src/test/org/apache/sqoop/TestParquetImport.java
@@ -18,6 +18,7 @@
 package org.apache.sqoop;
 
+import org.apache.avro.util.Utf8;
 import org.apache.sqoop.testutil.CommonArgs;
 import org.apache.sqoop.testutil.HsqldbTestServer;
 import org.apache.sqoop.testutil.ImportJobTestCase;
@@ -165,7 +166,7 @@ private void runParquetImportTest(String codec, String expectedCodec) throws IOE
     assertEquals("DATA_COL2", 200L, record1.get("DATA_COL2"));
     assertEquals("DATA_COL3", 1.0f, record1.get("DATA_COL3"));
     assertEquals("DATA_COL4", 2.0, record1.get("DATA_COL4"));
-    assertEquals("DATA_COL5", "s", record1.get("DATA_COL5"));
+    assertEquals("DATA_COL5", new Utf8("s"), record1.get("DATA_COL5"));
     Object object = record1.get("DATA_COL6");
     assertTrue(object instanceof ByteBuffer);
     ByteBuffer b = ((ByteBuffer) object);
@@ -191,7 +192,7 @@ public void testOverrideTypeMapping() throws IOException {
     List<GenericRecord> genericRecords = new ParquetReader(getTablePath()).readAll();
     GenericRecord record1 = genericRecords.get(0);
-    assertEquals("DATA_COL0", "10", record1.get("DATA_COL0"));
+    assertEquals("DATA_COL0", new Utf8("10"), record1.get("DATA_COL0"));
     assertEquals(1, genericRecords.size());
   }
diff --git a/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java b/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java
index adad0cc1..c9b5725a 100644
--- a/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java
+++ b/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java
@@ -24,14 +24,14 @@
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 
 import java.util.Arrays;
 import java.util.List;
 
 import static java.util.Arrays.asList;
 import static org.junit.Assert.assertEquals;
-import static parquet.hadoop.metadata.CompressionCodecName.GZIP;
+import static org.apache.parquet.hadoop.metadata.CompressionCodecName.GZIP;
 
 public class TestParquetIncrementalImportMerge extends ImportJobTestCase {
diff --git a/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java b/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java
index b55179a4..65f07946 100644
--- a/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java
+++ b/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java
@@ -37,7 +37,7 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
-import parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 
 import java.io.IOException;
 import java.util.Arrays;
diff --git a/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java b/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java
index 9dd54486..4b83d584 100644
--- a/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java
+++ b/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java
@@ -81,6 +81,8 @@ private void createHiveConf() {
     config.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, getHostName());
     config.setInt(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname, getPort());
     config.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, getMetastoreConnectUrl());
+    // setting port to -1 to turn the webui off
+    config.setInt(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, -1);
 
     for (Map.Entry<String, String> authConfig : authenticationConfiguration.getAuthenticationConfig().entrySet()) {
       config.set(authConfig.getKey(), authConfig.getValue());
diff --git a/src/test/org/apache/sqoop/util/ParquetReader.java b/src/test/org/apache/sqoop/util/ParquetReader.java
index f1c2fe10..908ce566 100644
--- a/src/test/org/apache/sqoop/util/ParquetReader.java
+++ b/src/test/org/apache/sqoop/util/ParquetReader.java
@@ -23,13 +23,13 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import parquet.avro.AvroParquetReader;
-import parquet.hadoop.Footer;
-import parquet.hadoop.ParquetFileReader;
-import parquet.hadoop.metadata.BlockMetaData;
-import parquet.hadoop.metadata.ColumnChunkMetaData;
-import parquet.hadoop.metadata.CompressionCodecName;
-import parquet.hadoop.util.HiddenFileFilter;
+import org.apache.parquet.avro.AvroParquetReader;
+import org.apache.parquet.hadoop.Footer;
+import org.apache.parquet.hadoop.ParquetFileReader;
+import org.apache.parquet.hadoop.metadata.BlockMetaData;
+import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.util.HiddenFileFilter;
 
 import java.io.IOException;
 import java.util.ArrayDeque;
@@ -52,7 +52,7 @@ public class ParquetReader implements AutoCloseable {
 
   private final Deque<Path> filesToRead;
 
-  private parquet.hadoop.ParquetReader<GenericRecord> reader;
+  private org.apache.parquet.hadoop.ParquetReader<GenericRecord> reader;
 
   public ParquetReader(Path pathToRead, Configuration configuration) {
     this.pathToRead = pathToRead;
diff --git a/testdata/hcatalog/conf/hive-site.xml b/testdata/hcatalog/conf/hive-site.xml
index 8a84a5d3..69c4ae10 100644
--- a/testdata/hcatalog/conf/hive-site.xml
+++ b/testdata/hcatalog/conf/hive-site.xml
@@ -40,4 +40,12 @@
     <name>hive.querylog.location</name>
     <value>${test.build.data}/sqoop/logs</value>
   </property>
+  <property>
+    <name>datanucleus.schema.autoCreateAll</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>hive.metastore.schema.verification</name>
+    <value>false</value>
+  </property>
 </configuration>