env)
if (null != subprocessSM) {
// Uninstall the SecurityManager used to trap System.exit().
subprocessSM.uninstall();
+ Policy.setPolicy(originalPolicy);
}
}
}
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java b/src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java
new file mode 100644
index 00000000..ce1ac65c
--- /dev/null
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/DerbyPolicy.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.mapreduce.hcat;
+
+import org.apache.derby.security.SystemPermission;
+
+import java.security.CodeSource;
+import java.security.Permission;
+import java.security.PermissionCollection;
+import java.security.Policy;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.List;
+
+/**
+ *
+ * Initially copied from Hive.
+ *
+ * A security policy that grants the usederbyinternals permission.
+ *
+ *
+ * HCatalog tests use a SecurityManager to trap System.exit(). With Derby version 10.14.1, if a
+ * security manager is configured, embedded Derby requires the usederbyinternals permission,
+ * which it checks directly via AccessController.checkPermission. This class is used to set up
+ * a security policy that grants usederbyinternals in tests that use NoExitSecurityManager.
+ *
+ */
+public class DerbyPolicy extends Policy {
+
+ private static PermissionCollection perms;
+
+ public DerbyPolicy() {
+ super();
+ if (perms == null) {
+ perms = new DerbyPermissionCollection();
+ addPermissions();
+ }
+ }
+
+ @Override
+ public PermissionCollection getPermissions(CodeSource codesource) {
+ return perms;
+ }
+
+ private void addPermissions() {
+ SystemPermission systemPermission = new SystemPermission("engine", "usederbyinternals");
+ perms.add(systemPermission);
+ }
+
+ class DerbyPermissionCollection extends PermissionCollection {
+
+ List<Permission> perms = new ArrayList<>();
+
+ @Override
+ public void add(Permission p) {
+ perms.add(p);
+ }
+
+ @Override
+ public boolean implies(Permission p) {
+ for (Permission perm : perms) {
+ if (perm.implies(p)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public Enumeration<Permission> elements() {
+ return Collections.enumeration(perms);
+ }
+
+ @Override
+ public boolean isReadOnly() {
+ return false;
+ }
+ }
+}
\ No newline at end of file
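
For context, a minimal standalone sketch of how a policy such as DerbyPolicy is meant to be used: the JVM-wide policy is swapped in before the code path that trips Derby's internal permission check and restored afterwards. The class name DerbyPolicyDemo is hypothetical; it assumes DerbyPolicy and the Derby jar are on the classpath and performs the same AccessController.checkPermission call that embedded Derby makes when a security manager is installed.

import java.security.AccessController;
import java.security.Policy;

import org.apache.derby.security.SystemPermission;
import org.apache.sqoop.mapreduce.hcat.DerbyPolicy;

public class DerbyPolicyDemo {
  public static void main(String[] args) {
    Policy originalPolicy = Policy.getPolicy();   // remember the current JVM-wide policy
    Policy.setPolicy(new DerbyPolicy());          // grant usederbyinternals to every code source
    try {
      // The same check embedded Derby performs internally; with DerbyPolicy installed it
      // passes instead of throwing an AccessControlException.
      AccessController.checkPermission(new SystemPermission("engine", "usederbyinternals"));
      System.out.println("usederbyinternals is granted");
    } finally {
      Policy.setPolicy(originalPolicy);           // always restore the original policy
    }
  }
}
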
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
index 784b5f2a..234b7a87 100644
--- a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
@@ -25,6 +25,7 @@
import java.io.OutputStreamWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
+import java.security.Policy;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
@@ -1199,12 +1200,14 @@ public void executeExternalHCatProgram(List<String> env, String[] cmdLine)
void executeHCatProgramInProcess(String[] argv) throws IOException {
SubprocessSecurityManager subprocessSM = null;
+ Policy originalPolicy = Policy.getPolicy();
final ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
try {
Class<?> cliDriverClass = Class.forName(HCAT_CLI_MAIN_CLASS);
subprocessSM = new SubprocessSecurityManager();
subprocessSM.install();
+ Policy.setPolicy(new DerbyPolicy());
Method mainMethod = cliDriverClass.getMethod("main", argv.getClass());
mainMethod.invoke(null, (Object) argv);
} catch (ClassNotFoundException cnfe) {
@@ -1230,6 +1233,7 @@ void executeHCatProgramInProcess(String[] argv) throws IOException {
if (null != subprocessSM) {
subprocessSM.uninstall();
}
+ Policy.setPolicy(originalPolicy);
Thread.currentThread().setContextClassLoader(originalClassLoader);
}
}
diff --git a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java
index 2180cc20..1fd3d057 100644
--- a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java
+++ b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetExportJobConfigurator.java
@@ -23,7 +23,7 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.sqoop.mapreduce.parquet.ParquetExportJobConfigurator;
-import parquet.avro.AvroParquetInputFormat;
+import org.apache.parquet.avro.AvroParquetInputFormat;
import java.io.IOException;
diff --git a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java
index 90b910a3..e8215430 100644
--- a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java
+++ b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetImportJobConfigurator.java
@@ -27,9 +27,9 @@
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.mapreduce.parquet.ParquetImportJobConfigurator;
-import parquet.avro.AvroParquetOutputFormat;
-import parquet.hadoop.ParquetOutputFormat;
-import parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.avro.AvroParquetOutputFormat;
+import org.apache.parquet.hadoop.ParquetOutputFormat;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import java.io.IOException;
diff --git a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java
index 66ebc5b8..d3be2283 100644
--- a/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java
+++ b/src/java/org/apache/sqoop/mapreduce/parquet/hadoop/HadoopParquetMergeJobConfigurator.java
@@ -30,7 +30,7 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.sqoop.mapreduce.MergeParquetMapper;
import org.apache.sqoop.mapreduce.parquet.ParquetMergeJobConfigurator;
-import parquet.avro.AvroParquetInputFormat;
+import org.apache.parquet.avro.AvroParquetInputFormat;
import java.io.IOException;
diff --git a/src/test/org/apache/sqoop/TestParquetExport.java b/src/test/org/apache/sqoop/TestParquetExport.java
index be1d8164..662111e2 100644
--- a/src/test/org/apache/sqoop/TestParquetExport.java
+++ b/src/test/org/apache/sqoop/TestParquetExport.java
@@ -29,7 +29,7 @@
import org.junit.Test;
import org.junit.rules.ExpectedException;
-import parquet.avro.AvroParquetWriter;
+import org.apache.parquet.avro.AvroParquetWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -44,9 +44,9 @@
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
-import static parquet.hadoop.ParquetWriter.DEFAULT_BLOCK_SIZE;
-import static parquet.hadoop.ParquetWriter.DEFAULT_PAGE_SIZE;
-import static parquet.hadoop.metadata.CompressionCodecName.SNAPPY;
+import static org.apache.parquet.hadoop.ParquetWriter.DEFAULT_BLOCK_SIZE;
+import static org.apache.parquet.hadoop.ParquetWriter.DEFAULT_PAGE_SIZE;
+import static org.apache.parquet.hadoop.metadata.CompressionCodecName.SNAPPY;
/**
diff --git a/src/test/org/apache/sqoop/TestParquetImport.java b/src/test/org/apache/sqoop/TestParquetImport.java
index 2810e318..93dbca73 100644
--- a/src/test/org/apache/sqoop/TestParquetImport.java
+++ b/src/test/org/apache/sqoop/TestParquetImport.java
@@ -18,6 +18,7 @@
package org.apache.sqoop;
+import org.apache.avro.util.Utf8;
import org.apache.sqoop.testutil.CommonArgs;
import org.apache.sqoop.testutil.HsqldbTestServer;
import org.apache.sqoop.testutil.ImportJobTestCase;
@@ -165,7 +166,7 @@ private void runParquetImportTest(String codec, String expectedCodec) throws IOE
assertEquals("DATA_COL2", 200L, record1.get("DATA_COL2"));
assertEquals("DATA_COL3", 1.0f, record1.get("DATA_COL3"));
assertEquals("DATA_COL4", 2.0, record1.get("DATA_COL4"));
- assertEquals("DATA_COL5", "s", record1.get("DATA_COL5"));
+ assertEquals("DATA_COL5", new Utf8("s"), record1.get("DATA_COL5"));
Object object = record1.get("DATA_COL6");
assertTrue(object instanceof ByteBuffer);
ByteBuffer b = ((ByteBuffer) object);
@@ -191,7 +192,7 @@ public void testOverrideTypeMapping() throws IOException {
List<GenericRecord> genericRecords = new ParquetReader(getTablePath()).readAll();
GenericRecord record1 = genericRecords.get(0);
- assertEquals("DATA_COL0", "10", record1.get("DATA_COL0"));
+ assertEquals("DATA_COL0", new Utf8("10"), record1.get("DATA_COL0"));
assertEquals(1, genericRecords.size());
}
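
The switch above from comparing against "s" to new Utf8("s") is needed because the Avro generic reader materializes string columns as org.apache.avro.util.Utf8 rather than java.lang.String. A small sketch of the two equivalent ways to write such an assertion (the helper class and method names are hypothetical):

import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;

import static org.junit.Assert.assertEquals;

public final class Utf8Assertions {
  private Utf8Assertions() { }

  static void assertStringColumn(GenericRecord record, String column, String expected) {
    // Option 1: wrap the expected literal in Utf8 so both sides have the same runtime type.
    assertEquals(new Utf8(expected), record.get(column));
    // Option 2: normalize the actual value to a String before comparing.
    assertEquals(expected, record.get(column).toString());
  }
}
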
diff --git a/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java b/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java
index adad0cc1..c9b5725a 100644
--- a/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java
+++ b/src/test/org/apache/sqoop/TestParquetIncrementalImportMerge.java
@@ -24,14 +24,14 @@
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
-import parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import java.util.Arrays;
import java.util.List;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
-import static parquet.hadoop.metadata.CompressionCodecName.GZIP;
+import static org.apache.parquet.hadoop.metadata.CompressionCodecName.GZIP;
public class TestParquetIncrementalImportMerge extends ImportJobTestCase {
diff --git a/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java b/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java
index b55179a4..65f07946 100644
--- a/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java
+++ b/src/test/org/apache/sqoop/hive/TestHiveServer2ParquetImport.java
@@ -37,7 +37,7 @@
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-import parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import java.io.IOException;
import java.util.Arrays;
diff --git a/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java b/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java
index 9dd54486..4b83d584 100644
--- a/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java
+++ b/src/test/org/apache/sqoop/hive/minicluster/HiveMiniCluster.java
@@ -81,6 +81,8 @@ private void createHiveConf() {
config.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, getHostName());
config.setInt(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname, getPort());
config.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, getMetastoreConnectUrl());
+ // Setting the port to -1 turns the HiveServer2 web UI off.
+ config.setInt(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, -1);
for (Map.Entry<String, String> authConfig : authenticationConfiguration.getAuthenticationConfig().entrySet()) {
config.set(authConfig.getKey(), authConfig.getValue());
diff --git a/src/test/org/apache/sqoop/util/ParquetReader.java b/src/test/org/apache/sqoop/util/ParquetReader.java
index f1c2fe10..908ce566 100644
--- a/src/test/org/apache/sqoop/util/ParquetReader.java
+++ b/src/test/org/apache/sqoop/util/ParquetReader.java
@@ -23,13 +23,13 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import parquet.avro.AvroParquetReader;
-import parquet.hadoop.Footer;
-import parquet.hadoop.ParquetFileReader;
-import parquet.hadoop.metadata.BlockMetaData;
-import parquet.hadoop.metadata.ColumnChunkMetaData;
-import parquet.hadoop.metadata.CompressionCodecName;
-import parquet.hadoop.util.HiddenFileFilter;
+import org.apache.parquet.avro.AvroParquetReader;
+import org.apache.parquet.hadoop.Footer;
+import org.apache.parquet.hadoop.ParquetFileReader;
+import org.apache.parquet.hadoop.metadata.BlockMetaData;
+import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.util.HiddenFileFilter;
import java.io.IOException;
import java.util.ArrayDeque;
@@ -52,7 +52,7 @@ public class ParquetReader implements AutoCloseable {
private final Deque<Path> filesToRead;
- private parquet.hadoop.ParquetReader<GenericRecord> reader;
+ private org.apache.parquet.hadoop.ParquetReader<GenericRecord> reader;
public ParquetReader(Path pathToRead, Configuration configuration) {
this.pathToRead = pathToRead;
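
The package moves above, from parquet.* to org.apache.parquet.*, follow Parquet's relocation into the org.apache.parquet namespace in the Apache releases of parquet-mr. A minimal sketch of reading records through the relocated Avro binding; the class name, the command-line argument, and the use of the builder(Path) overload are assumptions made for brevity, not part of this change.

import java.io.IOException;

import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.hadoop.ParquetReader;

public class ReadParquetSketch {
  public static void main(String[] args) throws IOException {
    // Path to a Parquet file, e.g. one produced by a Sqoop import.
    Path file = new Path(args[0]);
    try (ParquetReader<GenericRecord> reader =
             AvroParquetReader.<GenericRecord>builder(file).build()) {
      GenericRecord record;
      while ((record = reader.read()) != null) {
        System.out.println(record);
      }
    }
  }
}
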
diff --git a/testdata/hcatalog/conf/hive-site.xml b/testdata/hcatalog/conf/hive-site.xml
index 8a84a5d3..69c4ae10 100644
--- a/testdata/hcatalog/conf/hive-site.xml
+++ b/testdata/hcatalog/conf/hive-site.xml
@@ -40,4 +40,12 @@
    <name>hive.querylog.location</name>
    <value>${test.build.data}/sqoop/logs</value>
  </property>
+  <property>
+    <name>datanucleus.schema.autoCreateAll</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>hive.metastore.schema.verification</name>
+    <value>false</value>
+  </property>
 </configuration>