Mirror of https://github.com/apache/sqoop.git (synced 2025-05-10 05:09:41 +08:00)
SQOOP-2505: Sqoop2: Add ability to specify custom configuration properties for HDFS connector
(Jarek Jarcec Cecho via Abraham Elmahrek)
commit 3668aea7db (parent 09a21649d4)
@@ -64,6 +64,10 @@ public void setBoolean(String key, boolean value) {
 
   @Override
   public void setAll(Map<String, String> map) {
+    if(map == null) {
+      return;
+    }
+
     getOptions().putAll(map);
   }
 
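The new guard turns setAll(null) into a no-op instead of letting HashMap.putAll throw a NullPointerException. Below is a standalone sketch of the same pattern, using only java.util and a made-up class name; it is an illustration of the behavior the guard enables, not Sqoop code.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Standalone illustration only (not Sqoop code): class and field names are
// made up; it mirrors the guard added to setAll() above.
public class NullSafeSetAllSketch {
  private final Map<String, String> options = new HashMap<>();

  public void setAll(Map<String, String> map) {
    if (map == null) {
      return;              // same early return the commit adds
    }
    options.putAll(map);   // putAll(null) would throw NullPointerException
  }

  public static void main(String[] args) {
    NullSafeSetAllSketch sketch = new NullSafeSetAllSketch();
    sketch.setAll(null);                                      // safe no-op
    sketch.setAll(Collections.singletonMap("key", "value"));
    System.out.println(sketch.options);                       // {key=value}
  }
}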
@@ -95,6 +95,9 @@ public void testSetAll() {
     assertEquals(context.getLong("long", -1), 1L);
     assertEquals(context.getInt("integer", -1), 13);
     assertEquals(context.getBoolean("boolean", false), true);
+
+    // Verify that we're resilient against null
+    context.setAll(null);
   }
 
 }
@@ -38,5 +38,6 @@ public class HdfsFromInitializer extends Initializer<LinkConfiguration, FromJobC
   public void initialize(InitializerContext context, LinkConfiguration linkConfig, FromJobConfiguration jobConfig) {
     Configuration configuration = HdfsUtils.createConfiguration(linkConfig);
     HdfsUtils.configurationToContext(configuration, context.getContext());
+    context.getContext().setAll(linkConfig.linkConfig.configOverrides);
   }
 }
@@ -48,6 +48,8 @@ public void initialize(InitializerContext context, LinkConfiguration linkConfig,
 
     Configuration configuration = HdfsUtils.createConfiguration(linkConfig);
     HdfsUtils.configurationToContext(configuration, context.getContext());
+    context.getContext().setAll(linkConfig.linkConfig.configOverrides);
+
     boolean appendMode = Boolean.TRUE.equals(jobConfig.toJobConfig.appendMode);
 
     // Verification that given HDFS directory either don't exists or is empty
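In both initializer hunks the Hadoop configuration assembled from confDir is copied into the context first and the user-supplied overrides are applied afterwards, so an override replaces any value loaded from the *-site.xml files. The following minimal sketch replays that ordering directly against MutableMapContext (assuming it also exposes a getString accessor alongside the getLong/getInt/getBoolean accessors used in the test above; the property name and values are examples, not from the commit).

import java.util.HashMap;
import java.util.Map;

import org.apache.sqoop.common.MutableMapContext;

// Sketch only: simulates the two setAll-style steps performed by the
// initializers to show that values applied later win over earlier ones.
public class OverridePrecedenceSketch {
  public static void main(String[] args) {
    MutableMapContext context = new MutableMapContext();

    // Step 1: values copied from the loaded Hadoop configuration
    // (what HdfsUtils.configurationToContext(...) does in the hunks above).
    Map<String, String> fromConfDir = new HashMap<>();
    fromConfDir.put("dfs.replication", "3");   // example property
    context.setAll(fromConfDir);

    // Step 2: user-supplied overrides applied on top
    // (context.getContext().setAll(linkConfig.linkConfig.configOverrides)).
    Map<String, String> overrides = new HashMap<>();
    overrides.put("dfs.replication", "1");
    context.setAll(overrides);

    // A missing override map is now a safe no-op as well.
    context.setAll(null);

    System.out.println(context.getString("dfs.replication"));  // prints "1"
  }
}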
@@ -24,6 +24,8 @@
 import org.apache.sqoop.validation.validators.AbstractValidator;
 import org.apache.sqoop.validation.validators.DirectoryExistsValidator;
 
+import java.util.HashMap;
+import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -34,6 +36,12 @@ public class LinkConfig {
   @Input(size = 255, validators = { @Validator(DirectoryExistsValidator.class)})
   public String confDir;
 
+  @Input public Map<String, String> configOverrides;
+
+  public LinkConfig() {
+    configOverrides = new HashMap<>();
+  }
+
   public static class ConfigValidator extends AbstractValidator<LinkConfig> {
     private static final Pattern URI_PATTERN = Pattern.compile("((?<=\\()[A-Za-z][A-Za-z0-9\\+\\.\\-]*:([A-Za-z0-9\\.\\-_~:/\\?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=]|%[A-Fa-f0-9]{2})+(?=\\)))|([A-Za-z][A-Za-z0-9\\+\\.\\-]*:([A-Za-z0-9\\.\\-_~:/\\?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=]|%[A-Fa-f0-9]{2})+)");
 
@@ -27,6 +27,9 @@ linkConfig.uri.help = HDFS URI used to connect to HDFS
 linkConfig.confDir.label = Hadoop conf directory
 linkConfig.confDir.help = Directory with Hadoop configuration files. The connector will load all -site.xml files.
 
+linkConfig.configOverrides.label = Override configuration
+linkConfig.configOverrides.help = Map of properties that that should be set for the Hadoop's configuration object on top of the files loaded from configuration directory.
+
 # To Job Config
 #
 toJobConfig.label = To HDFS configuration
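The help text above summarizes the intent: entries in configOverrides are layered on top of whatever the connector loads from confDir. The sketch below shows how a link could be populated programmatically; the class and field names come from the hunks above, while the URI, directory, property names, and values are made-up examples rather than anything from the commit.

import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;

// Illustrative only, not part of the commit: configOverrides used next to
// uri and confDir; all concrete values below are examples.
public class LinkOverridesExample {
  public static void main(String[] args) {
    LinkConfiguration link = new LinkConfiguration();

    link.linkConfig.uri = "hdfs://namenode:8020/";
    // *-site.xml files from this directory are loaded first ...
    link.linkConfig.confDir = "/etc/hadoop/conf";
    // ... and these entries are then set on the resulting Configuration object.
    link.linkConfig.configOverrides.put("dfs.replication", "1");
    link.linkConfig.configOverrides.put("io.file.buffer.size", "65536");
  }
}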
@@ -0,0 +1,47 @@
+/**
+ * Created by jarcec on 8/18/15.
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.connector.hdfs;
+
+import org.apache.sqoop.common.MutableMapContext;
+import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
+import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
+import org.apache.sqoop.job.etl.Initializer;
+import org.apache.sqoop.job.etl.InitializerContext;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.assertEquals;
+
+public class TestFromInitializer {
+
+  @Test
+  public void testConfigOverrides() {
+    LinkConfiguration linkConfig = new LinkConfiguration();
+    FromJobConfiguration jobConfig = new FromJobConfiguration();
+
+    linkConfig.linkConfig.uri = "file:///";
+    linkConfig.linkConfig.configOverrides.put("key", "value");
+
+    InitializerContext initializerContext = new InitializerContext(new MutableMapContext());
+
+    Initializer initializer = new HdfsFromInitializer();
+    initializer.initialize(initializerContext, linkConfig, jobConfig);
+
+    assertEquals(initializerContext.getString("key"), "value");
+  }
+}