diff --git a/client/src/main/java/org/apache/sqoop/client/request/JobResourceRequest.java b/client/src/main/java/org/apache/sqoop/client/request/JobResourceRequest.java
index 488318e4..ab9c93be 100644
--- a/client/src/main/java/org/apache/sqoop/client/request/JobResourceRequest.java
+++ b/client/src/main/java/org/apache/sqoop/client/request/JobResourceRequest.java
@@ -90,7 +90,7 @@ public ValidationResultBean update(String serverUrl, MJob job) {
     JobBean jobBean = new JobBean(job);
     // Extract all config inputs including sensitive inputs
     JSONObject jobJson = jobBean.extract(false);
-    String response = super.put(serverUrl + RESOURCE + job.getPersistenceId(),
+    String response = super.put(serverUrl + RESOURCE + UrlSafeUtils.urlPathEncode(job.getName()),
         jobJson.toJSONString());
     ValidationResultBean validationBean = new ValidationResultBean();
     validationBean.restore(JSONUtils.parse(response));
diff --git a/client/src/main/java/org/apache/sqoop/client/request/LinkResourceRequest.java b/client/src/main/java/org/apache/sqoop/client/request/LinkResourceRequest.java
index bc6e0a9e..b3262bdc 100644
--- a/client/src/main/java/org/apache/sqoop/client/request/LinkResourceRequest.java
+++ b/client/src/main/java/org/apache/sqoop/client/request/LinkResourceRequest.java
@@ -76,7 +76,8 @@ public ValidationResultBean update(String serverUrl, MLink link) {
     LinkBean linkBean = new LinkBean(link);
     // Extract all config inputs including sensitive inputs
     JSONObject linkJson = linkBean.extract(false);
-    String response = super.put(serverUrl + LINK_RESOURCE + link.getPersistenceId(), linkJson.toJSONString());
+    String response = super.put(serverUrl + LINK_RESOURCE + UrlSafeUtils.urlPathEncode(link.getName()),
+        linkJson.toJSONString());
     ValidationResultBean validationBean = new ValidationResultBean();
     validationBean.restore(JSONUtils.parse(response));
     return validationBean;
@@ -88,9 +89,9 @@ public void delete(String serverUrl, String arg) {

   public void enable(String serverUrl, String lArg, Boolean enabled) {
     if (enabled) {
-      super.put(serverUrl + LINK_RESOURCE + lArg + ENABLE, null);
+      super.put(serverUrl + LINK_RESOURCE + UrlSafeUtils.urlPathEncode(lArg) + ENABLE, null);
     } else {
-      super.put(serverUrl + LINK_RESOURCE + lArg + DISABLE, null);
+      super.put(serverUrl + LINK_RESOURCE + UrlSafeUtils.urlPathEncode(lArg) + DISABLE, null);
     }
   }
 }
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/InformalJobNameExecuteTest.java b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/InformalJobNameExecuteTest.java
new file mode 100644
index 00000000..411b07e7
--- /dev/null
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/InformalJobNameExecuteTest.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.integration.connector.hdfs;
+
+import org.apache.sqoop.model.MDriverConfig;
+import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.model.MLink;
+import org.apache.sqoop.test.testcases.ConnectorTestCase;
+import org.testng.annotations.*;
+
+import static org.testng.Assert.assertEquals;
+
+public class InformalJobNameExecuteTest extends ConnectorTestCase {
+
+  private String jobName;
+
+  @Factory(dataProvider="special-job-name-executed-test")
+  public InformalJobNameExecuteTest(String specialChar) {
+    this.jobName = "job" + specialChar + "name";
+  }
+
+  @DataProvider(name="special-job-name-executed-test", parallel=true)
+  public static Object[][] data() {
+    // The special chars used for the test, merged into three cases to reduce test time.
+    return new Object[][] {{" \t/.?&*[]("}, {")`~!@#$%^-"}, {"_=+;:\"<>,"}};
+  }
+
+  @BeforeMethod(alwaysRun = true)
+  public void createTable() {
+    createTableCities();
+  }
+
+  @AfterMethod(alwaysRun = true)
+  public void dropTable() {
+    super.dropTable();
+  }
+
+  @Test
+  public void test() throws Exception {
+    createFromFile("input-0001",
+        "1,'USA','2004-10-23','San Francisco'",
+        "2,'USA','2004-10-24','Sunnyvale'"
+    );
+
+    // RDBMS link
+    MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
+    fillRdbmsLinkConfig(rdbmsLink);
+    saveLink(rdbmsLink);
+
+    // HDFS link
+    MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
+    saveLink(hdfsLink);
+
+    // Job creation
+    MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
+
+    // HDFS "FROM" config for the job; the connector test case base class only has utilities for HDFS
+    fillHdfsFromConfig(job);
+
+    // RDBMS "TO" config
+    fillRdbmsToConfig(job);
+
+    // driver config
+    MDriverConfig driverConfig = job.getDriverConfig();
+    driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(3);
+    job.setName(jobName);
+    saveJob(job);
+
+    executeJob(job);
+
+    assertEquals(provider.rowCount(getTableName()), 2L);
+    assertRowInCities(1, "USA", "2004-10-23", "San Francisco");
+    assertRowInCities(2, "USA", "2004-10-24", "Sunnyvale");
+  }
+}
diff --git a/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java b/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
index 16480c32..920679fc 100644
--- a/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
@@ -18,7 +18,9 @@
 package org.apache.sqoop.integration.server;

 import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.fail;

+import com.google.common.collect.Iterables;
 import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
 import org.apache.sqoop.model.MJob;
 import org.apache.sqoop.model.MLink;
@@ -27,37 +29,82 @@
 import org.apache.sqoop.test.infrastructure.providers.DatabaseInfrastructureProvider;
 import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
 import org.apache.sqoop.test.infrastructure.providers.SqoopInfrastructureProvider;
+import org.apache.sqoop.test.utils.ParametrizedUtils;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Factory;
 import org.testng.annotations.Test;

 @Infrastructure(dependencies = {HadoopInfrastructureProvider.class, SqoopInfrastructureProvider.class, DatabaseInfrastructureProvider.class})
 public class InformalObjectNameTest extends SqoopTestCase {
-  private static final String LINK_NAME_CONTAINS_WHITESPACE = "link name";
-  private static final String LINK_NAME_CONTAINS_SLASH = "link/name";
-  private static final String JOB_NAME_CONTAINS_WHITESPACE = "job name";
-  private static final String JOB_NAME_CONTAINS_SLASH = "job/name";
+  private String target;
+  private String specialChar;

-  public InformalObjectNameTest() {
+  private static final String TARGET_JOB = "Job";
+  private static final String TARGET_LINK = "Link";
+
+  /**
+   * The objects used for the test: job and link.
+   */
+  public static Object[] TARGETS = new Object[] {
+      TARGET_JOB,
+      TARGET_LINK,
+  };
+
+  /**
+   * The special chars used for the test.
+   */
+  public static Object[] SPECIAL_CHAR = new Object[] {
+      " ", "\t", "/", ".", "?", "&", "*", "[", "]", "(", ")", "`", "~", "!", "@",
+      "#", "$", "%", "^", "-", "_", "=", "+", ";", ":", "\"", "<", ">", ",",
+  };
+
+  @Factory(dataProvider="special-name-integration-test")
+  public InformalObjectNameTest(String target, String specialChar) {
+    this.target = target;
+    this.specialChar = specialChar;
+  }
+
+  @DataProvider(name="special-name-integration-test", parallel=true)
+  public static Object[][] data() {
+    return Iterables.toArray(ParametrizedUtils.crossProduct(TARGETS, SPECIAL_CHAR), Object[].class);
   }

   @Test
-  public void testInformalLinkName() throws Exception {
-    // RDBMS link
+  public void testInformalName() throws Exception {
+    if (TARGET_LINK.equals(target)) {
+      verifyActionsForLink("link" + specialChar + "name");
+    } else if (TARGET_JOB.equals(target)) {
+      verifyActionsForJob("job" + specialChar + "name");
+    }
+  }
+
+  private void verifyActionsForLink(String linkName) {
+    // create link
     MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
     fillRdbmsLinkConfig(rdbmsLink);
-    rdbmsLink.setName(LINK_NAME_CONTAINS_WHITESPACE);
+    rdbmsLink.setName(linkName);
     saveLink(rdbmsLink);

-    assertEquals(rdbmsLink, getClient().getLink(LINK_NAME_CONTAINS_WHITESPACE));
+    // read link
+    assertEquals(rdbmsLink, getClient().getLink(linkName));

-    rdbmsLink = getClient().createLink("generic-jdbc-connector");
-    fillRdbmsLinkConfig(rdbmsLink);
-    rdbmsLink.setName(LINK_NAME_CONTAINS_SLASH);
-    saveLink(rdbmsLink);
-    assertEquals(rdbmsLink, getClient().getLink(LINK_NAME_CONTAINS_SLASH));
+    // update link
+    getClient().updateLink(rdbmsLink);
+
+    // enable link
+    getClient().enableLink(linkName, true);
+
+    // delete link
+    getClient().deleteLink(linkName);
+    try {
+      getClient().getLink(linkName);
+      fail("The link no longer exists; an exception should have been thrown.");
+    } catch (Exception e) {
+      // expected: the link has been deleted
+    }
   }

-  @Test
-  public void testInformalJobName() throws Exception {
+  private void verifyActionsForJob(String jobName) throws Exception {
     // RDBMS link
     MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
     fillRdbmsLinkConfig(rdbmsLink);
@@ -68,6 +115,7 @@ public void testInformalJobName() throws Exception {
     fillHdfsLinkConfig(hdfsLink);
     saveLink(hdfsLink);

+    // Job creation
     MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());

@@ -77,21 +125,25 @@
     // hdfs "TO" config
     fillHdfsToConfig(job, ToFormat.TEXT_FILE);

-    job.setName(JOB_NAME_CONTAINS_WHITESPACE);
-    saveJob(job);
-    assertEquals(job, getClient().getJob(JOB_NAME_CONTAINS_WHITESPACE));
-
-    job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
-
-    // rdms "FROM" config
-    fillRdbmsFromConfig(job, "id");
-
-    // hdfs "TO" config
-    fillHdfsToConfig(job, ToFormat.TEXT_FILE);
-
-    job.setName(JOB_NAME_CONTAINS_SLASH);
+    job.setName(jobName);
     saveJob(job);
-    assertEquals(job, getClient().getJob(JOB_NAME_CONTAINS_SLASH));
+    // read job
+    assertEquals(job, getClient().getJob(jobName));
+
+    // update job
+    getClient().updateJob(job);
+
+    // enable job
+    getClient().enableJob(jobName, true);
+
+    // delete job
+    getClient().deleteJob(jobName);
+    try {
+      getClient().getJob(jobName);
+      fail("The job no longer exists; an exception should have been thrown.");
+    } catch (Exception e) {
+      // expected: the job has been deleted
+    }
   }
 }
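---
Reviewer note (not part of the patch): the core of the fix is that job and link names, unlike the numeric persistence IDs used before, may contain characters that are significant in a URL path ("/", "?", "&", whitespace, and so on), so the client must percent-encode the name before appending it to the REST resource path. That is what UrlSafeUtils.urlPathEncode() does in the hunks above. Below is a minimal sketch of the idea using only the JDK; the class name, server URL, and port are illustrative assumptions, not Sqoop's actual implementation of UrlSafeUtils.

    import java.io.UnsupportedEncodingException;
    import java.net.URLDecoder;
    import java.net.URLEncoder;

    public class UrlPathEncodeSketch {

      // Percent-encodes one path segment. URLEncoder implements the
      // application/x-www-form-urlencoded scheme, which turns spaces into "+";
      // inside a URL path a space must be "%20", hence the extra replace.
      public static String urlPathEncode(String segment) {
        try {
          return URLEncoder.encode(segment, "UTF-8").replace("+", "%20");
        } catch (UnsupportedEncodingException e) {
          throw new AssertionError("UTF-8 is always supported by the JVM", e);
        }
      }

      public static void main(String[] args) throws Exception {
        // A job name containing characters that are significant in a URL path.
        String jobName = "job/na me?";
        // Illustrative base URL, not Sqoop's actual endpoint layout.
        String url = "http://sqoop-server:12000/sqoop/v1/job/" + urlPathEncode(jobName);
        System.out.println(url); // .../v1/job/job%2Fna%20me%3F
        // The server side can recover the original name from the encoded segment.
        System.out.println(URLDecoder.decode("job%2Fna%20me%3F", "UTF-8")); // job/na me?
      }
    }

Encoding each path segment individually, rather than the whole URL, is what keeps a name like "job/name" from being parsed by the server as two separate path components; that is exactly the behavior the new InformalObjectNameTest and InformalJobNameExecuteTest exercise across the full special-character set.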