
SQOOP-2678: Sqoop2: Remove the id from public interface for Link

(Colin Ma via Jarek Jarcec Cecho)
Committed by Jarek Jarcec Cecho on 2015-11-13 11:54:05 -08:00
parent 7112964e75
commit aaee89dc6c
27 changed files with 104 additions and 134 deletions
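The net effect on the public client API is that links are addressed by name rather than by persistence id. Below is a minimal sketch of the name-based calls, assuming a running Sqoop 2 server (the URL is hypothetical) and two pre-existing links named rdbms-link and hdfs-link; the SqoopClient methods are the same ones exercised in the integration-test changes further down.

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MLink;

public class LinkByNameSketch {
  public static void main(String[] args) {
    // Hypothetical server URL; adjust to the actual deployment.
    SqoopClient client = new SqoopClient("http://localhost:12000/sqoop/");

    // Links are looked up by name instead of by numeric id.
    MLink fromLink = client.getLink("rdbms-link");
    MLink toLink = client.getLink("hdfs-link");

    // Jobs are created from link names, as in the test changes below.
    MJob job = client.createJob(fromLink.getName(), toLink.getName());
    System.out.println("Created job: " + job.getName());

    // Enable/disable and delete also take the link name.
    client.enableLink(fromLink.getName(), true);
    client.deleteLink(toLink.getName());
  }
}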

View File

@ -37,53 +37,24 @@ public static MJob getJobFromIdentifier(String identifier) {
return job;
}
-public static long getLinkIdFromIdentifier(String identifier) {
-// support linkName or linkId for the api
-// NOTE: linkId is a fallback for older sqoop clients if any, since we want
-// to primarily use unique linkNames
-long linkId;
+public static MLink getLinkFromLinkName(String linkName) {
Repository repository = RepositoryManager.getInstance().getRepository();
-MLink link = repository.findLink(identifier);
-if (link != null) {
-linkId = link.getPersistenceId();
-} else {
-try {
-linkId = Long.parseLong(identifier);
-} catch (NumberFormatException ex) {
-// this means name nor Id existed and we want to throw a user friendly
-// message than a number format exception
-throw new SqoopException(ServerError.SERVER_0005, "Invalid link: " + identifier
-+ " requested");
-}
+MLink link = repository.findLink(linkName);
+if (link == null) {
+throw new SqoopException(ServerError.SERVER_0006, "Invalid link name: " + linkName
++ " doesn't exist");
}
-return linkId;
+return link;
}
-public static String getLinkNameFromIdentifier(String identifier) {
-// support linkName or linkId for the api
-// NOTE: linkId is a fallback for older sqoop clients if any, since we want
-// to primarily use unique linkNames
+public static MLink getLinkFromLinkId(Long linkId) {
Repository repository = RepositoryManager.getInstance().getRepository();
-MLink link = repository.findLink(identifier);
+MLink link = repository.findLink(linkId);
if (link == null) {
-long linkId;
-try {
-linkId = Long.parseLong(identifier);
-} catch (NumberFormatException ex) {
-// this means name nor Id existed and we want to throw a user friendly
-// message than a number format exception
-throw new SqoopException(ServerError.SERVER_0005, "Invalid link: " + identifier
-+ " requested");
-}
-link = repository.findLink(linkId);
-if (link == null) {
-throw new SqoopException(ServerError.SERVER_0006, "Link: " + identifier
-+ " doesn't exist");
-}
+throw new SqoopException(ServerError.SERVER_0006, "Invalid link id: " + linkId
++ " doesn't exist");
}
-return link.getName();
+return link;
}
public static long getConnectorIdFromIdentifier(String identifier) {

View File

@ -37,14 +37,7 @@
import org.apache.sqoop.json.JsonBean;
import org.apache.sqoop.json.SubmissionBean;
import org.apache.sqoop.json.ValidationResultBean;
-import org.apache.sqoop.model.ConfigUtils;
-import org.apache.sqoop.model.MDriverConfig;
-import org.apache.sqoop.model.MFromConfig;
-import org.apache.sqoop.model.MJob;
-import org.apache.sqoop.model.MPersistableEntity;
-import org.apache.sqoop.model.MResource;
-import org.apache.sqoop.model.MSubmission;
-import org.apache.sqoop.model.MToConfig;
+import org.apache.sqoop.model.*;
import org.apache.sqoop.repository.Repository;
import org.apache.sqoop.repository.RepositoryManager;
import org.apache.sqoop.request.HttpEventContext;
@ -186,17 +179,14 @@ private JsonBean createUpdateJob(RequestContext ctx, boolean create) {
// Job object
MJob postedJob = jobs.get(0);
+MLink fromLink = HandlerUtils.getLinkFromLinkId(postedJob.getFromLinkId());
+MLink toLink = HandlerUtils.getLinkFromLinkId(postedJob.getToLinkId());
// Authorization check
if (create) {
-AuthorizationEngine.createJob(ctx.getUserName(),
-HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getFromLinkId())),
-HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getToLinkId())));
+AuthorizationEngine.createJob(ctx.getUserName(), fromLink.getName(), toLink.getName());
} else {
-AuthorizationEngine.updateJob(ctx.getUserName(),
-HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getFromLinkId())),
-HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getToLinkId())),
-postedJob.getName());
+AuthorizationEngine.updateJob(ctx.getUserName(), fromLink.getName(), toLink.getName(), postedJob.getName());
}
// Verify that user is not trying to spoof us

View File

@ -90,15 +90,15 @@ public JsonBean handleEvent(RequestContext ctx) {
*/
private JsonBean deleteLink(RequestContext ctx) {
Repository repository = RepositoryManager.getInstance().getRepository();
-String linkIdentifier = ctx.getLastURLElement();
-// support linkName or linkId for the api
-String linkName = HandlerUtils.getLinkNameFromIdentifier(linkIdentifier);
+String linkName = ctx.getLastURLElement();
+// make sure the link exist, otherwise, the exception will be thrown
+MLink link = HandlerUtils.getLinkFromLinkName(linkName);
// Authorization check
-AuthorizationEngine.deleteLink(ctx.getUserName(), linkName);
+AuthorizationEngine.deleteLink(ctx.getUserName(), link.getName());
AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
ctx.getRequest().getRemoteAddr(), "delete", "link", linkIdentifier);
ctx.getRequest().getRemoteAddr(), "delete", "link", link.getName());
repository.deleteLink(linkName);
MResource resource = new MResource(linkName, MResource.TYPE.LINK);
@ -142,7 +142,7 @@ private JsonBean createUpdateLink(RequestContext ctx, boolean create) {
} else {
AuthorizationEngine.updateLink(ctx.getUserName(),
HandlerUtils.getConnectorNameFromIdentifier(String.valueOf(postedLink.getConnectorId())),
-HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedLink.getPersistenceId())));
+postedLink.getName());
}
MLinkConfig linkConfig = ConnectorManager.getInstance()
@ -152,11 +152,9 @@ private JsonBean createUpdateLink(RequestContext ctx, boolean create) {
}
// if update get the link id from the request URI
if (!create) {
-String linkIdentifier = ctx.getLastURLElement();
-// support linkName or linkId for the api
-String linkName = HandlerUtils.getLinkNameFromIdentifier(linkIdentifier);
+String linkName = ctx.getLastURLElement();
+MLink existingLink = repository.findLink(linkName);
if (postedLink.getPersistenceId() == MPersistableEntity.PERSISTANCE_ID_DEFAULT) {
-MLink existingLink = repository.findLink(linkName);
postedLink.setPersistenceId(existingLink.getPersistenceId());
}
}
@ -194,15 +192,15 @@ private JsonBean createUpdateLink(RequestContext ctx, boolean create) {
}
private JsonBean getLinks(RequestContext ctx) {
-String identifier = ctx.getLastURLElement();
+String linkName = ctx.getLastURLElement();
LinkBean linkBean;
List<MLink> links;
Locale locale = ctx.getAcceptLanguageHeader();
Repository repository = RepositoryManager.getInstance().getRepository();
-AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(), ctx.getRequest().getRemoteAddr(), "get", "link", identifier);
+AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(), ctx.getRequest().getRemoteAddr(), "get", "link", linkName);
-if(identifier.equals("all")) { // Return all links (by perhaps only for given connector)
+if(linkName.equals("all")) { // Return all links (by perhaps only for given connector)
String connectorName = ctx.getParameterValue(CONNECTOR_NAME_QUERY_PARAM);
if(StringUtils.isEmpty(connectorName)) {
@ -214,16 +212,16 @@ private JsonBean getLinks(RequestContext ctx) {
links = repository.findLinksForConnector(connectorName);
}
} else { // Return one specific link with name or id stored in identifier
-String linkName = HandlerUtils.getLinkNameFromIdentifier(identifier);
+MLink link = HandlerUtils.getLinkFromLinkName(linkName);
links = new LinkedList<>();
-links.add(repository.findLink(linkName));
+links.add(link);
}
// Authorization check
links = AuthorizationEngine.filterResource(ctx.getUserName(), MResource.TYPE.LINK, links);
// Return bean entity (we have to separate what we're returning here)
if(identifier.equals("all")) {
if(linkName.equals("all")) {
linkBean = createLinksBean(links, locale);
} else {
linkBean = createLinkBean(links, locale);
@ -257,13 +255,13 @@ private void addConnectorConfigBundle(Locale locale, LinkBean bean) {
private JsonBean enableLink(RequestContext ctx, boolean enabled) {
Repository repository = RepositoryManager.getInstance().getRepository();
String[] elements = ctx.getUrlElements();
-String linkIdentifier = elements[elements.length - 2];
-String linkName = HandlerUtils.getLinkNameFromIdentifier(linkIdentifier);
+String linkName = elements[elements.length - 2];
+MLink link = HandlerUtils.getLinkFromLinkName(linkName);
// Authorization check
-AuthorizationEngine.enableDisableLink(ctx.getUserName(), linkName);
+AuthorizationEngine.enableDisableLink(ctx.getUserName(), link.getName());
-repository.enableLink(linkName, enabled);
+repository.enableLink(link.getName(), enabled);
return JsonBean.EMPTY_BEAN;
}
}
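
At the REST layer the same change means a link resource is addressed by its name in the URL (the handler above reads the name from the last URL element). A hedged sketch of a GET against that endpoint, with a hypothetical base URL and link name:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class LinkRestSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical server base URL and link name; adjust to the actual deployment.
    URL url = new URL("http://localhost:12000/sqoop/v1/link/rdbms-link");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
      String line;
      while ((line = in.readLine()) != null) {
        // JSON bean for the link, or a SERVER_0006 error if the name is unknown.
        System.out.println(line);
      }
    }
  }
}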

View File

@ -44,7 +44,7 @@ public void test() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");

View File

@ -51,7 +51,7 @@ public void test() throws Exception {
fillHdfsLink(hdfsLinkTo);
saveLink(hdfsLinkTo);
-MJob job = getClient().createJob(hdfsLinkFrom.getPersistenceId(), hdfsLinkTo.getPersistenceId());
+MJob job = getClient().createJob(hdfsLinkFrom.getName(), hdfsLinkTo.getName());
fillHdfsFromConfig(job);

View File

@ -56,7 +56,7 @@ public void testBasic() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
fillHdfsFromConfig(job);
job.getFromJobConfig().getEnumInput("incremental.incrementalType").setValue(IncrementalType.NEW_FILES);
fillRdbmsToConfig(job);

View File

@ -54,7 +54,7 @@ public void testOutputDirectoryIsAFile() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -90,7 +90,7 @@ public void testOutputDirectoryIsNotEmpty() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -125,7 +125,7 @@ public void testOutputDirectoryIsEmpty() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");

View File

@ -106,7 +106,7 @@ public void createLinks() {
@Test
public void testCities() throws Exception {
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), kiteLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), kiteLink.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");

View File

@ -94,7 +94,7 @@ public void testFrom() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Fill rdbms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -139,7 +139,7 @@ public void testTo() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
fillHdfsFromConfig(job);
// Set the rdbms "TO" config here

View File

@ -64,7 +64,7 @@ public void testBasic() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
// set hdfs "FROM" config for the job, since the connector test case base class only has utilities for hdfs!
fillHdfsFromConfig(job);

View File

@ -47,7 +47,7 @@ public void testCities() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -89,7 +89,7 @@ public void testStories() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// Connector values
fillRdbmsFromConfig(job, "id");
@ -131,7 +131,7 @@ public void testColumns() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// Connector values
fillRdbmsFromConfig(job, "id");
@ -173,7 +173,7 @@ public void testSql() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// Connector values
MConfigList configs = job.getFromJobConfig();
@ -213,7 +213,7 @@ public void testDuplicateColumns() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// Connector values
String partitionColumn = provider.escapeTableName(getTableName().getTableName()) + "." + provider.escapeColumnName("id");
@ -263,7 +263,7 @@ public void testAllowNullsWithOneExtractor() throws Exception {
saveLink(hdfsConnection);
// Job creation
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), hdfsConnection.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");

View File

@ -90,7 +90,7 @@ public void testTable() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// Set the rdbms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -142,7 +142,7 @@ public void testQuery() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
String query = "SELECT * FROM " + provider.escapeTableName(getTableName().getTableName()) + " WHERE ${CONDITIONS}";

View File

@ -92,7 +92,7 @@ public void testSplitter() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// set the rdbms "FROM" config
fillRdbmsFromConfig(job, partitionColumn);

View File

@ -56,8 +56,8 @@ public void testStagedTransfer() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(hdfsLink.getPersistenceId(),
-rdbmsLink.getPersistenceId());
+MJob job = getClient().createJob(hdfsLink.getName(),
+rdbmsLink.getName());
// fill HDFS "FROM" config
fillHdfsFromConfig(job);

View File

@ -49,7 +49,7 @@ public void testBasic() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(hdfsLink.getPersistenceId(), kafkaLink.getPersistenceId());
+MJob job = getClient().createJob(hdfsLink.getName(), kafkaLink.getName());
// Job connector configs
fillHdfsFromConfig(job);

View File

@ -52,7 +52,7 @@ public void testBasic() throws Exception {
saveLink(rdbmsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), kafkaLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), kafkaLink.getName());
// set rdbms "FROM" job config
fillRdbmsFromConfig(job, "id");

View File

@ -67,7 +67,7 @@ public void testCities() throws Exception {
saveLink(kiteLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), kiteLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), kiteLink.getName());
// Set rdbms "FROM" config
fillRdbmsFromConfig(job, "id");

View File

@ -175,7 +175,7 @@ private MJob prepareJob() {
MLink testConnection = getClient().createLink("test-connector");
saveLink(testConnection);
-MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), testConnection.getPersistenceId());
+MJob job = getClient().createJob(rdbmsConnection.getName(), testConnection.getName());
fillRdbmsFromConfig(job, "id");

View File

@ -69,8 +69,8 @@ public Map<Integer, Integer> getNumberOfSubmissions() {
}
@Override
-public Integer[] getDisabledLinkIds() {
-return new Integer[] {4};
+public String[] getDisabledLinkNames() {
+return new String[] {linkIdToNameMap.get(4L)};
}
@Override
@ -79,8 +79,9 @@ public String[] getDisabledJobNames() {
}
@Override
-public Integer[] getDeleteLinkIds() {
-return new Integer[] {1, 2, 3, 4, 5};
+public String[] getDeleteLinkNames() {
+return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
}
@Test

View File

@ -71,8 +71,8 @@ public Map<Integer, Integer> getNumberOfSubmissions() {
}
@Override
-public Integer[] getDisabledLinkIds() {
-return new Integer[] {4, 5};
+public String[] getDisabledLinkNames() {
+return new String[] {linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
}
@Override
@ -81,7 +81,8 @@ public String[] getDisabledJobNames() {
}
@Override
-public Integer[] getDeleteLinkIds() {
-return new Integer[] {1, 2, 3, 4, 5, 6};
+public String[] getDeleteLinkNames() {
+return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L), linkIdToNameMap.get(6L)};
}
}

View File

@ -80,8 +80,8 @@ public Map<Integer, Integer> getNumberOfSubmissions() {
}
@Override
-public Integer[] getDisabledLinkIds() {
-return new Integer[] {4, 5};
+public String[] getDisabledLinkNames() {
+return new String[] {linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
}
@Override
@ -90,8 +90,9 @@ public String[] getDisabledJobNames() {
}
@Override
-public Integer[] getDeleteLinkIds() {
-return new Integer[] {1, 2, 3, 4, 5, 6};
+public String[] getDeleteLinkNames() {
+return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L), linkIdToNameMap.get(6L)};
}
@Test
@ -101,7 +102,7 @@ public void testJobNameNotNull() {
assertNotNull(job.getName());
}
-MJob job = getClient().createJob(1, 1);
+MJob job = getClient().createJob(linkIdToNameMap.get(1L), linkIdToNameMap.get(1L));
assertNull(job.getName());
assertEquals(getClient().saveJob(job), Status.ERROR);
}

View File

@ -81,8 +81,8 @@ public Map<Integer, Integer> getNumberOfSubmissions() {
}
@Override
-public Integer[] getDisabledLinkIds() {
-return new Integer[] {4, 5};
+public String[] getDisabledLinkNames() {
+return new String[] {linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
}
@Override
@ -91,8 +91,9 @@ public String[] getDisabledJobNames() {
}
@Override
-public Integer[] getDeleteLinkIds() {
-return new Integer[] {1, 2, 3, 4, 5, 6};
+public String[] getDeleteLinkNames() {
+return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L), linkIdToNameMap.get(6L)};
}
@Test
@ -102,7 +103,7 @@ public void testJobNameNotNull() {
assertNotNull(job.getName());
}
-MJob job = getClient().createJob(1, 1);
+MJob job = getClient().createJob(linkIdToNameMap.get(1L), linkIdToNameMap.get(1L));
assertNull(job.getName());
assertEquals(getClient().saveJob(job), Status.ERROR);
}

View File

@ -20,6 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
import org.apache.sqoop.test.testcases.JettyTestCase;
import org.apache.sqoop.test.utils.CompressionUtils;
@ -56,6 +57,7 @@ public abstract class DerbyRepositoryUpgradeTest extends JettyTestCase {
private static final Logger LOG = Logger.getLogger(DerbyRepositoryUpgradeTest.class);
protected Map<Long, String> jobIdToNameMap;
+protected Map<Long, String> linkIdToNameMap;
/**
* Custom Sqoop mini cluster that points derby repository to real on-disk structures.
@ -109,7 +111,7 @@ protected Map<String, String> getRepositoryConfiguration() {
/**
* List of link ids that should be disabled
*/
-public abstract Integer[] getDisabledLinkIds();
+public abstract String[] getDisabledLinkNames();
/**
* List of job ids that should be disabled
@ -119,7 +121,7 @@ protected Map<String, String> getRepositoryConfiguration() {
/**
* List of link ids that we should delete using the id
*/
-public abstract Integer[] getDeleteLinkIds();
+public abstract String[] getDeleteLinkNames();
public String getRepositoryPath() {
return HdfsUtils.joinPathFragments(getTemporaryJettyPath(), "repo");
@ -159,6 +161,11 @@ public void startSqoopMiniCluster(ITestContext context) throws Exception {
for(MJob job : getClient().getJobs()) {
jobIdToNameMap.put(job.getPersistenceId(), job.getName());
}
+linkIdToNameMap = new HashMap<Long, String>();
+for(MLink link : getClient().getLinks()) {
+linkIdToNameMap.put(link.getPersistenceId(), link.getName());
+}
}
@AfterMethod
@ -185,8 +192,8 @@ public void testPostUpgrade() throws Exception {
}
// Verify that disabled status is preserved
-for(Integer id : getDisabledLinkIds()) {
-assertFalse(getClient().getLink(id).getEnabled());
+for(String linkName : getDisabledLinkNames()) {
+assertFalse(getClient().getLink(linkName).getEnabled());
}
for(String name : getDisabledJobNames()) {
assertFalse(getClient().getJob(name).getEnabled());
@ -196,8 +203,8 @@ public void testPostUpgrade() throws Exception {
for(String name : jobIdToNameMap.values()) {
getClient().deleteJob(name);
}
-for(Integer id : getDeleteLinkIds()) {
-getClient().deleteLink(id);
+for(String linkName : getDeleteLinkNames()) {
+getClient().deleteLink(linkName);
}
// We should end up with empty repository

View File

@ -69,7 +69,7 @@ public void testInformalJobName() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// rdms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -81,7 +81,7 @@ public void testInformalJobName() throws Exception {
saveJob(job);
assertEquals(job, getClient().getJob(JOB_NAME_CONTAINS_WHITESPACE));
-job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// rdms "FROM" config
fillRdbmsFromConfig(job, "id");

View File

@ -216,8 +216,8 @@ void validate() throws Exception {
@Override
void validate() throws Exception {
assertResponseCode(500);
assertServerException("org.apache.sqoop.server.common.ServerError", "SERVER_0005");
assertContains("Invalid link: i-dont-exists");
assertServerException("org.apache.sqoop.server.common.ServerError", "SERVER_0006");
assertContains("Invalid link name: i-dont-exists");
}}),
new TestDescription("Get links for non existing connector", "v1/link/all?cname=i-dont-exists", "GET", null, new Validator() {
@Override

View File

@ -55,7 +55,7 @@ public void testShowJobInOrder() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// rdms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -66,7 +66,7 @@ public void testShowJobInOrder() throws Exception {
saveJob(job);
// Job creation
-job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
// rdms "To" config
fillRdbmsToConfig(job);
@ -77,7 +77,7 @@ public void testShowJobInOrder() throws Exception {
saveJob(job);
// Job creation
-job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
// rdms "FROM" config
fillRdbmsFromConfig(job, "id");
@ -88,7 +88,7 @@ public void testShowJobInOrder() throws Exception {
saveJob(job);
// Job creation
-job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
// hdfs "From" config

View File

@ -89,7 +89,7 @@ public void testWithDisabledObjects() throws Exception {
saveLink(hdfsLink);
// Job creation
-MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
job.setName(jobName);
// rdms "FROM" config
@ -101,7 +101,7 @@ public void testWithDisabledObjects() throws Exception {
saveJob(job);
// Disable model entities as per parameterized run
-getClient().enableLink(rdbmsLink.getPersistenceId(), enabledLink);
+getClient().enableLink(rdbmsLink.getName(), enabledLink);
getClient().enableJob(jobName, enabledJob);
// Try to execute the job and verify that the it was not executed