5
0
mirror of https://github.com/apache/sqoop.git synced 2025-05-04 23:42:14 +08:00

SQOOP-861: Integration: Create basic integration infrastructure

(Jarcec Cecho via Cheolsoo Park)
This commit is contained in:
Cheolsoo Park 2013-02-10 12:49:38 -08:00
parent 86787f2d9d
commit addc87ee4b
7 changed files with 756 additions and 0 deletions

28
pom.xml
View File

@ -93,6 +93,7 @@ limitations under the License.
<maven.compile.target>1.6</maven.compile.target>
<commons-dbcp.version>1.4</commons-dbcp.version>
<commons-lang.version>2.5</commons-lang.version>
<commons-io.version>2.4</commons-io.version>
<derby.version>10.8.2.2</derby.version>
<hadoop.1.version>1.0.3</hadoop.1.version>
<hadoop.2.version>2.0.2-alpha</hadoop.2.version>
@ -101,6 +102,7 @@ limitations under the License.
<junit.version>4.9</junit.version>
<log4j.version>1.2.16</log4j.version>
<servlet.version>2.5</servlet.version>
<cargo.version>1.3.2</cargo.version>
</properties>
<dependencies>
@ -126,6 +128,13 @@ limitations under the License.
<dependencyManagement>
<dependencies>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons-io.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@ -138,6 +147,13 @@ limitations under the License.
<version>${hadoop.1.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-test</artifactId>
<version>${hadoop.1.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</profile>
@ -181,6 +197,12 @@ limitations under the License.
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<version>${hadoop.2.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</profile>
@ -310,6 +332,11 @@ limitations under the License.
<artifactId>derby</artifactId>
<version>${derby.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.cargo</groupId>
<artifactId>cargo-core-container-tomcat</artifactId>
<version>${cargo.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
@ -325,6 +352,7 @@ limitations under the License.
<module>execution</module>
<module>submission</module>
<module>dist</module>
<module>test</module>
</modules>
<build>

213
test/pom.xml Normal file
View File

@ -0,0 +1,213 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Integration test module: pulls in all Sqoop modules plus Cargo so tests
     can bootstrap a real (Tomcat-hosted or in-process) Sqoop server. -->
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache</groupId>
<artifactId>sqoop</artifactId>
<version>2.0.0-SNAPSHOT</version>
</parent>
<groupId>org.apache.sqoop</groupId>
<artifactId>test</artifactId>
<name>Sqoop Integration Tests</name>
<!-- Versions are managed in the parent POM's dependencyManagement section -->
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.sqoop</groupId>
<artifactId>sqoop-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.sqoop</groupId>
<artifactId>sqoop-client</artifactId>
</dependency>
<!-- Execution/submission engines are built per Hadoop profile, hence the classifier -->
<dependency>
<groupId>org.apache.sqoop.execution</groupId>
<artifactId>sqoop-execution-mapreduce</artifactId>
<classifier>hadoop${hadoop.profile}</classifier>
</dependency>
<dependency>
<groupId>org.apache.sqoop.submission</groupId>
<artifactId>sqoop-submission-mapreduce</artifactId>
<classifier>hadoop${hadoop.profile}</classifier>
</dependency>
<dependency>
<groupId>org.apache.sqoop.repository</groupId>
<artifactId>sqoop-repository-derby</artifactId>
</dependency>
<dependency>
<groupId>org.apache.sqoop.connector</groupId>
<artifactId>sqoop-connector-generic-jdbc</artifactId>
</dependency>
<!-- Cargo manages the external Tomcat container used by TomcatSqoopMiniCluster -->
<dependency>
<groupId>org.codehaus.cargo</groupId>
<artifactId>cargo-core-container-tomcat</artifactId>
</dependency>
</dependencies>
<!-- Add classifier name to the JAR name -->
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<configuration>
<classifier>hadoop${hadoop.profile}</classifier>
</configuration>
</plugin>
<!--
Exclude all integration tests on normal execution
Run "mvn integration-test" to execute integration tests
-->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/integration/**</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<!-- Re-enable the integration package for this phase only -->
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/integration/**</include>
</includes>
<!-- Expose the build directory so tests create temporary data under target/ -->
<systemPropertyVariables>
<sqoop.integration.tmpdir>${project.build.directory}</sqoop.integration.tmpdir>
</systemPropertyVariables>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<!-- Profiles for various supported Hadoop distributions -->
<profiles>
<!-- Hadoop 1.x -->
<profile>
<id>hadoop100</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>100</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-test</artifactId>
</dependency>
</dependencies>
</profile>
<!-- Hadoop 2.x (active by default) -->
<profile>
<id>hadoop200</id>
<activation>
<activeByDefault>true</activeByDefault>
<property>
<name>hadoop.profile</name>
<value>200</value>
</property>
</activation>
<properties>
<hadoop.profile>200</hadoop.profile>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
</dependencies>
</profile>
</profiles>
</project>

View File

@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.minicluster;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.core.SqoopServer;
/**
 * Sqoop mini cluster running inside the current JVM.
 *
 * The server is bootstrapped through direct {@link SqoopServer} calls and is
 * reachable through the usual manager classes within this single process; it
 * does not expose an HTTP endpoint.
 */
public class InProcessSqoopMiniCluster extends SqoopMiniCluster {

  /** {@inheritDoc} */
  public InProcessSqoopMiniCluster(String temporaryPath) throws Exception {
    super(temporaryPath);
  }

  /** {@inheritDoc} */
  public InProcessSqoopMiniCluster(String temporaryPath, Configuration configuration) throws Exception {
    super(temporaryPath, configuration);
  }

  /**
   * Generate the on-disk configuration and boot the server in-process.
   */
  @Override
  public void start() throws Exception {
    prepareTemporaryPath();
    SqoopServer.initialize();
  }

  /**
   * Tear the in-process server down.
   */
  @Override
  public void stop() throws Exception {
    SqoopServer.destroy();
  }
}

View File

@ -0,0 +1,201 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.minicluster;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.core.ConfigurationConstants;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Basic tools to bootstrap Sqoop Mini cluster.
 *
 * Concrete subclasses decide where the server actually runs (in-process,
 * external Tomcat, ...); this base class owns the temporary directory layout
 * and generation of the server configuration files.
 */
public abstract class SqoopMiniCluster {

  /**
   * Hadoop configuration.
   *
   * Either mini cluster generated or real one if we're running on real cluster.
   */
  private Configuration configuration;

  /**
   * Temporary path for storing Sqoop server data (configuration files).
   */
  private String temporaryPath;

  /**
   * Create Sqoop Mini cluster with default configuration.
   *
   * @param temporaryPath Temporary path
   * @throws Exception
   */
  public SqoopMiniCluster(String temporaryPath) throws Exception {
    this(temporaryPath, new Configuration());
  }

  /**
   * Create Sqoop Mini cluster.
   *
   * @param temporaryPath Temporary path
   * @param configuration Hadoop configuration
   * @throws Exception
   */
  public SqoopMiniCluster(String temporaryPath, Configuration configuration) throws Exception {
    this.temporaryPath = temporaryPath;
    this.configuration = configuration;
  }

  /**
   * Return temporary path.
   *
   * @return Temporary path
   */
  public String getTemporaryPath() {
    return temporaryPath;
  }

  /**
   * Return directory where generated configuration files are stored.
   */
  public String getConfigurationPath() {
    return temporaryPath + "/config/";
  }

  /**
   * Return directory where server logs are stored.
   */
  public String getLogPath() {
    return temporaryPath + "/log/";
  }

  /**
   * Start Sqoop Mini cluster.
   *
   * @throws Exception
   */
  abstract public void start() throws Exception;

  /**
   * Stop Sqoop mini cluster.
   *
   * @throws Exception
   */
  abstract public void stop() throws Exception;

  /**
   * Prepare temporary directory for starting Sqoop server.
   *
   * Recreates the temporary directory from scratch and generates
   * sqoop_bootstrap.properties, sqoop.properties and hadoop-site.xml there.
   *
   * @throws IOException
   */
  protected void prepareTemporaryPath() throws IOException {
    File tmpDir = new File(getTemporaryPath());
    File configDir = new File(getConfigurationPath());
    File logDir = new File(getLogPath());

    // Start from a clean slate so state from a previous run can't leak in
    FileUtils.deleteDirectory(tmpDir);
    FileUtils.forceMkdir(tmpDir);
    FileUtils.forceMkdir(configDir);
    FileUtils.forceMkdir(logDir);

    // Point the server at the generated configuration directory
    System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR, getConfigurationPath());

    // sqoop_bootstrap.properties
    FileUtils.writeStringToFile(new File(getConfigurationPath() + "sqoop_bootstrap.properties"), "sqoop.config.provider=org.apache.sqoop.core.PropertiesConfigurationProvider");

    // sqoop.properties
    // TODO: This should be generated more dynamically so that user can specify Repository, Submission and Execution engines
    File f = new File(getConfigurationPath() + "sqoop.properties");

    List<String> sqoopProperties = new LinkedList<String>();
    mapToProperties(sqoopProperties, getLoggerConfiguration());
    mapToProperties(sqoopProperties, getRepositoryConfiguration());
    mapToProperties(sqoopProperties, getSubmissionEngineConfiguration());
    mapToProperties(sqoopProperties, getExecutionEngineConfiguration());

    FileUtils.writeLines(f, sqoopProperties);

    // Hadoop configuration
    OutputStream stream = FileUtils.openOutputStream(new File(getConfigurationPath() + "hadoop-site.xml"));
    try {
      configuration.writeXml(stream);
    } finally {
      // Close even when writeXml throws so the file handle isn't leaked
      stream.close();
    }
  }

  /**
   * Serialize given map as {@code key=value} lines into the output list.
   */
  private void mapToProperties(List<String> output, Map<String, String> input) {
    for(Map.Entry<String, String> entry : input.entrySet()) {
      output.add(entry.getKey() + "=" + entry.getValue());
    }
  }

  /**
   * Return properties for logger configuration.
   *
   * Default implementation will configure server to log into console.
   *
   * @return Map of log4j properties
   */
  protected Map<String, String> getLoggerConfiguration() {
    Map<String, String> properties = new HashMap<String, String>();

    properties.put("org.apache.sqoop.log4j.appender.file", "org.apache.log4j.ConsoleAppender");
    properties.put("org.apache.sqoop.log4j.appender.file.layout", "org.apache.log4j.PatternLayout");
    properties.put("org.apache.sqoop.log4j.appender.file.layout.ConversionPattern", "%d{ISO8601} %-5p %c{2} [%l] %m%n");
    properties.put("org.apache.sqoop.log4j.debug", "true");
    properties.put("org.apache.sqoop.log4j.rootCategory", "WARN, file");
    properties.put("org.apache.sqoop.log4j.category.org.apache.sqoop", "DEBUG");
    properties.put("org.apache.sqoop.log4j.category.org.apache.derby", "INFO");

    return properties;
  }

  /**
   * Return properties for an in-memory Derby repository.
   *
   * @return Map of repository properties
   */
  protected Map<String, String> getRepositoryConfiguration() {
    Map<String, String> properties = new HashMap<String, String>();

    properties.put("org.apache.sqoop.repository.provider", "org.apache.sqoop.repository.JdbcRepositoryProvider");
    properties.put("org.apache.sqoop.repository.jdbc.handler", "org.apache.sqoop.repository.derby.DerbyRepositoryHandler");
    properties.put("org.apache.sqoop.repository.jdbc.transaction.isolation", "READ_COMMITTED");
    properties.put("org.apache.sqoop.repository.jdbc.maximum.connections", "10");
    // Fixed: the key/value pair was split at the wrong '=' — the whole JDBC URL
    // (including ";create=true") belongs in the value, not in the property key.
    properties.put("org.apache.sqoop.repository.jdbc.url", "jdbc:derby:memory:myDB;create=true");
    properties.put("org.apache.sqoop.repository.jdbc.create.schema", "true");
    properties.put("org.apache.sqoop.repository.jdbc.driver", "org.apache.derby.jdbc.EmbeddedDriver");
    properties.put("org.apache.sqoop.repository.jdbc.user", "sa");
    properties.put("org.apache.sqoop.repository.jdbc.password", "");

    return properties;
  }

  /**
   * Return properties for the MapReduce submission engine.
   *
   * @return Map of submission engine properties
   */
  protected Map<String, String> getSubmissionEngineConfiguration() {
    Map<String, String> properties = new HashMap<String, String>();

    properties.put("org.apache.sqoop.submission.engine", "org.apache.sqoop.submission.mapreduce.MapreduceSubmissionEngine");
    properties.put("org.apache.sqoop.submission.engine.mapreduce.configuration.directory", getConfigurationPath());

    return properties;
  }

  /**
   * Return properties for the MapReduce execution engine.
   *
   * @return Map of execution engine properties
   */
  protected Map<String, String> getExecutionEngineConfiguration() {
    Map<String, String> properties = new HashMap<String, String>();

    properties.put("org.apache.sqoop.execution.engine", "org.apache.sqoop.execution.mapreduce.MapreduceExecutionEngine");

    return properties;
  }
}

View File

@ -0,0 +1,148 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.minicluster;
import org.apache.hadoop.conf.Configuration;
import org.codehaus.cargo.container.ContainerType;
import org.codehaus.cargo.container.InstalledLocalContainer;
import org.codehaus.cargo.container.configuration.ConfigurationType;
import org.codehaus.cargo.container.configuration.LocalConfiguration;
import org.codehaus.cargo.container.deployable.WAR;
import org.codehaus.cargo.container.installer.Installer;
import org.codehaus.cargo.container.installer.ZipURLInstaller;
import org.codehaus.cargo.generic.DefaultContainerFactory;
import org.codehaus.cargo.generic.configuration.DefaultConfigurationFactory;
import java.net.URL;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Embedded tomcat Sqoop server mini cluster.
 *
 * This mini cluster will start up embedded tomcat
 */
public class TomcatSqoopMiniCluster extends SqoopMiniCluster {

  /** Cargo handle to the managed Tomcat instance; null when not running. */
  private InstalledLocalContainer container = null;

  /** {@inheritDoc} */
  public TomcatSqoopMiniCluster(String temporaryPath) throws Exception {
    super(temporaryPath);
  }

  /** {@inheritDoc} */
  public TomcatSqoopMiniCluster(String temporaryPath, Configuration configuration) throws Exception {
    super(temporaryPath, configuration);
  }

  /** {@inheritDoc} */
  @Override
  public void start() throws Exception {
    // Container has already been started
    if(container != null) {
      return;
    }

    prepareTemporaryPath();

    // TODO(jarcec): We should parametrize those paths, version, etc...
    // Source: http://cargo.codehaus.org/Functional+testing
    Installer installer = new ZipURLInstaller(new URL("http://archive.apache.org/dist/tomcat/tomcat-6/v6.0.36/bin/apache-tomcat-6.0.36.zip"));
    installer.install();

    LocalConfiguration configuration = (LocalConfiguration) new DefaultConfigurationFactory().createConfiguration("tomcat6x", ContainerType.INSTALLED, ConfigurationType.STANDALONE);
    container = (InstalledLocalContainer) new DefaultContainerFactory().createContainer("tomcat6x", ContainerType.INSTALLED, configuration);

    // Set home to our installed tomcat instance
    container.setHome(installer.getHome());

    // Store tomcat logs into file as they are quite handy for debugging
    container.setOutput(getTemporaryPath() + "/log/tomcat.log");

    // Propagate system properties to the container
    Map<String, String> map = new HashMap<String, String>((Map) System.getProperties());
    container.setSystemProperties(map);

    // Propagate Hadoop jars to the container classpath
    // In real world, they would be installed manually by user
    List<String> extraClassPath = new LinkedList<String>();

    // Use the platform path separator (':' on Unix, ';' on Windows) rather
    // than a hard-coded ':' so the classpath is split correctly everywhere.
    String []classpath = System.getProperty("java.class.path").split(System.getProperty("path.separator"));
    for(String jar : classpath) {
      if(jar.contains("hadoop-") ||   // Hadoop jars
         jar.contains("commons-") ||  // Apache Commons libraries
         jar.contains("log4j-") ||    // Log4j
         jar.contains("slf4j-") ||    // Slf4j
         jar.contains("jackson-") ||  // Jackson
         jar.contains("google")       // Google libraries (guava, ...)
      ) {
        extraClassPath.add(jar);
      }
    }
    container.setExtraClasspath(extraClassPath.toArray(new String[extraClassPath.size()]));

    // Finally deploy Sqoop server war file
    configuration.addDeployable(new WAR("../server/target/sqoop.war"));

    // Start Sqoop server
    container.start();
  }

  /** {@inheritDoc} */
  @Override
  public void stop() throws Exception {
    // No-op when the container was never started (or is already stopped),
    // so tear-down code can call stop() unconditionally without an NPE.
    if(container == null) {
      return;
    }
    container.stop();
    container = null;
  }

  /**
   * Return properties for logger configuration.
   *
   * Tomcat implementation will log into log file instead of console.
   *
   * @return Map of log4j properties
   */
  @Override
  protected Map<String, String> getLoggerConfiguration() {
    Map<String, String> properties = new HashMap<String, String>();

    properties.put("org.apache.sqoop.log4j.appender.file", "org.apache.log4j.RollingFileAppender");
    properties.put("org.apache.sqoop.log4j.appender.file.File", getLogPath() + "sqoop.log");
    properties.put("org.apache.sqoop.log4j.appender.file.MaxFileSize", "25MB");
    properties.put("org.apache.sqoop.log4j.appender.file.MaxBackupIndex", "5");
    properties.put("org.apache.sqoop.log4j.appender.file.layout", "org.apache.log4j.PatternLayout");
    properties.put("org.apache.sqoop.log4j.appender.file.layout.ConversionPattern", "%d{ISO8601} %-5p %c{2} [%l] %m%n");
    properties.put("org.apache.sqoop.log4j.debug", "true");
    properties.put("org.apache.sqoop.log4j.rootCategory", "WARN, file");
    properties.put("org.apache.sqoop.log4j.category.org.apache.sqoop", "DEBUG");
    properties.put("org.apache.sqoop.log4j.category.org.apache.derby", "INFO");

    return properties;
  }

  /**
   * Return server URL.
   */
  public String getServerUrl() {
    // We're not doing any changes, so return default URL
    return "http://localhost:8080/sqoop/";
  }
}

View File

@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.integration;
import org.apache.sqoop.test.minicluster.TomcatSqoopMiniCluster;
import org.junit.After;
import org.junit.Before;
/**
 * Basic test case that will bootstrap Sqoop server running in external Tomcat
 * process.
 */
abstract public class TomcatTestCase {

  /**
   * Temporary path that will be used for this test.
   *
   * By default we will take look for sqoop.integration.tmpdir property that is
   * filled up by maven. If the test is not started from maven (IDE) we will
   * pick up configured java.io.tmpdir value. The last results is /tmp/ directory
   * in case that no property is set.
   */
  private final String TMP_PATH =
    System.getProperty("sqoop.integration.tmpdir", System.getProperty("java.io.tmpdir", "/tmp"))
    + "/sqoop-cargo-tests/" + getClass().getName() + "/";

  /**
   * Tomcat based Sqoop mini cluster
   */
  private TomcatSqoopMiniCluster cluster;

  /**
   * Start the Tomcat based Sqoop mini cluster before each test.
   */
  @Before
  public void setUp() throws Exception {
    cluster = new TomcatSqoopMiniCluster(TMP_PATH);
    cluster.start();
  }

  /**
   * Stop the mini cluster after each test.
   */
  @After
  public void cleanUp() throws Exception {
    // Guard against an NPE that would mask the original failure when setUp()
    // died before the cluster was assigned (JUnit runs @After regardless).
    if (cluster != null) {
      cluster.stop();
      cluster = null;
    }
  }

  /**
   * Return testing server URL
   *
   * @return Base URL of the running Sqoop server
   */
  public String getServerUrl() {
    return cluster.getServerUrl();
  }
}

View File

@ -0,0 +1,45 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.integration.server;
import org.apache.sqoop.client.request.VersionRequest;
import org.apache.sqoop.common.VersionInfo;
import org.apache.sqoop.integration.TomcatTestCase;
import org.apache.sqoop.json.VersionBean;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
/**
 * Basic test to check that server is working and returning correct version info.
 */
public class VersionTest extends TomcatTestCase {

  @Test
  public void testVersion() {
    VersionRequest versionRequest = new VersionRequest();
    VersionBean versionBean = versionRequest.doGet(getServerUrl());

    // assertEquals(expected, actual): the build-time VersionInfo values are the
    // expected side; the server response is the actual side. The original test
    // also asserted getRevision() twice — each field is now checked exactly once.
    assertEquals(VersionInfo.getVersion(), versionBean.getVersion());
    assertEquals(VersionInfo.getDate(), versionBean.getDate());
    assertEquals(VersionInfo.getRevision(), versionBean.getRevision());
    assertEquals(VersionInfo.getUser(), versionBean.getUser());
  }
}