
SQOOP-2877: Sqoop2: Enrich Kite Connector resource file

(Jarek Jarcec Cecho via Abraham Fine)
Committed by Abraham Fine on 2016-03-21 10:54:12 -07:00
parent 7ad7c27051
commit 2776f44eb3


@@ -13,42 +13,35 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# Kite Connector Resources
-############################
-# Link Config
-#
-linkConfig.label = Link Configuration
-linkConfig.help = You must supply the information requested in order to create a \
-                  connection object.
+connector.name = Kite connector
+linkConfig.label = Global configuration
+linkConfig.help = Global configuration options that will be used for both from and to sides.
 linkConfig.authority.label = HDFS host and port
+linkConfig.authority.example = namenode.sqoop.org:8020
 linkConfig.authority.help = Optional to override HDFS file system location.
 linkConfig.confDir.label = Hadoop conf directory
+linkConfig.confDir.example = /etc/hadoop/conf/
 linkConfig.confDir.help = Directory with Hadoop configuration files. This directory will be added to the classpath.
-# To Job Config
-#
-toJobConfig.label = To Kite Dataset Configuration
-toJobConfig.help = You must supply the information requested in order to \
-                   get information where you want to store your data.
+toJobConfig.label = Target configuration
+toJobConfig.help = Configuration options relevant to target dataset.
 toJobConfig.uri.label = Dataset URI
-toJobConfig.uri.help = Location to store dataset (i.e. \
-  "dataset:hdfs://<host>[:port]/<path>/<namespace>/<dataset>", \
-  "dataset:hive://<namespace>/<dataset>")
+toJobConfig.uri.example = dataset:hdfs://namespace/table
+toJobConfig.uri.help = Kite Dataset URI where should be data written to.
 toJobConfig.fileFormat.label = File format
-toJobConfig.fileFormat.help = Specify storage format to create a dataset and cannot be changed.
+toJobConfig.fileFormat.example = PARQUET
+toJobConfig.fileFormat.help = Storage format that should be used when creating new dataset.
-# From Job Config
-#
-fromJobConfig.label = From Kite Dataset Configuration
-fromJobConfig.help = You must supply the information requested in order to \
-                   get information where you want to store your data.
+fromJobConfig.label = Source configuration
+fromJobConfig.help = Configuration options relevant to source dataset.
 fromJobConfig.uri.label = Dataset URI
-fromJobConfig.uri.help = Location to load dataset (i.e. \
-  "dataset:hdfs://<host>[:port]/<path>/<namespace>/<dataset>", \
-  "dataset:hive://<namespace>/<dataset>")
+fromJobConfig.uri.example = dataset:hdfs://namespace/table
+fromJobConfig.uri.help = Kite Dataset URI from which data will be read.