From b22904cbfecc52eb58bfd62d03d508bccebc947b Mon Sep 17 00:00:00 2001 From: Andrew Bayer Date: Fri, 22 Jul 2011 20:04:22 +0000 Subject: [PATCH] SQOOP-133. Removing shim layer mechanism. This change removes the ShimLoader and various Shim classes such as CDH3Shim etc. It introduces a couple of new classes - ConfigurationConstants and ConfigurationHelper - that provide a unique place for articulating interface related details such as configuration keys that can likely change from version to version of Hadoop. From: Arvind Prabhakar git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1149994 13f79535-47bb-0310-9956-ffa450edef68 --- COMPILING.txt | 20 -- bin/configure-sqoop | 9 - build.xml | 258 ++--------------- ivy.xml | 2 - ivy/apache-shim.xml | 49 ---- ...E-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt | 0 .../LICENSE-hadoop-0.22-SNAPSHOT-mrunit.txt | 244 ---------------- lib/apache/hadoop-0.22-SNAPSHOT-mrunit.jar | Bin 50491 -> 0 bytes .../hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar | Bin .../sqoop/config/ConfigurationConstants.java | 83 ++++++ .../sqoop/config/ConfigurationHelper.java | 171 +++++++++++ .../cloudera/sqoop/manager/MySQLUtils.java | 19 +- .../cloudera/sqoop/manager/OracleManager.java | 23 +- .../sqoop/mapreduce/AsyncSqlOutputFormat.java | 0 .../sqoop/mapreduce/AsyncSqlRecordWriter.java | 0 .../mapreduce/CombineShimRecordReader.java | 0 .../sqoop/mapreduce/DataDrivenImportJob.java | 17 +- .../mapreduce/DelegatingOutputFormat.java | 0 .../sqoop/mapreduce/ExportInputFormat.java | 0 .../sqoop/mapreduce/ExportJobBase.java | 17 +- .../sqoop/mapreduce/ExportOutputFormat.java | 0 .../sqoop/mapreduce/HBaseImportJob.java | 6 +- .../sqoop/mapreduce/ImportJobBase.java | 8 +- .../sqoop/mapreduce/JdbcUpdateExportJob.java | 9 +- .../com/cloudera/sqoop/mapreduce/JobBase.java | 9 +- .../cloudera/sqoop/mapreduce/MergeJob.java | 12 +- .../sqoop/mapreduce/MySQLDumpImportJob.java | 17 +- .../sqoop/mapreduce/MySQLDumpInputFormat.java | 0 
.../mapreduce/OracleExportOutputFormat.java | 0 .../mapreduce/RawKeyTextOutputFormat.java | 0 .../sqoop/mapreduce/UpdateOutputFormat.java | 0 .../mapreduce/db/BigDecimalSplitter.java | 6 +- .../sqoop/mapreduce/db/DBInputFormat.java | 48 ++-- .../sqoop/mapreduce/db/DBOutputFormat.java | 54 ++-- .../mapreduce/db/DataDrivenDBInputFormat.java | 8 +- .../sqoop/mapreduce/db/DateSplitter.java | 5 +- .../sqoop/mapreduce/db/FloatSplitter.java | 4 +- .../sqoop/mapreduce/db/IntegerSplitter.java | 4 +- .../sqoop/mapreduce/db/TextSplitter.java | 7 +- .../com/cloudera/sqoop/shims/HadoopShim.java | 147 ---------- .../com/cloudera/sqoop/shims/ShimLoader.java | 266 ------------------ .../cloudera/sqoop/tool/BaseSqoopTool.java | 15 +- .../com/cloudera/sqoop/tool/SqoopTool.java | 6 +- src/java/com/cloudera/sqoop/util/Jars.java | 12 - src/java/com/cloudera/sqoop/util/TaskId.java | 5 +- src/scripts/hudson/run-code-quality.sh | 2 +- .../sqoop/shims/Apache22HadoopShim.java | 134 --------- .../sqoop/shims/CommonHadoopShim.java | 75 ----- .../sqoop/lib/TestLargeObjectLoader.java | 9 +- .../cloudera/sqoop/orm/TestParseMethods.java | 8 +- .../sqoop/testutil/BaseSqoopTestCase.java | 2 - .../sqoop/testutil/MockObjectFactory.java} | 78 +---- 52 files changed, 439 insertions(+), 1429 deletions(-) delete mode 100644 ivy/apache-shim.xml rename lib/{cloudera => }/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt (100%) delete mode 100644 lib/apache/LICENSE-hadoop-0.22-SNAPSHOT-mrunit.txt delete mode 100644 lib/apache/hadoop-0.22-SNAPSHOT-mrunit.jar rename lib/{cloudera => }/hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar (100%) create mode 100644 src/java/com/cloudera/sqoop/config/ConfigurationConstants.java create mode 100644 src/java/com/cloudera/sqoop/config/ConfigurationHelper.java rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java (100%) rename src/{shims/common => 
java}/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/ExportInputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java (100%) rename src/{shims/common => java}/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java (100%) delete mode 100644 src/java/com/cloudera/sqoop/shims/HadoopShim.java delete mode 100644 src/java/com/cloudera/sqoop/shims/ShimLoader.java delete mode 100644 src/shims/apache/com/cloudera/sqoop/shims/Apache22HadoopShim.java delete mode 100644 src/shims/common/com/cloudera/sqoop/shims/CommonHadoopShim.java rename src/{shims/cloudera/com/cloudera/sqoop/shims/CDH3Shim.java => test/com/cloudera/sqoop/testutil/MockObjectFactory.java} (53%) diff --git a/COMPILING.txt b/COMPILING.txt index 5d4f4e33..6353d2d0 100644 --- a/COMPILING.txt +++ b/COMPILING.txt @@ -110,26 +110,6 @@ ant jar -Dhadoop.dist=cloudera To switch between builds, you will need to clear Ivy's dependency cache: +ant veryclean+ -=== Using a Local Hadoop Installation - -Sqoop can be compiled against a locally-installed version of Sqoop, -bypassing the maven repository. To do this you will need to set -three properties: - -- hadoop.dist should be set to "local" -- hadoop.shim should be set to "cloudera" or "apache", to tell Sqoop whether - to build the Cloudera or Apache-specific shim jar -- hadoop.home should be set to the path where Hadoop is installed. - -For example, the following will compile Sqoop against a locally-installed -version of CDH. 
- -++++ -ant jar jar-one-shim -Dhadoop.dist=local -Dhadoop.shim=cloudera \ - -Dhadoop.home=/usr/lib/hadoop -++++ - - == Code Quality Analysis We have three tools which can be used to analyze Sqoop's code quality. diff --git a/bin/configure-sqoop b/bin/configure-sqoop index bd0a5e86..72d176e1 100755 --- a/bin/configure-sqoop +++ b/bin/configure-sqoop @@ -62,17 +62,10 @@ fi # Where to find the main Sqoop jar SQOOP_JAR_DIR=$SQOOP_HOME -# Where to find the shim jars. -SQOOP_SHIM_DIR=$SQOOP_HOME/shims - # If there's a "build" subdir, override with this, so we use # the newly-compiled copy. if [ -d "$SQOOP_JAR_DIR/build" ]; then SQOOP_JAR_DIR="${SQOOP_JAR_DIR}/build" - - if [ -d "$SQOOP_JAR_DIR/shims" ]; then - SQOOP_SHIM_DIR="$SQOOP_JAR_DIR/shims" - fi fi function add_to_classpath() { @@ -115,9 +108,7 @@ add_to_classpath ${SQOOP_JAR_DIR} export SQOOP_CLASSPATH export SQOOP_CONF_DIR export SQOOP_JAR_DIR -export SQOOP_SHIM_DIR export HADOOP_CLASSPATH="${SQOOP_CLASSPATH}:${HADOOP_CLASSPATH}" export HADOOP_HOME export HBASE_HOME -export HADOOP_OPTS="-Dsqoop.shim.jar.dir=${SQOOP_SHIM_DIR} ${HADOOP_OPTS}" diff --git a/build.xml b/build.xml index f108dd58..95735002 100644 --- a/build.xml +++ b/build.xml @@ -42,7 +42,7 @@ - + @@ -50,7 +50,6 @@ - @@ -71,10 +70,6 @@ - - - - @@ -107,7 +102,7 @@ - + @@ -131,10 +126,6 @@ value="https://repository.cloudera.com/content/repositories/${mvn.repo}"/> - - @@ -162,8 +153,8 @@ - + + location="${build.ivy.lib.dir}/${name}/redist"/> - - + + @@ -267,60 +258,14 @@ - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - You must explicitly set ${hadoop.shim} to 'apache' or 'cloudera' - to use a local distribution. - - - You must set ${hadoop.home} to point to your local - Hadoop installation. - - - - - - - - - @@ -329,44 +274,9 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - - - - - - @@ -576,7 +478,7 @@ and copy it to sqoop-site.xml, overwriting any user-specified sqoop-site.xml in there. 
--> - @@ -617,15 +519,15 @@ + depends="compile-test,compile,test-prep"> - + - @@ -827,17 +727,6 @@ - - - - - - - - - @@ -858,14 +747,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -1140,7 +984,7 @@ - @@ -1170,65 +1014,28 @@ - + - - - - - - - - - - - - - - + + + - + - - - - - - - - - - - - - + conf="clouderatest" /> @@ -1263,9 +1070,6 @@ - - - diff --git a/ivy.xml b/ivy.xml index 72a4aaf9..9c0c52c6 100644 --- a/ivy.xml +++ b/ivy.xml @@ -139,8 +139,6 @@ rev="${commons-collections.version}" conf="releaseaudit->default"/> - diff --git a/ivy/apache-shim.xml b/ivy/apache-shim.xml deleted file mode 100644 index 8e804129..00000000 --- a/ivy/apache-shim.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - Sqoop shim for Apache Hadoop - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/cloudera/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt b/lib/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt similarity index 100% rename from lib/cloudera/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt rename to lib/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt diff --git a/lib/apache/LICENSE-hadoop-0.22-SNAPSHOT-mrunit.txt b/lib/apache/LICENSE-hadoop-0.22-SNAPSHOT-mrunit.txt deleted file mode 100644 index 59bcdbc9..00000000 --- a/lib/apache/LICENSE-hadoop-0.22-SNAPSHOT-mrunit.txt +++ /dev/null @@ -1,244 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - -APACHE HADOOP SUBCOMPONENTS: - -The Apache Hadoop project contains subcomponents with separate copyright -notices and license terms. Your use of the source code for the these -subcomponents is subject to the terms and conditions of the following -licenses. - -For the org.apache.hadoop.util.bloom.* classes: - -/** - * - * Copyright (c) 2005, European Commission project OneLab under contract - * 034819 (http://www.one-lab.org) - * All rights reserved. - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the distribution. - * - Neither the name of the University Catholique de Louvain - UCL - * nor the names of its contributors may be used to endorse or - * promote products derived from this software without specific prior - * written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ diff --git a/lib/apache/hadoop-0.22-SNAPSHOT-mrunit.jar b/lib/apache/hadoop-0.22-SNAPSHOT-mrunit.jar deleted file mode 100644 index 00525e80686affcd235ca448ad480e1332365eac..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 50491 zcmb5V18`-JIhT*j9*hAAd3G5g#w}c z3r+aR=B51?YW}yO{cZmZl@pc+NQkSbGRR3h$<0j5%Fr_`!pqRp%+1UJRI9-KJO zh@v>rNy*Mhsewd&BcFYS^XklrRz{UkR&mXx3A_x%aZQGsMPmz+c|!ALO^$wyrj3?S zc8NykdrEp{^Xl~M{BK=A{-w)*Y5cF-_rJY^vjyY-()RyE!2TD)$kE8e((E6ADE|qx zG%|H?aQp{6_Wvv1&e_f0%Jm<_NdHO9&dAZ(%+$@~pY){q|2Kf0gNe;Qs6p_Lqx`ch z$v?@unz^{Txmwx&!`y`bDDUNH=JMad|Ni<0=T;pgAwfX6{_godE?>^bQOw!O-OQQ6 z#Ma2grB>r39rYRg&x|=|@@$+?GoP5%Nu$sNLkV5bc^wp8kPK*agPeGc>qOjUAQ>tp z>UW*}w$*p_2FB2}c0(PZ&DFz)pKmhr7lfJ-kcLS6Ba7jzyPBcREO{}chjjkvl#ezP3d~+vldQ6NW{-HuXPG?%$Kn+Ii4WUc z7{HjAeYYd>o|)|ac+x-(O<=XG{?*`>XK?3U%7Z#yJ5UzR;pq(_*@Zdh>_HDZAmS+g z4R!icYs`R5tjXkqtY!wsoVZqf<+CJh4!1N(sje(qqOc$x_M%1!D+!4$ZhS5dt-s}iuW6c^yh*X(7eo zPF`bGYdB&?5_sOAzgCNn@}6V|zmt%>z-=s>;0D|5t;MTJo4YXM^emR>j-o|ckKs~P z`Lwlr-9q8UQpzaRszqw3-aEdcuzf2-4lRSbnp3-?W(QkH%0#xk#e+dKRwt1p>jRU; zP7>6`(MhbT=~Ja&>DHV<6morjN`I=6w@^EY{jjKb2sHM+4qrmYy0Ulf_CGkf7V~Rl 
z9Jzow8>sXub=gBeH^T2*x+T1D_q_IiGOVnpwdfvTz{Qkj;3FicrvQ4L#I^Aa(`EAWy^ zSgqy$h&G(J@*CFXHKuLBeg7x@-EpYj56Io%!%q%}(B4C&JNDQmPU z^xK_?5ChR=_RQctp=HeZ;{_X^6~|>9VscYRHE!Hy^NUee^?GMFt@HaBtWlC(nPJ<- z5ia$Vn9s_e-$+biMRrCqv`3lf>pH=R(eT_m0gro~N9=f=Z@(=bwMUmKlan9vLXh*b zhh6xR-Brx7pP3lyXl-Wj0S+nmRFgtY@%*~l1SS?*7BaRw$YOM#?-y@V* zpSJXKKN`S;0#J*oHX?3d)<^;L_S&w`2U*OPDZUaWO#wM5IUgFm(r2K{gBIbT;z) zw^{l*2hCfgB+V?92Qt2+$2_gAxM&##0`_hncoP}QOtJOoHKRP%c*AC~FmZON_9c?z zBgpPu3z=!g07V&LxifkQe=` z+LQ6YZH!#A(&m>(!DnLt$cain8%RoCLXb$7FM8qYY1oAdl4l7~F3yTUF2>Sp#_(E! z)47FMdB7UZ-?j+1b9}bReWsy6?3!ga=2B%d%X6W`M%%Im(P;{iN~ur+7|6IKH1M!7 z{cMI2Or&)+^+NR69v$>5>lfU_D`K~@hY8&D1l;sQ@cD`Bx?L_nQN#E@sR;HGK9dp7gp3>_ZiJ#~|L>3ntp^{oiinf=iOl7b8Jx^V4( z?e?t~aAzt1{-6l(Sh+3kg`_(D=0D==FPqe`Hx&PSqUEQFVAD`M7#lAZ+{O)94NAc( zsG}{$05i{++W@4Md&c7ph`N;Ee!FH>fFX<%;h_rJaJk(`DnhNYoznOl4O(lyXfXcY1d;ai@~H_yLw zn0ShTd<{Xs#XZprjKHFuMRQ`GNrJCCA)gs6EN5~w%oM70WF*D0_E?V9- z8fJtFWKaiX+J95Agk=S2Fc=T#Q1au2{)DG1vN)F9#FfJ94cl8m!%#4p{8k74Q%`!| z>^Io{!5TvX|Fp%76G68FwduYQ_Lto1iH`!zQk@pFw}HYTnlR&VEmhHR*#NV^4bHh^ zYUFf*8f7f{`uVt(Jbc$j9-kQR^+ptKM{GeewuCTmI10Dd-EK!Z#ft zts!j)c$%lV#&*BrfKTJ4P0bgIO~<(mK`vOx!G^}Lm5(oI{x)^ec^~=x=G@62$p1yI z{%wqV^LU8~p+GhU6w=otmaxwe2K`vGQ<$=16{w0Azpp}Mc%ajmAdo5zB zBQ{8kWGfnlpQDHix8VlE7TU;$4Avxp9~*Iz)(Th-qjLphwODPj6#|kmnlftPe|H8b zE41Iu#YjY(5kCLK%YEJJ@woHb|9xBZ`WmqR4myBjFP<=9%nISC6m}=Z<*N~PC%zDN zH+>I=t`PR1`rU|+o@Kt}K#LD^3{j1b@r*IzEq=&T>69@1A(=@Z=13WfQSu%}A3b$l zRX5HM6=V3Vaz+sL=*;L-B(1*fz^#Y4sy_uNhwc#iP!09IlWY+F&=$2XhBRIjMN5sp4mCQF;V@~C}TlP8-A3u|7NFq4%TlQ>OQJ~I;$)Q~uhc?0-e6f{@q zvIS0`x$4QqA0V%=$>)&A%)&j(pxqU;aD6OqU?x93HJ5YE7K_6@yf<%i^xI_zpK}?_ zA)m$hJ@Ig;$9~rONlVY3Nk!XTz8UV<3y0{7R&fE5e&lma8po`!^B~QV{bNH~zyu4! 
z`C5khf|s60Yi9JXq)H%!T}%`ysGqxY#M8orzqTFq`3rG=%=5#EZG^+%lXKPBXpnEa zDNXCz=*Ly!2HKrRr49YA@JrNpydEODrL7#BT6PsbhA4z#U{X;~_Z3lfd@H&Bqe|al zHIr@fu*hSDxn^AvD)7jAs$~hLjlHCtXfq;5p;vK_$Hb=c2`(11zc7qQJp-v>R-1=k zuLa+}sH~Za*a1z68yZJKQQ7HHPaxMg0ifxja8lEZ)&V`Oo6$(Gt}o}@nO#SVaOWHx z#2$uwJ9kn9Ltq1ChEF^!J9il^n~@s-65rrLeg0@wD@Nc1?M}>*NpK^qFIQ4}%(x}N z!EJ^{_Dl+isp7rIvEhXl!QZQeknp(7I%_?__7AV{`b>6ZcWZ!M zfONvq*CofuVuDedB4EUI(^&=BBV8AB5_FeL9O+fsp&a(-!x|OF-W&Hs(_pGAXoovj zGloIS`b8F{KyGVwuH+jwuLX3Q3gcAug)RZlTCD|YqRDtF4#r{cww+{& zFsClTf;-(i(aUI@X0l#jrB==CszLbnWHiTu_Z~X>m;|iT)JZylacaPNa?fimwUSX= zR@Ygsk^Qlvgp)~|)tzH1cAlwl_hX)qTIyXJ|3Wgot-Z(N0(eoze12rDFWal^=+w7! zz9DWY`9L~tnNNmaj)(p+SRng`a?N|W*CR5d*l6)QPW&fm>D@QcC8HjVw06@LS?^)X zo%)ZCjq5ba#YfvJy}Tv$cFUWrHObK&{HvM>!Mt^-2)oAfUQJ=GFBIh)AL66mOwj=i z89WS(yEFblGe24H)StakjSdpOm?MH)yn`A?eO5>ODfaONFud%XS*{o7m@ZJPF{rb} zs$9Pu^ow9N`crvlgov+!gEOd<8}XdokILrl#?2dPFAlS3HOOPj)lGpvwi6ygAn`^! z-Su#PSije;4eW0t`L17!?(k~v#~Rk+nyla(48}&P?PL&rkaH58UM61+65$i(e(?(Dzj48iB6T!?H(bYrZPy} zi1+q@T}v*Otghq-3Xe7FGKJ)5lu+9*sI5<-HhNgo?0E4Kg6GmcAmrosSNSBrcC($r zwDTJ!i2YYjE!rNqy@O%^K*|wS-w}0n@TRJ)9@8jJVX3uzA^b=Tg^T{pbhp1qC)TGaxEnZbEUX8@xi@AoapmEENdC&bJ zfBC(k+V;fCQPv&R%`ws&u|_+)oM9k9YyEW897=Z}!T8~9Y6z_p4|lC8a)*9RfS_jS zAL^VfBD0~Mv`+vM&wJ<2=0GZ6>EA>-=^vH%>O~h<7<>dN_DQU>Cn?((KRK?}Fs*Key~%(YVD#SfGo%k^V~t!XY|FIn!=P zdcp~fkPoD04R>B@^kdU}XHF&`?`cciLjOW7$-KcwfRw$|X#1`LItChgD6t@xQ(4Q+ zpUxMa){)EWp#gX}|ESm`9%*q0!h4&V37qs_k$*t{lW2g@_)^vXB^WP%8`J*>(NO+Z z)a2j3KN<7C$wq999Lx|CQs}-VyEb_DJqUwt1Z=SWAfY31P##g4Xg~!3po4fe1m;P_ zd^nfNI^*NZ6w?-t-Ubcp zWZ>1x7%14#PM27f@d^goBTE-ANctj!ou_BE9N`lyW)xu*ncj*(PjA>5&{Gy9}xsZ=zp^KfA55% z}GHOze6E99;j+)U-C)jORigC$^)K*AmYg_PC=6O2xiC|r z*^tTa%JrUuu=)>iRQjIx=z3Y&TGOM=f`fk!qNhI|+va7n+W^|bCq{DuXEU4odmp?X z*Q`&!1^4|S4M_!DjNv7`TpALyAagaNnZF@&FaBtxU^`MoB^V>^N&Fte3lt0D1bl1y zF)ZI=I1`Ds6szbs5?Jg@F|0&^H}Uul&|K-0NA48ioC zo1DL_)t^SQZa}P*%q-w2H9h7HjdawEmOQ}CwurBUq(sVO4UMxVbKK$$v!I$gXirUl zz--pkFh_{$BAAx=>X(rNyLQhjK#M)q8CnD~v!99dIPJMOc5tIcFR=HdlJbCZWSpVz 
z9I9Gy;}S39VIDDI9Wk$xB7VoBq=Q+s*lj+fk|jsV1qHO3LSmCwh1&j7+;Z{|viL>q z%AtdWrY_)qnt8{cI1a4J9Ozisf+Uo09~4J^x>IOe;dRy^GX6P({0i(jc)#g&o0<95K^C) zb;aI+>@SA`Lw=Px7JXbju+IovlRCy&o>be69apd`VT9l^SZKt;;dJqoS0!+%yQrq4 zNK%i-1{F+A@N?IHRiWRZA8HDc|{w3az(JtYF@l+a-;i5k0)43bl#X;McSvvVRY4DW`Hj7i(Fi#pSh2jJC>5ng&pa z8dyI)K(5G!;gy+SJ>rLifAOp>)kC>-dZw1-!DHyNU1)dbN(8etCAhxx_J^$sf^TE8 zH4Vws=B|MV4oTKp{>;5E&OQzUXZ@Chp&tu~XW9gBT9k6ZWGFOpg?Zu6T}GT$(G6qWIIyQGq_#X0>4`ZlaD=8WS;KMd0B=9S?dTh6%B*Z_LrLqtA#X{x z)ft40Ls5Smq_M8xjm4!q@B990xsGQ4GXUHMnlsgrRikT^KBb{6M5i3scrbAG$9fkN zUc|G(9|rsl8sSs+_*2B_D{A%)Ugv?={SXoD0dtO3!fn+WiA93g{Rr##ksV19$FkrS zeg`UOtzbyEUpZhV=o@h+Jljn7;+hr7cLV}XzoH`QyOiSV zndjHNUf?nNE{585J*7vM?fHToqTk{o?qGg^RNA<*rCdi$RjN{v8}79!GND2}$4^u) zw7~_S9GqWllNEGP=D~fOgy3O8{{nl^q!Y+`lsp1DnzCKrMTYBq?=UveITj0Ar(<`H zpBVJy1IABAno^x>Xhrn;5}^>JE@zrb2|c-f)&}p!(g%zF+}{guMXKios{dbXQanBtZ-Hi?>imUdD~ zzd;;Yr^6Hpjjmgp80`HvaJkp8nzgNlgD$gDZgO%V9YA{va58J6M~k2ge(B9fG2f2H zW}q+qEGkiLPE)>Y$3`)zyvvv~<>N?@UpkQ#%67V|7&&a3GSO!={NM)ApO1Y`YU2KO z9BaMjl_Jv;cN1?UN{$oz&7X*Nx057AU(31BT|t?&{7~X7mxNlSYCU(Fwcd7?plx?5 z)n?py(G0QVY{VWyH^r+JMhX)`OrS1!W>85)@8&5YER(3*>ey zSSJK}K?{yOwLDN1Z|2EPncyiy3l7q!d_(b%%2&dXdgJyd27>oU^%lfh%6NjWL2D-9 zu0Ll=u}wf;+1f)!D%PU?wubH>sVCbB6m?b7AwFD3@ekgIsi6Me1A}uUzIQu(M+OIU zhVH)if#y8oNBR08xFjCY#FJ^R_U&1-UjCkuX|+e8x#XBpqj=RA15j-KR;7?|$~5E5mMq#3Z--j;yb zT+veypV6&s+C+=|=zjk>B%G~<0MTlHPU5qR)EcUvEeFw%;jYizYTbjMtu-YLs+ z67A01^Qx(W9#Hi%1uSLLb<}amO$tzJPWm$Cu%#ku^W4myRs>!BEyW0#5Q>c!7z_`p zUpcdVo$?ZI&LD1CV)A?o&dvP&} zw=}6PeTg$W)klriCVC*My}Tf=@TGq0gETkXrCBb5#}q-!8cDl6sx(|2c}0o2K}}g~ z$Fy9l=&_Wg7d1nZDg(I$*yl)!0bI!Iy)*PkM4ze4b)8oehEIENNq(Y}eMtvqL|^sN|Kf3bc@jPKI>vF(dBbL!CM*eQEvp$Q~P0^z~Y_I|XV!#Jp-X&>>8mpoVv z{mOJ1`9+a?Rm(HyK)SX4fKYuXxNbVqeEj0#<>YS>c=(5*2){+0b-v^jT=~hEfc-8{ z9bPXIfK!Q#un;-hJD3UWd9GN0U0BQfhineDp%h3~^?fJqTNWMCJr%|a!C(t(Kd_uFQtx7oV&6itYO!T#Ho{R70f_ISnYzX8 z5gy8Jr)T30X?X-$dLsM+r(KE)^XbLM0sl!p149B~g8!0FqrXKREdK-Y`M|p|0BV(1%E)xbI-@?#|gwZ{U`-J 
z76O@W)UhR`8z(C}c9Oc6g#`+uvpzl~`0~-kOHMsdAyj$x;0gPx6DUO7pQ-xluQ5X0 zllyZ2Q0K!Q(6~>Ds)x8|_thKaL`(T;Ctx7#XF$h{v2x^XxdRnujEdT5$$}Lb8=JY< zXlP(-+Z)8filb2eK4U69*wbhCj=-NGpP5*24du0f7kydjI;6P39leuR?Wc=4Zv1c{ zMuN|-GPqLLkqD<>ksbL`xR>@UI!u4X`=%gl)eZ25{Kip(qkeD`6>8I!JHRZ8_Z|>%qm6 z+|QA{=_t&w(@J}YgM~tHaZXVJqE{`pCgih|LxK*R%+-ZQ4W$w7EEaMm>T*##@X$e< zW0-exvBkHj`?XZ|JnAaq6?i!jkL#&_C)u0<(L@rk=E zyh7NY$6Vm0mXfm-1e5m_#_MRPYTP@)5>q(Q%|+C#wMSfA`*}h5>cjVzfNQ54VjY5x!g|s)W4tDP{}~eE$IBW($sg zsDaHJyni)N%j*-NUqdl-Ukc}os6_Bk0p}~GzhP1Blf8am2T%xf7<%{c`ULx<>iB{u zZ71HIDOJWH5ffRr@qL(a9BCDE6W#ZZ@`D5XpTU6AItW$1av;-JdBKI}?I=y&`aP&V zpE{#yc4B+XP;gS^OmPjTUOfxunT%%!Ht&%y=g=)s(<~LoA)i!Yqi;J$<=UEK z9JV39ybXIz1rMG*G#za3PWst%Cj!ODMTu6jQgbmU%FNyv76%4@FYF# z+$9ORsj@QHv+2_YbG1ehnn-JEr)+Mt=BW|$#DA=t(>tjd^jHm3RmrG9w4D9-cgpKd z?0QA)eB1Q98K$_k5M-e${T=p)2lZMZ{13=h7W-ePslo6tbFJSn{jFWLFoBZb2bXkA_24yGv`~|hq-ty~ zEa=b!;%zXP2h>=mOz5!cX=_JI`f^(hCw0mv1zSV4(R!-uJQbHIt3FKmnp606A9yiY zhB@%pB%rv4D`5>Gm(2s0x`u8WJg}O%u(FovTmwD#D=FL)`XfzvL|8W$uLt}Yr{3Ek3|(T=@qCtWMEyJkDxcp6;KUHQr+>nXE(r6Xp~#Hx16 zSuX`WjaDd6EX>*Ygv~q^zHy8BP82S<)uy~3WKpFNChSQJ@8Ftnxk{-0S-OH*8Doh# zQd_#j=`x>KZQg|?mtHd($O17ekGSI^26_>FB*A@USnr%jztzM7*l+qH&+CcbZ6d$k z`F<9QFT|r45c*O7)E8%PJKi)eJXkz3HRIhU%^`6Q<&&#cj?I-eI3f$+<*dsa8S>+5 z{^L)VHweg~Ze68RI7~SOyA<=0urhf~ehqXmP|)2DEp7<-b1k(PI73 zB1B};hjb)IK-nU@9YCWFukgfCI3VT=4fpEx-;v%;m9>RU?hLkiRI=gJpNDy2!Av)% zbZ5@>X!tUVt5{Kz`$E|2M=x(CtdpSLlfD`{OOEMa|3k+swsbqrn)B{pbgKO60sksp zK`dQqVOC`yr9pVZBm;>3M zX&z#HQnpeAZnd1&lSof`CFfb}X6`BF1E-Oi_8|5OgjM`P{7*`|TgTx11P20AK?wrF z@lTXi(aO=x*2>=OZ;90ZGjsXxxa#O%TC1yu{^gM7l{_i4L}9hnqFClioLeG?3zJe7 z62{PCevyQv&Eb{Fw`4`hdPJw!U>l{j(p3l=Wu{aSB(0Ujrdmi;Py`i)8&MP#1p^fY z)vEmGV=tT2A+5zK=EUYH?PHGrOz;o)*`CYUgU4+Eh){6F{Q(`EX9JW^v?sQzV?QJ~ zRs!P(n(s8&sP8X#W`cp>(T(`<;}awB{Y28gG0k{y#h~%yWsj2+pT|c(@c8ZrFhrUM z(O+L7s*j|cc`1wvlhg6LZ;?HMH)9GXzb))PcVZ1D#to!b=O?h|yB?~I`N9plA8^6v zWkmbc?s>ib^af$_RSZTuj}aa(7e79R^n5DCVW$gHdjbaY`MJQs>_5f#{#*|7_0Z+( zr&Sp^9jST!$qriQvmE*nw9fy=`&D=VANaYJY60u1GrYLxO<(kD{p$J4(ZGNUtr{`R 
z#|Nx%?|seDp?F|JfWArM4)FG@78pSQ7vdVoT#MTxr|m~V0JYW*VeAxz?)KeVRGSa^ zFgyOwXlOA7Fre`XV_RK0#z3lOm%}yQ7IX)GQ$uU zGzuhWQeBf~*hcl+^=?tQLU1nWVZUAv($i)m+OHi9dV2?<=Y)gj!%lBAEl3js%btKp zl)O$*E4?bKPx~U5tlcOpZ!Qgdg$OR8Wzpj=6HaeAU7xtp4B%>7g|MX*BqR%d1&}dBTS)4bz}kK#N#FSF3oe86+l{!HK=y40PX;T=jmbc zbXx~8+UxDxs5h`|V~$WA5^XZ4#cCbKJV|92pgUEa4fgO{d2xnd^DYI$cmHQGr+G-N zwtPGdRnr}7c^Dz_SoSBhEO!yZl(^QUq&l@?GOCt3w#>HQFbaOIY08VVsWF$-4j&C! z9GOde=Ngkr8SSXWwzG~L|M-)FX-*6G zv_MNxfztSP{Oq(|;KMTie;B;`J&WQ5RtP?4|#C<(3swhWQ zW8>mR?3NjImuwZi&AFZ>H9{Wft9>%GiOD_-^%fSEcBAtZ=E!M>B^*QTnKzbh|8SM) z`NP5^aFlchNm)Re5#9f8<>Y}}5b1P}b#7&ns!r2N$A&GQ8)bX~af`YdMqlF|B~t4g zQTyx+A6Dm-VM2et+Zn!%?Ko+MeS?jS?c&KO=AV$VQ9u!2c#NNZD90NBy zYw6roDMb}88GhCdHvBpNo~I^^m#A}=3#nqGXw+>&T0CrN%1csjLe6M&dZD)2GNb1^ z3B2$n^ms2aJtA%#jd^sKr?bX7;DeI#FEy)koIL(A5% zelzc5Sh0RJ{9%fwF6^)8jGUa$-qJb3~Sct zjVr`(2I!v@MHCjy{H8lMEj&%?-t0T*V(3vbS*URfe=3LQM|&hMLCvl+rPB*EMl~Ab z(8VnkD16?niJ1gksnkPhq*aw@DzjD#$B$B8$58xH@RP4~ zi@>qF?dc_!hKTD$18Js?OCqUlY&69K1$Mug%`M)7M`0C5H5Pq3KBsB(eV1O=5WQ{L z6vS|wvnYYZ>g2&bgXo5c7Veyj*neS>1 zcG_fMwJuP(x?S^`*}eK%yv@7pk4YN=&aet!K-&R>_f>8t?XXrBH8xrb{!HFoL>q6#7`uP?_`MNdr2)#~8$yWljUAa!* zn!{F@&>oJ(_e?6{BxuVq)k7^iU)G;S@H`+#mCFN8@>}f;AKT8jN1YH&tv%AFN?RyB z^GpewKf)sIpd0DMvFQ~`UM^Zl8Y{~rV7IxgNcA8f?1wQ}6`mN_XIrmZR}qGW<9yeM zqQLD3!~`6-H{+AQ!QEEDgQ-d(yAgU7+qa>jLzL%urMk066v$7WzY`nH6dzVn4gKo) zfns)lD55L&#%8|EfqBPn>0&~`aH^8`ZC%pKrOf<1&qjs%Bz^ml<#QL0UVJY=v238e z;0#2*A!3bs+--xTE7JoYQg|nTcGKqNiXE(num{uZN!v05yUC<^slNClrfFIu8v`?* z*_-%G_jm#|{iXpHS1Q@j(qnUrlf+WothH3{VfCeWrkz*Rk8||{{}NrNH>p&=aV0zF zr&COC+{p;!MBQ<_HKFv{>KeSEEbr!IzL^B~iE1TsD^cyM5#CvSn=#o9CA|i5&?`1Z zb;b(w)+172j9*_dRTUlEDd((|f16&~Hl~h!HHx?$1P-)G zEx8QSV7c6etyU^jX@I?fou1&Bd7~e;sB5RuqRrGM#%|lZ{qz~y-7VW*I~GQ}HpaY1 zSUcbJfqU5NuxJS9viPXG62Rp_S)iA2n=V+!E4?7fH%Y6`KNI{OTJjs38M=^C7Gt_o zj9yczH}0UcI!)!K*_qPqhF&;~?ww)Nl6wt`#v#qHI(hRWP4r}nYC8S1u+F8mbY_IW zNaIb(j2Idq+Cm(ow|(JECp^LeC6pI7bXx-0rqr>l=sR*io;j_dtusfaqcP%TaF+4<8 zErP)Z&zV4rrdSIW6%PD)Q{4E^7eI6*=&>4_)`Y-lqrzF4u`vd^0?LqzsA8hk79jL* 
zsr(yuaFl7OZ4*uDibXm!yv5oCt1Ww;o2f;mUHi)1+)~XoT`RIwrHx(X{MP0#?%`iM z>@3vNADEh?j9>BW&b9QZA`f11531HTg5C6JZk0IP@@1ko`gr>km-eEK@#kZdDrU~! zRk9S@1GJ{kY8bONx~TdV*>4cpW1LFS?RGy?vF0UCL5J&kw&QsyZZPqb*h>2>WdMSv zpp_Ghyb+Bs_Iv|wiU%?qnKsD+#K|Ig%dp~m39Tt?xi-mtktUpkabymE%Ndf%eH0+`Uh1E74Aynf~O5?bqwd@ zJ8OF3_EmDvGv!TF#O4bhI5Fs~oiZy%Ss9_5#t7PCU|pkpwrJ%>-5K#Vqu3q7Y(R;| z4MS^g{$2U6I-}>#4Yv$GH)1d+9Q)y$mW3e(-}J@x8OituHRtZ-!?py){w#M*<`Z_( zS$Ccx9`L&0jFG;sPok&|o&I2OxW=IROurW$x?5hj^zYeztaKiazpnUpGZq8lXimKM z=(GXaqSdlN$sR`fyrK<=<@WJvTy}QLJXq*unZ;Hn)tw>@Z=qfNHujrKaF11B;&dMP8iJn2GOp5VbVM*Mu6z< zVTQMzbt9Ve*|xnUL6hJSG4_PEBgaF8w!=F9(i_RQW0QX_&z6)5tsf}vv*^oMHffC( zXKvHii?+2(_>A%N4v2Z~^X#dHL`agZATFt9~?PGL*G|IH2h!QTl zt~mRW!qT|qH5;l6X_3G(_~~o5QcTft^|Fm2r7O)mI!Ku9DTaoRsWAp!_dq?v*JlwG z>9!_Vlg~E4O~ID^xkOX(HZmQn_rdxSjWfRa&fYV>lYrlMH&-AM5B_-Dj&-=Ao)RON zhY^~H1~JR`)!_9nrV>PWRyPgs8A;k@{#h!7d? zN5PN5nlj8~XYsjt+KZmHhJw`UizIcrsuN?LYV>h%SH>LvDl}^^aZILTjQn)iEOh|m z1iBt|i5<2&R~g21lhQK!H=ivUS-kmc0>Qju1M`%+;-e2}lC*VeGttFr&|4^pjFwO- zVxGb@HIx1yc9Sm@2xs42<*mV+>KUukQioeZxy1z;=2I;x5?3FcsVT2$1hVtY)j+X81Z5pOkdhw=OyF$a4sT+LO4|5o*V9CS*NG zs&29j2YqA)>nOSU;|DpqRf|KhbW`MG^P6v-t-x3I!{wz7i}4ckic(kg+Byo7MR8nH zJywYJqOnS7j!-$zip&N7W{mbO{Pr2}!^wr%ZqFgvi`jOrIgI%=xaH&FzUl=LmWoJ? 
zWl2*v_G#@U+2dkq{bkq{7tujjsqn;X$8IN{loM3Wa<`f9<}YIyu9U&q(b0^_lRWF% zw$~msPSWcbao9Y$)>%05?42;Y-Ky11sQL+I1vLm;>w(X-p%*ViRhz9i#CR;BJWUpp zuUah0+LdCF3l;tQ=(ZC7djR(*o^|#pq<-P`~I7yFxtfF`)y%YpY^sC;1drWWe~dOZ}^Spy8h9+ zR2}o|z3}rbh9A9WNXpTxgR}EBFCRRWuBgs_CURjrngBbWj@eK$RJZJ%-%h~>xD-JD zah&#_d-rB7t^UIXE-lTR{POA3Q36^?)oKVu=Q%YxzW1$V1t-5WqbpjK5Pqd%sN@D+ zDDA2(Y-cE@k&$oe`hdaTxNPN7`ao{`;V!(CW_y5Q=x8Jc<6KKU`LAA1kL+b^n0?)5 z*51i0bt)@Oxw33ubo)TI1U^uBoeiMU-Q7Y9{Arb)yaqKRmR?(Az1gq_ZLY&nK=lvU zFoX?jfEW6n-BG=u2kGIU*w1z+c`hlJb^BpukGb^o6WRM6&dPPcP4to`WMu-~wImIh z+Y3EcW5I3l`;MFmXfE5H_z{8Yk$ufLmV}l0fN~k956IK4eX?r}a996qH^O8=QjYay zal#yrk|40|N|yDL?!1&%syf;Ay&|8r+w@}N+610;hT3tz9h;zlw*N}HH?j^E!FDBQ z=Pu~#alnn+v1NGEQE~#@opFlUB@>&|0x@weg;q$3Qb_4=%L;NrB%bH|73j}Pi@14i zY;77z#lSWgkaNJw;JFxx$$wu2?;xIA>^K1UqTG>pW#dgGO$!Ihv7B>WnRH%BJ^|ph z{33AcCimV+igaWcs5KW^Bu2x9LR{QMBWQu%{nwe&qrxQnLI0T3GU{6a{}Dca%MxAa zd(vRr0z&K=E`PkTZmobJdqkj$*qbW|FcBj}vSQ&fMDpSmt5%e7s3ZJdi^P}V$NhJ7 zc_bh-O$85-)YW5&PMD$wNr3TQ)7o?##hWkYjM{2GJ0>_SXBey5WXfY^*T}hE>c*|a z9IrAQFx7OZl=VR-aB|jm%zS9({E-UMt^jNl`r9qXZ8rh9&aPhQ3C7kZZ#cKQX1NjE zdGDmq@!HX|eDK8k-IuSJ4^cY(xCd^$D*i9l-Z8k+a9bDc*!GM%wrwXJ+qP}nwr$&X z$F`l0?M_bCS!?fe_qw-son3YR%vm+-`!iqE7~gnAvp!{>!F3?JaFX9H!+Z@oo3uVp zohM}`CG>Y|vw9?@KBgsJvvJ~AR#(vHBJ#5Ca0u2F*aLRn&WPoUpIBYTIzaR}xGS+22Ah%9SqKUNi%-DWh2-!+M#on>^02KR%IW z-ArN|*F~aS*c!>aBhDnd%F84E{2;Aw%N9Ee``+!2)fz9GiGr^&5V{smvYCTf^!}|6 zyR!Xqer%n}`D|IMKQcGPz+>Ez<9%azK)-5>s_r6#q@C0r(p!60_nPkV!Bd@tg>Di? 
zal8MN{&&^s<0h7l?zieR>RWY6_kY_H{2!`7vFiJ`@D%BDIib6uX;seo-AKFvshb>~TTRY%~tp&bv zveHYo&dQ}h>uE_vE-qp%3d@P=-gW|moFW#GebAj zIf;AjSUpHb>l|1*h=0BTQmO!q!r7aI2wSna@awRfBS@Y2*UpB!3fS6mH=YRPwA7<( zk(oxeNZN`QV^ndd164|&>YoP3cOs^n?3Hkg)<+qnw5$&2Kk5Y5wOsMq=t~+S`yO|=Z&PDPlHts^LgE`AmbU*$F3Ysb7I*u z^NDe?MwfV(M6oC*2IGsyDULzBC72MH;*t3^ueJ5Rl59-G`jU`}2Z31-TXF_qgB*x* zD~W28*}|BdGzu&P=!egYJlVX6bz-ksIY>IC!KxL~o));q2gV7VD38>U8XH-3I*N2p zSgzJfZAA`6R$BeX;C0v>8Ykehn_^tvIJBHKQ^ExtjqtB@#Q4k78(SsnS2;wY;|&YK zjYZMW36gWz6*9{W;$pJdyk#eU4-sgg43nV*JY^Ld&KmJjqHQ#cBS^+wgqM{~e0!~p z$#6F&r4T5)?v4>V-DZ%EhhO03`)oFY-a^B+Ye{HbjLbdW&--WT zHcjljtG^oO_U#yXt8f-Bs%@C{)_6SIhMb!Jho3M$(p;h@Pw%EDy7OszZB} zS}pt<#ig2Vh&+i%m+I<2Q>krgxmXO&Y|P zZlV*9XOEHekNooVc2X@o*9%WIG+MTM%52HgQNOBjrsK4bxhh<%7WmXR6HwzMMD-f^+Z@=tMjE)&`I5>XpEGVBMx_XO3!}E}3MR1w ztP!IlaLPDjqc$zvfjq*e1FsXmUK4u&Loc zh5Grho0nM^4_Gt%xq-&dK2$ph6!g|d1P7dJu&Zl^?AE<<)_2~`8ztsdt9wk%F#Pr= zX?BfGPN0E*Wqtt=XhM_c-;H=sku<$#eSw`_a@(6=#1MLuy{^Y!W7-Y2we!CHC&iL(e|jUX0>}H@t5TV39v0Rd zVJaomywP0gn~m{S7Vp}SkYZ*SMo!jj~#lSS-*9@(wLfN@`jS&yM?qR5y0!r8K@^DH89gU^~GByh_fWr*y{asp3Wa3j)oAlL_UP9Zc;l~Q|iLW z928fyaWG#@UVAb@_-glZ(8)vJB!1zXZD&>#|c9LCB{(S$NS!u64LNX#tpUl#0>m@ZhjcV#J?}DT0J`ea~N7TXYkdLs{Pefwd zCtCH8WZG7~#6)Cv-|Tnlpze7pxJ6poB%ixXZ2M`sW(vRbL~L}Y`D1gp3GrShP2nM2 zCCv02Y({rGT=CMKd|Mq{r%qWntdY%KCwISdx9Rr4dffOyAXL2gW5AFc3L)?6Bfz`a z%?65byDbKe5^v`ld|L~#hVo#>`@mJWZAKEg2@WHGWm#XQLV~Uvgl}`w)C9jjbl6q( z@}WyFix_lwi#D~^&@68d%N5whp6e@Ko6Dx(CZ(g2TW?fZFPtfqAeCD(C6JP#Ib(Lq zPf3wD`J)|#WI8%OnDf-Db=C02M*vK++lgw(&S_VogzrOG9;LB4&oeb%Q0&;SacuL` z`zvICjE$>8f0I~?D(R-4T`!|xH(f!J7+crZq5oM#4)KzOL2I1JONSv{?i7&Cs6-xN z;}?Qe;%2G^ROI9XB{FLU@zpCW_>H{x~nqfRFixUwX) z@QAp$FVw&x7cn##(p_n!sns{#6ziNFFf;l;FnOek`=WN;_>NSZEcKa49NEX70D}nn zH@)dNQ;SF%qCDwr3HplEXh(`#V|8n%;#_`DJ@Sd=c51Q>s?WYQaQ=~{fblWcH07II zWHlCE;xE3WsJ*CP)GZ`vGfH7HGzkV>l>v$oW|n9NN=BpQill^Ft!m6$5yoOwy1gZ` zsl#r6lK35|n|38xhmrqgm}8kVwvoodx@&ck8w-1=$rp#VqvdsbGHn?#;*5Ctr*p7Z zmrPe6B~sg4Ga@U*dS)()%i@gL_}@V#m$|KTj&M)y{dy}qwbW@WV$iiUCD_Y&6J&yc 
z(u8*3zYHrz*8giDgkF^}H>g*ibw21Yn}r3Ib~5;CrgTe$?-Pa2I3Ak2Tgv+Z%a=OE z1biYzpbk3`oyhI zd*FQ6OiHi*CdyH;4~PHh&*ttBW2zFuEM;Qp8BRbeN*!k!11 z!}IohC*6(?s8;57icBcKG6Gnr)HOF(%!Xeg$Pm-nP1`NdK_6cydxblP-CUfryjPuT zS9aBrMERHytI1_6p8vrrWd<2mI7(QSj4HCPrj0Fi(@HHBzZ?qH_je$almERoryq+o z5#)1qsA`f?M25_W3npFiEVB)NUyxZ%W7E+|IMAV!qv@bJLWmTT2%oZI<6Y@3)q&#S zYlV&L`gpoULHUfS&N&HiY@MWNvB*u)8nw_L&NY)*^uWUYsuF-9eSS8;qaOAx{O1;1NNWgk)BqH$iQj86STFmSF80!;=F5 zLLhPd4G-wELi2EnCc|OYWKK}hWV67?_g3bidtNVC+90W3wOB+OGGw zn`xBFwRk{h)``g1-9Wdamj=mGW&pgQd)U#x*KX>xC2Lm%hCRSPp?%g}`eJ32@**i# z&D>EMocA53fy98bT60>aM$@ypb6TWF4M?8~hb|AJ2-z1E(?X~rI!-@FAx$_4Bu;}O z?<(iVY$w>6g3KqfY4Ya*Z;N>^Xrp*-RDh)M2m$Kq6ucnHztB8515O=`K?$_I1zI#1 zv)2lq!+#95>c1Z5&OlUt6lc;HA?3}j(MO-{Y0{Y#AT;Xf(XuXl8=?h~mm?W^v@kM2 zbz~G#k@EUqsz9c!p&v+4f#St@XBlpKNF9>>X$v$$wMVmB@dRZMjL!88}p%_+-1vr z{|!lX>!%JLIV$iQJ5J?ui^@>BwWFl^X&Ia$+O=mYb+^>U{!1&lzZRE>$M6Nq*LeJO z%QHjXzu-N`d5&rbKwS(oNU~OnncG||7C!^B*3?{F3Mt36-*=N2rp=Wcv$AEM$JM?8;o>;Gjx!MrE`v0g7)~`9^Ao-1>c<*RwHo9aj?^@c`;pM&c*m9YX-L>s8ePNK z8m4fZ=R!c;0ejo+V4c;?(3hK$K6kSe;R&>?4H(U5wL;Wng+vc-$;~jE=6~&LxKpm& z=K=eRdWF_g1=Z69ET!q5yb>}u5njLnUm}#q7!h9smwe$04eD6~9H>z8_jp=lyQ=YC z7%;282nFn=IcrvIuP4S#+Q?7x<(YQ}>I%vi!@+mOghCY-k@PGaN{|xUiTGHIN=9vR zM}BvCjmF%PwG}vh$p`5gR}KD|q(9(`{U<7zoTw_t*7PMJhdY1-XC?&7a7BJ!4MQ|4 zS;aNS@4(&Pz($TV7VXOC{Gh87?*4t#D_nsy2&6NPpMODbdmy{Zu5+~HCc+u5xz;E? 
zCzG+Nq<6sxs&@^Dfv#-bqLi^Fet}o$HOu3o0 zeb(T?vC!Vtkp-kp`Tj23t6&Y*V)-BkciRT4b3p5s4cOTwxF`K{kACzug!~cdd1yWT z&UXIDwFc;p^Xvr4HRk^)8#%dgH;@+N>Cw9c(Fmt=uj2D;Mer9dj$2spOp)f-cOmGcX-VEtNI4fld^*x=a5@xB+H@ev*n=a~{r4^zGrYFJEoiuqx@Tnn zoVurfp8SJQ_6 zjODSsnx9UM!kd!fF==mAVPVWf`V$I$?Sfbjvk|kvfzFmrIz}V^228#2&@{Qmu9g*L@`95b=8I$EowW4IQmROcY z$1Az|W$1j!UdF+*6GxT2(UeQnPcU&-7(>#N>!?t}gN!hFRuEdgtjOPaB$t1h6iCFT zP{$h$BL+5<>hqN41wZM*!PtZJhE45Uem~R7;%DQG(3kgJmeolgnUt3Xav zm-a_1IISuTAe~GVNmg`x2}2S;Jpza#Pc4uxNF--S6g}dp(y|!JUUJBa1NAS`7#K|x zh=_mZ0eO=6Fa7~MN1&~QwB=2la`1PQ_NHF0R|7OmqUL$!p=0{hi-#)$^VYAFQF211zuGQgK(4C&O(9OHd z)%6VT8!iJO4ettJ4lez)#JaKN_18ikhL+V(bZpWrkdy7ZWBW^MB439eib`A`+JiXo z00jNC#|jo|&)+W(OZ9@Qqj19*DVb(kpSO#qIeTubC0`dtKO|QxzS@~J!{MEjR?JgVtDozI zDzbMjRJ=1F^rkLoI(d;C%UAUVp{sIN#Yr9o*T!&eMBO9q0={*A(i-04xGYm!b=kqA z#Ozhu>onNhW`5H()RN&v^VgKg1cs$#`h=z{8-%jE#_AdEgN2y$ijZg8aY}8DlVzKi zgx9Px+U(8L7w7x?$XYq$r+8`~|0}Xw$esHX&hPT!0+&l!;29Fc<9k!nqmfm6YikR| z0WZ*wybH}JuSO!N5#4LJ?UyF=tOG&X2f{99S)d-48^IcT9gm8Kt?jm!&zhqzU&b;M z>cOH(vEOzcsdY1wKkw?8oBF?k#Dii{Yy`PDfff(Id2^X74)$RylngedeU<#oHzge3 z?ftg-e%YFui{SFGk)i)2;iS!{tq*$(2V>R}JctCo`8kHp9JeWU$I3i+Ck<-HHDy(g zaRC4D!(fr>*R|H2X70OYtL1EH5LW1~2CRc2qB+MxdZOO}j}M^HfG}4Oea23MTKm;& zEylqtWL>tH!qU^RC*hz(w9T|MTz;Y$H|~fwJP|KE5v^E4-q<&&vE%o!K{7JVbJ&_NwQ+ z^9|K)x@x!laT`;-M0fb%u-T|_(n=wF8x-lfv?~b61 zpWi;SocITgs3#}ehFN@97SiqZIVBvprGYV19=S*}i5ql_cJHv459^~QA;)+9%-CMc zso}>GaDamhW-$9?hN)ht(84&4A<+)e8cApB0^@*jRjR+%8hytTs|ulsk*l0@@oA%v zFm3ulmdAuP#E;MNcr8;0e;-zT?2VADQLeC*F!<_89=Wz31|Ct!@Ps>`yJk15d?}6Olw{t!Fe>af(FP+B!n_bkb2IYmLjP)h! 
z(%85rm6=gO0n{(&Vddzw0ZJilAc^gfUTHnQ3ggtUM!I3saE@mkNbcrupxnjJ9RUeV zNDkIv9S2tw*Mg`_Z#zq()|awZWVTj^dt)1$gJTz)o`EUV_V&QcK*p;F{g9Ts z8i&QbD+e6Gj7)p1Q1_RxC{HhTI5u~N{+ZfWxoFSswpiUOn$Z<|rIEk9i~HD{UWxX- z!`-%yj!^==gN6t<9T4ptv z-R46}Z-~Bom))}qb*lUpCICKk83PMD4m4;X5z*Y7@K0o10CX%o7-ykSHF6h`p|%Az zdW2+}We!gjyl7#4Ul<@V6kM@ijz3d`YTRQ`sL+BD4MCiJraz&jCx)cj`|rcf+_%Ba z@)0Z}V~e;H;kG-Nz=L^{NqM5XHC9SbjEM&;467}D20%Jw9}4OoCZOs3-YmM5h8rM5 zn>zc))c`|`llmfc*CrM|;fRuRurU8>@i#yDb}78DSy2NMK#AzD1^49jTOJn&4r%#J zb6{-guRber<^q_5J)QxBdC0$s!Ih2qqXt+nSA=#npUj$^$3tqYNb*INYk9PMt;XSU zj3~chaNMI*X{Z#mbj~-hW#CQ8TR94;r=wL-sg9I9TdmCIf8YOYxmotI?7=tH!#4;yc~XrEp>++tj!%>H%K99FSC75Ie@WUQ+50{r$$MEyh!UD z3kWN7dU%GtfCfvfUXCYZak_+$z6CPrFO>CMgj~xWJ{05QYk6T(kZm7-+E5mfnr}~q zw2~VXv0a(3Gm+Ye7#1WX*fq+@N0{(UCwi5ZEa646n|&xs5R@mOLCq9DmfSujoi7?Cq1{DXF7~$ZUPvkDQsIPMFH!LX zg(EsiG@KNV+Ad(Y8+()>o)`i#lscEL=-)yW+9woXQ^Ar=L{Pbg2{9O>*Jd*Q5Ekqt zA&l6&BP~o1s~rDfVhe1mNnfzDjAwSPHgCWa_I3%*m2qLR4(GzWG1Y{vm809Ah1)72 z9RzK$aFdwchkZ()TMkud!llK?LqhgL(lkmqz{q) zpMuZ>FO>mQcEtg`d)LsP{=Sqi7&)ppw#$YdE*F$92%nN;;%)j0jJ;79D=spJz!Z5_+KZt@HJZ^6wzjKJoxC5p1w4a5`*;fIt)^qDV3 zMK)8L>1L2(Am^S@xU!vcxHyZIQ()5-Q=eX~oR<9~s$K&#sHDkQwD<1J6Iawl!A#M? z$7{v|-WO`T;jmj3jSeY)ni4{ngk8;a6+ZUsqT%D^ZxgZ;qB>RvTi4|)(gD^8rynP) z>cPGZg@jJ@%yHvO%SLlkikiUKM`B;7x`wcN2<@iEK{ik z<=I>uDATtmDiEy3p{Z9XmIL-rG+$(ag$Aa~yag7{A;+)aFd8~Lb@rx*8CnC& zqHiWqg5`w_6Jtf!;C*{7H{%}x{-MqDdK4{{-Qm|mX_Z3M11}UD)K>#t{wq+=t|n+ z2Sk4QEu4=KEG^*+ifa!Nw4IUeZga-W3*F!p)zVfKwg8G3TM$jD#>1>RZQ#s?bKLQ{)p}0^{t7nyC%(tM@sWf}&g!q%qns;*6-| z1=y+M3##Wx&ZYtSF5!`2ThJS)>VmSdb$!G&vQaf8-78R&7S<{G#bPTBT<4mxx7NY> zU%Y45qP^3t+9!$UXA6s)@j|O2L0hDPYd@1{f*J(KO_6%SJv8f-$qxDTjwH{oM8}A; z#b9k%*D4ncY8A5i1GhvCm2}tErjjjNBh4;{-7l6&gd1A1#RNs8Bg%_(CkX73_H^CdRx!Y~r zu&djdW3fXhNPdwz#roro&~AZaPl?hsV*Ygq@HNFNJQCzK5LRCI=lba4#ZzaE6cjm~pp{$~QK8FPJ^GUw@wYd3djEI@AVne!2 z4&yqDGD@02=z=AuxvnZ+zZ8@v(w3{i6R1p~J4EO}s)22%8L1 z4@mn!tqQ6&2!mzmw>fjEy^ZLk8_~ulja5(9w8<{!@Ni&S=mGN+;*npdxhk*V?JK!; z)#xeh2fIx_3VRrtJ*Mp3*1LH*@XW7dCwE=Cg4V3%mdL4NK@wcM>%sRid! 
z>cZ)nm%Et@?7bIv-!SdXVewC(#dn`D+j8mcY8mWmNs?seqnP%5kdhf|5^u{f>W)B7 z!h4=PSfq^?#9yNDoN|jlfwXE({|W&1`W{TVpFQpnMx_8JP^0ypZME+-&spx%PSY)cHB5A@T$FpaNO@lt71BQCxE^XPoGUReK?$bNbK7hm;z!ZnKX z;|I(CUElft)pY)A$n{T~MI8Qx#UM~43gK`9IfH`)a;UZxdjK{U{E5c;I`)J{y#@!v ztOB{%0*uBfShsn;D+mGWE^9sn54*?Q!PhDNtcyvrMs-u?v?vST>E%Z2{c)!4aku;Z z!?CXp7epS!SA+qCOmiW2F8XacxnOeYjA;2Q4qNWYF_cZt;W5mnSA1q^n|nqko&CKr zl5V&GjZF9prR58NZAKVruo3r#vxi_mllrNeoON%5um@$>n0etBQ_012<~kRN(F;;FD~5aBB|Fs|3XM@ z*iL~W{JiQgPqGK~19pJ8hoB3h(YnswEsU8%)oo0IqrvlCOyfy96_4c&YIvZ=0zHzF z5Q=`bd##ga$09MS9C=E#fU!BZ${F=|EE7(VGc|o%N>MenG zCN6^$js8mF`4ZQ{lXID|24g-$F^~Okr0#Ko{J7nS!hP)wnZysQ7DAZ)3LfySkRL zN>?Dh$7-8$I`<5FvwNj`Z=?$x!n2FG8~pw6lnd>^UkCBq=^7#p^%=@gCEC#yj?xPw zYA8>;#2{1`s8#f$vUoeVn&di!74a8(aFVRyfFKbYo&HK>oxy}4-QCp9tVBD!TFDnQ zol9N{F0R8QA7H*m$F@!o=P5Vh+=8J?BFD5y*Am|ndV)QRi=qau8;!z< z8Xr$($`_6Fj{ZuiVhiu&tO<_uz9r}DCP%KVC4h9KQu0DSZqa-`#br^uhKieR)|T-w z=1w9oZXq#N<@({|4R%)yD)LoRwY`Lx(uO{6#R3cg^0WOuLr*>nGQ5wrhH59OMy9|J z-=9TsRaso~Jh?nu_jV-%$r!NqK^56-evDD;W~On=ceV(aRkTxpbtTL7`%uHaO@Uqb zLUiFmhf6>#ydv$SX9s^x&w3;3&0117+?|W{5nZqM>sJx{n7Yt{Iabh^odQOP!24!5O%4 z5yZfGs=&j%?wQno;1y#voeV*3y0WdMux_WPX!qCp5COV(at+> zm$p1D0{DzmZQ&Z<^K?5!v`bJ z0n)LI?J^FJAJ*)rdZ;Z}!P-#+tc7)vb3R2hT^lsq4Dnh&XBXi720`6cfymeW3Si`# zfQC8yKo+^egwS_+Jq+)x!&B!J0%0-(3*mj6+dq~}fRq-ur3{^lG~(?mtSyzekbYJ7 zEd>`})iJ?ZN9(tR9{=Xm|2=z$g0nu@@%h($SY&3M7R8Elr{pN zd8S<--KNJJK=Iax(;sGQ6)TrkQQsZyVYuygN!#}hdED{q?tsFAnF94?cI1`hf!cotuR@+u3ToMPD%$>gP}W$ zXUxvwp&IeW4TZv%MQvi`dV_?KhTraUDO#f3LQT1k`PH-IcZJn@<0U-Sz!f=7s_lrm zWb1TVajc`Oxli_*79%CurSp+0Ti7PiFvmFkSPEl18#Homxc@InCy^oQrH_vn=(7NU6DGK{QD)&j%n*!1Ys_zrkm>NCMc(z95KiN!Omy z#k-_U`?yGBLB~kzp&Z!-R7(CFa)-MoYL4uq=M($ncI&fypuN6Zm-}d*0VTX)zEGoo zO48F1^3v`LKoau$hnjUI_qeL#xu?B3Mb;%;$piFjt#t-e3fx4jHh^20zU^|cIwRg- z!q`flTye|28>yOYTWCBNc?VlFbjdTaPkq3`nSMEVVBl~n2FcZ5GjQAR@`>miyucjx zpWKY9?EM>M?FUyW!35BcA7&swe*7aB^dFg;|4PjKR}kjEf-V2ggzSF@OiF(zMT*Z+FMjV=X!09R^$MpTWe+DF85$XiLmVm~CLs=jVcLSf zJiVGW={&y%ZvQzqf-bH_J-TXRK-!pV5vVH9O#Y> 
zgO0bZ=EM*|0yBZGf|=F*tZB=4(aXE4pJsVhG$BI$k#op1H^+HQ=G{X#zbzReAtXZS zJuFvIi_e(U>LkV}#jH%cW2~=93Q>S7SG2s?0|_ruFxdl{vXFDe)P-4qNQ$*|lplt& zHs&=Zbm_bAX|yeDpVG3=UrcOU+L263dB!NUw33NGY}yZky*T2fmv*MBP!~FnR2mUZzE& z6P)_a?<|o{FJ{n3?-00{DI!WQph}XePVlv|O(@4TJ-V~(NSTa5S!q3QDJ$9HXaPuP zOenTCuNc^wKH3wEGdEYxa&gdH=Lw-x0Av%tsZ+G;b);ASp-iRS&l%pu1w@_r{pYvg z4%>fiXP;;EnQg#+{J8zjr1+m6l5gZz$kxWm*zI43q*%q$6-5Norvz%tq?4ZlLmstq zGr>XKCV-?^V?n`i0X=hGL)a;k`cH}x+r_lQZOzw!?=A#ynb>JR9*@tHn7fy$OJ_SW zGB-zhw);spH`Dd~^d#Tc=Picc{fm5m19S))ST3v{i+|>ek_c=os#l31(%l9n@{88+ z`HKM}_%bqkP(VOIgti(Ih+;o3nrDE3_^?1#1$vRq2u13YU>yQex28BI3|opIP63&Jl%^3&KYdEo#!S}Hty3qa=n_k$ zPyZrQjO{{+=buGJA?^cI3ZhQOly59QnPownae15=YnlVBu*<3~1v7MjUAL7x#FeXx z02-y*WV*}*C*maz<&<~A1k+-KN&&OLfPEX+BLpNGk;0l_Qj;#cmSpFWjdug|>q~CM zdU|Vu>zwxwmn3QGV?u2TjlrE60w3=nSA2Uz4R@O#6Qj5vU(NKzu zCZu+c+=)+$D7fMyaIt3$6~k7=-=q(`rLKccu=@-Wvxe6Cq8Uh3STRi^44ilP|;IPt%+{NCpF{nbRy!iTDLF!hOobv`#U48 z<2!fMTrFLcZRW^s)X6k$GE9*m;o`m9*wRrAHv0-~p66q_B+|IsHY~D0o?*k5LA6ir z>u;hM@1z1BvwJyyTBYgQ8ScHc_&WS*K@!9b5ew8W|LG(#m@8`wP@GMT~3 z)+JWC%w1fOVZJFBU=_Py+FN`FAWrWhJ4x+RNAs^55hvt=8(qUrF|Kh%ItHC`KO)Ss z&2}J~Ye(Jm3Uy1iNmcXni6HCCo<1Q)?dcN2{#EdiA6WvIqkn^AFD|a*?rY%gCRO}KVibrdx3fv3+$(Y z^*z`{xrpYA;UUd-(+*)g^o&qrmi|aDkK~<4_9l+{%o&=5Kk)t&A#Z_2LM|i-G*n#s zt6ri$R3^tGLe;WJ&RD}yxV4>=u?z`IDS(kCu)IyQ+Ygqd8YX^Os_2oxd?^l8GqLzn zEDa9ILAfOLljUc>4c&5eYI9Yp$mDON&Az25r=5|d2q(1J6p?fNv{B&Z%dC0hr4Uph z*;@@J3Y%yC2$464Ztj%B=aGLa_LJm+MVa5l-s3wxTi}0K>}8#uzN@{Et+lnelhc2M z3l%3!NDT6$hJ1-vHb46G!UW{0(Cf@fE5`-Ely}s%*|AAOU~d=+aw6f6M^p6&BkT%= zo0brSwYrTN(3*bUe|zm7z8>8|^`Rfb{MI84UBfxX?0DANQHFp`PyQB&S*b6B4eAX) z4x1k?39B77ql<`A&hJ&j;B|Q5Lo`v`%6Yi%3|ke|g9 zS;=xrzXydh{sN1ql$>!)M%E9{FQrjJ#hCyMu8*5mfeL0cJS4X%lU%mgWnW2kM(+@wwtEwtT~_3un!YB+OM25KZnsP3 ztQcX|!kjQdi|Y_gIPrQnaPVtxpQT z9Iyizyuu7YiozMd9xxcBnp8J1x?W=rv%ym7t3eqOZjE?EM2|TQnLrzipO)$Z{nrY9 z;`d$>@V)bj-))}A|8VCO^j#&5-Bt9hoQ=i5cV6Gg_FpO6#qpA|1N?}irZB}}z!BE zoo!tmAU%*{#yCF;kP^5w2E7!1i-{+zX6c{u)Ju@W&mw^+zm=_Hb3tvgCnQx|>lDkN 
z6FFE&FKuvH5-W^dAum*do5=6e*Y=x+0fH}!$v|fcsjmraz=?{gX)6zygB+ba`_C-L zpj`!RMiRfJHwX8jH$WqX6{50ePJtAw_Fw(iVKSt*jpu&{akhnzUMrdHNBG%r76eY| zckv|-By9W14#_71eLSfrY3%RMBAZ-I5W}*awAtJhP7rf(c&}q*0bbGztGf6ccDe_KG5$`AM=8_{lWEnpg(@wHN{^fyq1UTigH3aJ5S6yujsuE^ zs+f%v_9yu!@y{JO-5sc(Q5e(WX&PJ3^GJ7*k<~6FzP2p}gnRcp5g#N_S9aC!i@(hM z^gBSJ=6$3Ri}5>(KK3yF>jfyMCtO1KKKxWrKYp10PcOj#&Y1g;wGp>*v@E=y%u|vRlo_1FC~g~yt(BCdDbmkc8iWTmRdd$oGuEo6VXO)tTD(QCj~`MBYiuy*N{El+h;QQ>EhdPK>GB9I(k+x5`7cn$ zCprEC{^el@;T4YY*qP%JK9ReSggCN`f3X6EF7|%wVVp;yWZp~l}GxVNMpd&H3f<+u$0$m1b7JuO3>FO9l z{@kzdWiJ(-Y&j0(Yzd~cRamfY^;R%TKStMn?E(l!=y}GiQ6JbV(qlu_A>J(jP73UkvIlSRjb8tiG!M`CBdqP?z zX~g$tcIlu6177^oza_|LF0b)x!VZ@W$kiqC!wb{wg;niq`Fp*RH-!B%T1EA;iHX)y zHw#}pAwPylLO`0jQ9?jKdsobV(OQK+>9?UCo(%wk{lOz4wcs#_u(jctBkUrHgJYH8 zmZop2V|I>x>SOdIrB@Z;GQ&GdLS8$Nn1<>+qwe7aU)(6Qe50L_2WxNy=*D~+#BQRH z{{+6wI0hPFCjR<)^KY)HzS2WhoxneS1brhVdjG%QNf~D=tN&WBno~6YF`pRyYNXND zfL7w~qB2<0X_CARD4I2cBwbQKCYS#CMxO+yy?VvEp&ek`^KyZ4D}VwQj^OiyFX?vD zh2kLrvWeH}IIGdq=RTLo)b;&+W=8ji2?PGXk9`3EvH)m2EP)d(0dO&4uKHRY4{m$qvEu62s)Bl?=1G zG5AZ0c>EE1(E&_h_NY=f(#;cC&97^MIEK-iyAQC47Bu%N$7eNnJInlGA&Xyc2EkNMT{kxl;2V38#z^F$vEh+~LRZ z!gmlw!gpL=VmH2pVF*Y75!O0?N#+o3&+i4udv%aBLf-Fo08i1sC(-$&g1K-l64A;>7*EA2%w4?(VL^JwON&T$6b{{nPo)bZ7dn znag4UYf*dGt*U$9sZ;x$BVhuH_6J2?=>Pe{?CZ6dmILU+j0PlaQ~v*caQ>yRSF@I1 zmPF%=O%!3Yb!R=|*SeEj?yXI7&a|P5%U8&g72HCGZinxO2ZZC_R{82zWnSx7PLtGz z&Js=%)-CZw#8UJskxL4EWO3RX=Q2NTW4S8${_PQJ1N$>6R$s*yTc%Igx@ZNR9xO_R zc~p0{f1YC4rWm9pophn3JGo(iVYO8gz3%xNo80_k5|k#Xyxxw(Sb&O7iwSLgmhh|7#>)E+srt& zsWawRtYHJNHyQ^ySjYs?GnMM=N%E6))ec#cXrnK)S`Jsz19b!*9ZE>M+IX+K=a=L0 zhm|;Vjau8=&~ZfV9N4wiAGIrG7O8;i^;O8WFxH!KewU-_VjX% z-`$#~G^-z~Y^azEy@QXy2;oyr+DyrB0kpGThjn!buZredM+qn?eX)>hm1=Vw%!b#N zBlEDFyy-K}Sgd^`F{2J2owNdWTdKi88kox5-gC_iFjnn{pfL6Jv%_``Y%cRYaiM4| zhVyQOLtZEh0}t^%3ioe-yZnH{Y{EbhaGcy#%Ge9r2lU5zj`a=8^Hz_b94+nKO@B}D zQXk#d;@dRq#Z*)GD~H7PIp)5%aL~P?!OHJK_>Y)YvI6A<-Mm8yir*5Yq6hlgyiREe z=P}95SBzcatXqgrR_={s>5Zo>kbDr!=bQpt@I6q+>VmmccZd|{(h`-nCq#Uol}HKA 
z#QP=HYc?G8$gOVhNM~^LWIp?3Z&Y~(o=P~A@JmxE6K<~eN zzydGAyoB#|o~NNRHcoB?RR^hfa*QS5}1c0V#r92yS9c2Fw&DK3kRDa0#5WL{R#J9BhPh7eGUS zF&)T$B*RRBje#yCv|=yx0#oIt8xxVRS7h0bZ=lEgJ5}U0dGaX6FkC@~Ys~d=%gNfh z?)nJG^?J$i{-}{u5WS0$+<-es6h3BB6e0^&G8l>T5g(cD!8WKcfVvZ6AlkH>D#^$a?>p%A@6_+S zDK?y*=|eU43l`=kaoR$Yblonx$x&DMdwso z)gs~_5QJaW?S45?lr4ElkTuqsW{|DycNM-wk%fgiDM7ap+{wUOPg-C)#RMP4XXHC+jwG^sSYWlZ%Tp9N2 zg*fDr#3n$4l^6Y$RjkjkaLyE!gj*!#A3K_aisjKTMzS$dKE8QXo=c~?P)PrVdV>~7 z8(^p>6P{-tmK7cOg#ZbPgV5Z78i*+^B&^nl0ngMEBP1OGiZ-O?*op}q0r1rFe$bez zHKa;;?kd)}SYbNnpW2pRN7vGat|VQ8UDhjocm-~1A(GLUc2zm=MvTcTGN&n0C;5BD(#Acj+Z;iz0;8sh+cEvJs>4@`))-~!-D-&H}cDw$D z!G%r(Cc#Ed;%rHbE?EkczNlA-&$c&Sgl1v^nB89!Z*2Hcp;VyUx}XTAZsxOSik!=5 z7!=n{GLds(jX)^t0#oXtIP}BpES~l?W>e@Jt!|FS@c{1;`>awm>dY}T%}*ghFNL{K zmwu-=|x28wlMpscC|=5CFU6T zo{;>(U!D9R>@Dz;Jh#eUdq1%fTe}bi<8q616BB_%WyPeiuR|(mgB&`LYiXU+f0>hl zIKrWsl9vquc(jWtmTVbqcf;O{vW%g`YW%onIC$zEvHGCEJWVZ)Jz;oCo%s;6(Ewp( z_k2~iXX==8m5mv<(38g-C-1~Zt(6O78^4ZA=Zty;aPt+T!6#tis1@?meLhHDHqn__ z10w$!c{%R(z>UWZ#JF#L6=RK4qp4>AA;T_uvh0>pwY5Ea>jw!lKqZb&|_KO_=iBUQ9coWY<~+v%OetyLdcClah}sNcn$Jnj_Vgn z8}C`4=l+*O;kf%p_AZ_}6-erF{d2YR0f2>S5z)MPDS5zxuF4|MbuF*psz|AUv!snf zKkY%cus$VyS&h3HWC@h>HdSWnTYvMt(-pWBnv)mDi|wCo)ME)Yz;6#oa2$e39Fkkz z1L&Hpmb;MIv!R(yy2-zH#4;XUIK5KRoA#XK_BBI@#!c4@(nHdHP zPU~quSJJOZ%%120%*4J%hcS8HLFah|$p;;pSJg9OT#rCi3G<{+S;~Aod2Iu60)zfh zkPqHPs5r{yqkqWL01hy$V>%7c7Lf;Z`pEqse-7H)IsW0QPFI$-n-N5MgG=cUn_oL7 zL7kbGOMhE0I7x!;P_!RF76^IS?t(EuRz*Jm`L#mOm&9RX&}ZErsm3~1lw24&gq5d; z<^0pw^36a+#X1k?ZM3rJJQcN0e6CzG2sqvmyuWPy@?;xAB;pfZtJbj zrns(cu?G@|3=B~7dclPh46ODaxey=uopD3o&d6Nxm(be%rI| zXxxiGvj3v}NFzkU69seU`*rzzw@lPP-s$Ct4s`zfM!E$mUmaJ8@^B0lVd2>2;DwZ zbV?F!aPD1pRxD>subhmLsH##k$)PBjV@NM~OnSd6LZFJ5C|-t=cPZyesAmCQh;vN| z0b>OPrCIFx)Xu$B?wQ@6cV;|{G8xfwmMBYtS`$kwvHST_aSbg#v{&T>>XQqCX&4m@&s1J*nd$^pZ{=l zblyWD#!TKYoceMF^)ntMYBg~Lwi*eVmR_KHS%UT(yQAsqqgtNoX#mXYZ1L3u^5vot zU`0z$lp3;9RJTiz!^&tIb=x*!Qw;e9dV^^}Cxw!8RxD`}#aphe#ckwqQ;Tn5)#DU!dL)%;x(ivwE5{j^B8`%83T3AU 
z03-Y)Y%kUfH10B!nMcLq&g;z==;nkpSOqEz6KT8vm#$4b03h2r%ATt$iH^byE=SH* zqlkX=Gv?$B++m5kM(3qSgSdGnZd>6Te8!aT;AU7~D2>d6GL^`Hw6*#B@N`H<$m*7f z2*uGt32PT_o$iCRqVY7CEc|X$uLWXGg!-EYp=GA21*?e$nH5%AX7r5HT5*&Jj?zt2i z=nQ!rv--@re*%em>{Bo1T`2uD5oU|A4pHk}xc9*1q0AZF(!$17TNIbp$*nr$(^s!2 z(`|DiXbbWVB>Ld^<*WC{QvPv7Xk!ENkN-F%N>{%H={@ki+oU+Bj*%!JycPpkc?yam zF_no?sd!C-^EF1>R@)}#FIFzv&pz*Zj;1KcD};uw;@ui}qG`3u_qYlw3O5Q!$v3&T zv20;kPrhASkokUlJ<$OMsw9N2Vk8Ls&>84Mq_$lcCiLQ&TXY;#Zq}%7u1OM%@G%Mf zc9JipL9m#(Pk5<;i_YemKPM+OP?;ER`x0rbGu3xb+sH#p&@OZ{%;1e$T01GiTKv}x z4g0tHm0Se|T;s5P{83*&guycN$1qX?C}KTV=27uAdD81xP%vuo=OV19N^QwhRkJaAgXlaEa?oU0VH%vL$kBXPYcAe#qpJ$m$PcfWx ztDASZoMk&z?|ujaU~5fd&D-?Iuvt1R*UG*AxZgF%n6o|DjDB>Hg-Vm6>1ADCA>Tm8 zyFF%-YGz$t^opctIBb?vad%d9tVkH=-HObm$98C}1?kHi0c$Q8l#bf9EJ7TzIMHHH z$u$d#{r*K;aVfhAgv-xriSwyDavrJJ3o(2+W82&f2MsUbAk7|Uzu_})`%U5F>CE7) zW~B(v1jKSNBXO0xu$U(zX80>~05}Sm>k~p^7d{82BE6=cBea#VN|w=8?UIu?QJjvY zVN99@29l4`3!7u1G-m)kQIy24q-X}jbV(PPg#2g%D|m?2@Uj4Xsc~iEiO98X<5jLy>j36jZj}?Bl#7e z^?`S@VyKVOaF;bkxDTVt^f^ND%?mfO(o_eDE%+-7pk$B?huB9S!UGYM;sX5} z8&sB}8wYi+>1If}bN}U_5|Wt~F;O2i3}f!(X6+MH3gog>=@ewC14@cJCu@gy8Tm35 z?3(?rqR;1BCF3P%GsXOcIY1d{2+U>;%wxAr^Z_1t8Ii089myCC!kjT3f-4XQwe-5U zj(F`BTcgy=zB%v30QEuZXuf=FI9;vYdfIvoc(=pdYI~o~B#L`i(RajCGd?%GOuK%5 zQaL^7Q#J<95jP3T3Ay!}g>UDL>e5F=2K5?M%a$SM+y zp&O|8#qS166-NoW($)uaAQrnq{K4kphtje3nqTu= zMwEZC1H;eG=jMUe=2$L5%=zHsa zBm#Z)UMby;qVQ@#_{3KmMh={nW^nTNlkF607*~;o!M*%=K`(lInCNf=M~?gAe0vnS z45>;VWP55J8a7xHw&2FM!S|$&b+U=o-CyeL-9ZAX>KJy{(9PFRIBk-qp4BtGtY?v` zn7%NJ%XtwxC53P_8jH|ufyv~W1FyUP>BR7&vr|vwqdayX)io43Vg3%M>9wLKI3|~V zx6m-R=&c0kmaUKq5Z=s^`0%80aGj}6KMRjTBAer!9b>eWKt19f9rXxE8On8&7s(|E z#47ZDf1-4a0jbs}OF7H0ZpPt|Ig$A!1qS$`kEzZV z!Is!{m8|rRci!5gl(aWxa@3XrXHyS2RfMh~(yUsOEoO@DW`Z_q{mCRX8(f)YrjN>`i;hLppB`JP`sP51#yKGYt-1HzuEtiFc+JE&s_iCxoS;yq zZFVloy$n!jK*8R(o7-KU_wY7A;!Qn>>*(0%7{wsLu4W`e;iS;0^AAB(z$hfG_^(04 z8helJ$Ba}jJ=a}<&F6O_`jk!!)z0S2JAOX=2JUJ#=@G;CI)|UF5*ut=Dz1LZWMVZj zzgFUc(gABiNgI#7^StfI3S4BIU4M4%3oBMeNBReY)1=gL1J$8j^CD8)Wm_wkb~BB; 
zjAiR+zXqA+XukK4Zg7`6M?r*6b31Ahws818pX1J11SzcR`^PBU;Cxttv2<%Q0VfPp zpX-biHtB(fiHwV59MoPP*(@+Nfm2mClP-lZ0+e_kLd+s!^<{UCH2@WkyBPIB%DILu z5&0dg5VyT1Hj158k|CGU)KrEd{l>roby4NYAW9ejF$;!M5U8@+I8rJgh&U^`Ud$WA z@iy4(T3e!-ooqbddqHO<7nk)3d>n(epC!z$Rj^*Xu>6$1ENnj5f&b)Jawe2BNhsrI z3xXZ7rOUZ}!C{^JU2X{9C91IiQE;B^aD5CUd=cyZ6D{HSMCkma_6V=ZZt_tQBxx`W z8-qi#}f;Jd3noK+HqV4p**>6YX48Aq@?;?PQsdcT3{Ks>Zq!xjjO*bwd-ZTKf9Dj`eWb#)8WF zX-x%!5yvEC22*xZ?cFNs4gdFHD$FRYkH|svEvjlZ?yIh)jwMwC7RD9h=@-q`IG6Z64ZGYcVK464 z4;h`HRb?n$4R}-$%!ophiZ#2e^hE%lU>WA`bSRbLI~ft#;TTj{OnCAM%}ArhDg@Zy zvP!x16veBhsTmu}Qeg2_BD@fn%d8p{7XiWqvH5tvvnM~bEvUD5m3W3Q>)o(B1fI@? zIXN1UCOQLC$YH^>yTY}PVpWOF-OIOLcB>XUW1*KprkTsg0&CHV*JMOB34|Q(e#Y`` zw$}0-Ux&4-j8O@>nqBIc&-v!J;s!0_!xQECs;;tjxa&K{yptbX(s z`rMpns68ONlb1aA73zGN4hD_ko9(IImA~yVU%iapz!0WdC8p~hy8a8F?tuFqY0UX8 z)0Bjk$n8nQhv6P_ira|D_p)Mfh`id}>H;LV+2XE&#%X&Cr;tafI!7}s!w#+P;ELWM zgU8U>5<%z%k&FsyT-_U}==T^$RFD?yVqsr<8hz-z7=gTNpN>Uw1GdSs%uY@zXTNaY zlBUTs)sHpH+Ig_6Jv@KjA##siR1kEMk4h=|t*`B)9d8)TG%bufoc{upX}+c)9>E1T1N1Dq1ISjbn{8m#t>J_MAfNuxrLwE1Sfv0M>;=#Po1xg!vY{yMj z_}pwwyYpE2X(0`k=QIp9&|aaKwDHAl+jGzh3E32f)AWm>ElP2r+3rb0=U!E-R_=X7Y>B zp^;gU?P^$0Ne!xh))Ufko#TIg^wLp_DQuez=u+DI_0^g&=TuBmVutowT187g0jR8g*%WlX1n}f}1d$p0w0UK22r%YAc6N-`Syc zb@s>`ntVkJ_pLj0mlR%K@o*OanzhIkfY2icrDM4SP66gJ^f9%L^02to(`1Ymd>Mt@ zI?!uZ=F%vi$n z`OJ7)!N+n6>7!j9ii@&QVToIq&Wu_dORc}W4+w*bc^l)1ugJ5qU#c+Q}p?p;J3^%FlSWQG$jo0@7cSL`R+7}g6?Sa$m=_$_Xnu* zzv8^=^7TV&%VJvTWCNnt{rr%{v>;w>SJZnQ*$wG5|#pP1jsd+@MlJt60t2 zx@ecbR?kXgCYRXOx62B=h{4D)NzMSrV2}~(5$0VlCSfao692to|C@YeEh-8>;!%F{ z6PGJJuk?`<0fBeVKK9lSm6M_iBOM@t4-O6jHznl6`9+ac#fAETVY0yYtIT)7;5}M> zwV*>6hqAm9`nNVIe09d~&A_rHLQhZqH+}FWlCZ9DXGhFru^DQ)5m*o8=NjfoMOeCz z_LG+TYT+D9JIQ59ICg{%BS=mQ2bghKMlXSL4cE(_1tzYZD_ixJOjNr7^#&>1wUa}L zrK@gYCSp_<6heAA`^3(S#9W>_t130x<4=^7iEQTLi+b)eL#auokVOr|{IzZ|YUsLY zr*1m7=Ky6FYz5R_A`V$hfBm#1Nib1X4Ym|r0HUabi#$HOviu7S{BlYLMUO?!DYh2( zxowND?L-Ju@kv2ZQWf9h%?)sEFO51}^RuaTpj5|v@(x}CIT?hI$3uHzx)snwPbIsI 
zc@2|NbBlZnJ{oCUDyVePub6$jn@}D{!dKr&n@NIzYtL;gCN8)1%+&J-r*1j=8X~{+ zKFjNw6-Cd3KfG*wm9U*6k_svv4*Kxa_HK>`1jjy)NX=f<{!9PqWCnu+yC{C1q{36K zXl_&Y&{S3>i}2HNqYNAG-WtIKXll#w!TadaOdGm${C4FVL?=z0hfX%=%zhYql0|!% z+=+z77rpxt+9W_RoY($v+pmtN?rxyo8V2d(Y$XD3?)~*CD*WU338K9>*!3wpB5lah zxx|R1zgI*t?7v7;vBR?(G4CXcVI|dimOc3G zi7CzJd{^lUTHDH?wax!4Q~KZ5wz8?Q9l+%87B}cviSN)qsSyFbEm!bEEo)v=t%DXI zKs4Usg_0tbWbm*uVSU7F9Xy_Dd1kLx`_ zGNCax)l}D`tNca8Tt~qusSvl>m?KkD+)Q;5*P93Kn=Opa+n2|p7Uy2)0iuV-UaVTXtExTtW8E4 z1kR}kj8FSsaS)df(ggv|7tcbc_ycG!@bXq+B`==G34f*4*yPD&ct%0*EtD&UzSJm1 z8}I1v1{Z6>QoyThk~|sXD7q?dvTZUDDuBcz=n)k=u`kfyQTR3+?Ot#E1+x7Nq7&RsAHy=gVZy zYiH2mlQP`@Qi9^=>i&I)7_X}Bq^yee&DC)_Hn#wCMky>Q2~vFwre!m;fatOk>bO5#qALs7P`ZI@bN5rcI;r zqTg)f%4oPW>6VHzPI!G^{Ke!h*$nkdY36!_ z;WFekxfLJ{ihPB#EraHqXEZNdU9D8hPTMi-h;l+}r@OX$o3rFfmll_H4QyWDCo;}y zV{52|IFd3II7)Z(iXKl{CHM4`wMM~@qkCmmE}+C4sid}!E5-ctct?p5?Na8E0F~jo zDA{jz@7Ngc#chV+0%~@f(;M<(t2uxT__0I**GJykvqUpP)*AuzF( zZD4bTTx+)Ec~)6$u#Q970OZDtJU=uVRQZX`c9J9{`RpJZ8%-y@_exDh;#`5OpPEP9 z{opF2&7a;j)~RZK7QN3%i!OcjxVGOvysS|HXdS4GJ@F)j`2NB>Nb*w?!InTE;&LM) z;A14w=%wqI4NppPea9=O<6waRfed$WecoG$Ey|FGk9|zS3t2wmu}HGb)T^>X2m^3A zSkGra+q#779BmEDuRS?#4@i}HhGro|q*-c9;EpNSPPU2K2Z$Lq-`u23GeM5px6KOl^3Ytylt0Vh z0^NW#id)n=QS$|+b{?^{;rBDbz$++S5eIa=0eW)e$VpOi zxGIqrH(I`0f>oj!=vImQ?a~X>{Q)%CW7ev4RYf??4Bao)l8Dq3wD&@#-;;jCDaTTmYD-G zinLq!^o5M>Am_4do-T&z9%5?DHaxYj^Tv@NX8_p83q8|spG${YMrYp)3s815;1Hx? 
zb9f3U%CZ!Avp9rI*Btw8@7U)P2SR!?FlBAyz2j@6++7dCoxrm-Ve{XrR#!*XwDd%U z#Fe7hnIh=+s8~rq6dreJ6Ut}V=9XvcFl>j9Sjb2?pJobYsAa{l#X_x%t>nUpNfRKl zx^=z(2!_M1=*F@~kMx@p`WZOdbC8G|bWx>Q{uWyQ#{~@ra`*lBg#>o=TQnD#teA=r z{cAaKhHt}QpggBPn*N^YH0pxYLksjzW}t77X7rCM*XOrF%6~MK6_R@`E~c#VN>={HKS%ywBlm{`=vR1zf5Nl*CEoAb#wTmIYkba50#yE+6EvqkhYt)a*Qe-LV4w^W z|6WV$XBU5eGTJhDhzUU+E_+Y`QMy06_!}xa=*j!rTmA#}XD!jE$Y3+_cEpJV_1Jgk<#ay$pwyM>g$`!RpwD>D27U%}Ge)W#BE3YzJEpO3%3ho`0Ke{v*& zUfX{=8~;1-)B3hQflEdIA@JXi%+rE&KfyI*e*v!Yq&(fz34L0?<0tGpjbFh2d~yEq zaQ*dQKTX^H6aT>ApW^@TBmXo><4+bt>wm=Zzf(NTuJn@v*7+Y%{B5qKr&IDYMbA$r zM%Vw02{bo1h&)pG-Gi{~6N{z1W{pN<5`|nz`U7-PxP}jP7sq7(AtX8b|+= za^c;7NcpG8ai+gs?N39~f6|@5|G&^Z4NCuIp!(A>cp97gvr~co|KFYZpOLyxd-F8l z@n>&31OK0U^Vf6qG#=|`2eL!|4;}bxvZtc=pJc@`|0&tOOW>c5>Qk}uPaduKU-A6V zdp^zo_7wlAg7haodD1_@|M7nKlb-Y`$y4dhPZH$Je?sy@zw;MDo~Kk#MHoM+sz3ZE zRDV6|PfufiqQA`l1^OSR`CpyQKD~pd=Q=+*h>L&4@efXUo>Dz^v;U-eEd6IxfA4gE o%JtOg`IBoIWS{@7arV>&sv!LWWN(AQszG0oAgeo}?zg}G3)CSP&j0`b diff --git a/lib/cloudera/hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar b/lib/hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar similarity index 100% rename from lib/cloudera/hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar rename to lib/hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar diff --git a/src/java/com/cloudera/sqoop/config/ConfigurationConstants.java b/src/java/com/cloudera/sqoop/config/ConfigurationConstants.java new file mode 100644 index 00000000..eebaa3d3 --- /dev/null +++ b/src/java/com/cloudera/sqoop/config/ConfigurationConstants.java @@ -0,0 +1,83 @@ +/** + * Licensed to Cloudera, Inc. under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Cloudera, Inc. licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.cloudera.sqoop.config; + +/** + * Static constants that identify configuration keys, counter group names, and + * counter names. + */ +public final class ConfigurationConstants { + + /** + * The Configuration property identifying the current task id. + */ + public static final String PROP_MAPRED_TASK_ID = "mapred.task.id"; + + /** + * The Configuration property identifying the job's local directory. + */ + public static final String PROP_JOB_LOCAL_DIRECTORY = "job.local.dir"; + + /** + * The Configuration property identifying the number of map tasks to be used. + */ + public static final String PROP_MAPRED_MAP_TASKS = "mapred.map.tasks"; + + /** + * The Configuration property identifying the speculative execution flag for + * map tasks. + */ + public static final String PROP_MAPRED_MAP_TASKS_SPECULATIVE_EXEC = + "mapred.map.tasks.speculative.execution"; + + /** + * The Configuration property identifying the speculative execution flag for + * reduce tasks. + */ + public static final String PROP_MAPRED_REDUCE_TASKS_SPECULATIVE_EXEC = + "mapred.reduce.tasks.speculative.execution"; + + /** + * The Configuration property identifying the job tracker address. + */ + public static final String PROP_MAPRED_JOB_TRACKER_ADDRESS = + "mapred.job.tracker"; + + /** + * The group name of task counters. + */ + public static final String COUNTER_GROUP_MAPRED_TASK_COUNTERS = + "org.apache.hadoop.mapred.Task$Counter"; + + /** + * The name of the counter that tracks output records from Map phase. 
+ */ + public static final String COUNTER_MAP_OUTPUT_RECORDS = + "MAP_OUTPUT_RECORDS"; + + /** + * The name of the counter that tracks input records to the Map phase. + */ + public static final String COUNTER_MAP_INPUT_RECORDS = + "MAP_INPUT_RECORDS"; + + private ConfigurationConstants() { + // Disable Explicit Object Creation + } +} diff --git a/src/java/com/cloudera/sqoop/config/ConfigurationHelper.java b/src/java/com/cloudera/sqoop/config/ConfigurationHelper.java new file mode 100644 index 00000000..97f97396 --- /dev/null +++ b/src/java/com/cloudera/sqoop/config/ConfigurationHelper.java @@ -0,0 +1,171 @@ +/** + * Licensed to Cloudera, Inc. under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Cloudera, Inc. licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.cloudera.sqoop.config; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.JobContext; +import org.apache.hadoop.util.GenericOptionsParser; + +import com.cloudera.sqoop.mapreduce.db.DBConfiguration; + +/** + * This class provides static helper methods that allow access and manipulation + * of job configuration. 
It is convenient to keep such access in one place in + * order to allow easy modifications when some of these aspects change from + * version to version of Hadoop. + */ +public final class ConfigurationHelper { + + /** + * Set the (hinted) number of map tasks for a job. + */ + public static void setJobNumMaps(Job job, int numMapTasks) { + job.getConfiguration().setInt( + ConfigurationConstants.PROP_MAPRED_MAP_TASKS, numMapTasks); + } + + /** + * Get the (hinted) number of map tasks for a job. + */ + public static int getJobNumMaps(JobContext job) { + return job.getConfiguration().getInt( + ConfigurationConstants.PROP_MAPRED_MAP_TASKS, 1); + } + + /** + * @return the number of mapper output records from a job using its counters. + */ + public static long getNumMapOutputRecords(Job job) + throws IOException, InterruptedException { + return job.getCounters().findCounter( + ConfigurationConstants.COUNTER_GROUP_MAPRED_TASK_COUNTERS, + ConfigurationConstants.COUNTER_MAP_OUTPUT_RECORDS).getValue(); + } + + /** + * @return the number of mapper input records from a job using its counters. + */ + public static long getNumMapInputRecords(Job job) + throws IOException, InterruptedException { + return job.getCounters().findCounter( + ConfigurationConstants.COUNTER_GROUP_MAPRED_TASK_COUNTERS, + ConfigurationConstants.COUNTER_MAP_INPUT_RECORDS).getValue(); + } + + /** + * Get the (hinted) number of map tasks for a job. + */ + public static int getConfNumMaps(Configuration conf) { + return conf.getInt(ConfigurationConstants.PROP_MAPRED_MAP_TASKS, 1); + } + + /** + * Set the mapper speculative execution property for a job. + */ + public static void setJobMapSpeculativeExecution(Job job, boolean isEnabled) { + job.getConfiguration().setBoolean( + ConfigurationConstants.PROP_MAPRED_MAP_TASKS_SPECULATIVE_EXEC, + isEnabled); + } + + /** + * Set the reducer speculative execution property for a job. 
+ */ + public static void setJobReduceSpeculativeExecution( + Job job, boolean isEnabled) { + job.getConfiguration().setBoolean( + ConfigurationConstants.PROP_MAPRED_REDUCE_TASKS_SPECULATIVE_EXEC, + isEnabled); + } + + /** + * Sets the Jobtracker address to use for a job. + */ + public static void setJobtrackerAddr(Configuration conf, String addr) { + conf.set(ConfigurationConstants.PROP_MAPRED_JOB_TRACKER_ADDRESS, addr); + } + + /** + * @return the Configuration property identifying a DBWritable to use. + */ + public static String getDbInputClassProperty() { + return DBConfiguration.INPUT_CLASS_PROPERTY; + } + + /** + * @return the Configuration property identifying the DB username. + */ + public static String getDbUsernameProperty() { + return DBConfiguration.USERNAME_PROPERTY; + } + + /** + * @return the Configuration property identifying the DB password. + */ + public static String getDbPasswordProperty() { + return DBConfiguration.PASSWORD_PROPERTY; + } + + /** + * @return the Configuration property identifying the DB connect string. + */ + public static String getDbUrlProperty() { + return DBConfiguration.URL_PROPERTY; + } + + /** + * @return the Configuration property identifying the DB input table. + */ + public static String getDbInputTableNameProperty() { + return DBConfiguration.INPUT_TABLE_NAME_PROPERTY; + } + + /** + * @return the Configuration property specifying WHERE conditions for the + * db table. + */ + public static String getDbInputConditionsProperty() { + return DBConfiguration.INPUT_CONDITIONS_PROPERTY; + } + + /** + * Parse arguments in 'args' via the GenericOptionsParser and + * embed the results in the supplied configuration. + * @param conf the configuration to populate with generic options. + * @param args the arguments to process. + * @return the unused args to be passed to the application itself. 
+ */ + public static String [] parseGenericOptions( + Configuration conf, String [] args) throws IOException { + // This needs to be shimmed because in Apache Hadoop this can throw + // an IOException, but it does not do so in CDH. We just mandate in + // this method that an IOException is possible. + GenericOptionsParser genericParser = new GenericOptionsParser( + conf, args); + return genericParser.getRemainingArgs(); + } + + + private ConfigurationHelper() { + // Disable explicit object creation + } +} diff --git a/src/java/com/cloudera/sqoop/manager/MySQLUtils.java b/src/java/com/cloudera/sqoop/manager/MySQLUtils.java index 19bc1025..5036a9c8 100644 --- a/src/java/com/cloudera/sqoop/manager/MySQLUtils.java +++ b/src/java/com/cloudera/sqoop/manager/MySQLUtils.java @@ -18,6 +18,8 @@ package com.cloudera.sqoop.manager; +import static com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR; + import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; @@ -27,8 +29,9 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import static com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR; -import com.cloudera.sqoop.shims.HadoopShim; + +import com.cloudera.sqoop.config.ConfigurationConstants; +import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.util.DirectImportUtils; /** @@ -55,15 +58,15 @@ private MySQLUtils() { public static final String OUTPUT_ENCLOSE_REQUIRED_KEY = "sqoop.output.enclose.required"; public static final String TABLE_NAME_KEY = - HadoopShim.get().getDbInputTableNameProperty(); + ConfigurationHelper.getDbInputTableNameProperty(); public static final String CONNECT_STRING_KEY = - HadoopShim.get().getDbUrlProperty(); + ConfigurationHelper.getDbUrlProperty(); public static final String USERNAME_KEY = - HadoopShim.get().getDbUsernameProperty(); + ConfigurationHelper.getDbUsernameProperty(); public static final String PASSWORD_KEY = - 
HadoopShim.get().getDbPasswordProperty(); + ConfigurationHelper.getDbPasswordProperty(); public static final String WHERE_CLAUSE_KEY = - HadoopShim.get().getDbInputConditionsProperty(); + ConfigurationHelper.getDbInputConditionsProperty(); public static final String EXTRA_ARGS_KEY = "sqoop.mysql.extra.args"; @@ -92,7 +95,7 @@ public static String writePasswordFile(Configuration conf) throws IOException { // Create the temp file to hold the user's password. String tmpDir = conf.get( - HadoopShim.get().getJobLocalDirProperty(), "/tmp/"); + ConfigurationConstants.PROP_JOB_LOCAL_DIRECTORY, "/tmp/"); File tempFile = File.createTempFile("mysql-cnf", ".cnf", new File(tmpDir)); // Make the password file only private readable. diff --git a/src/java/com/cloudera/sqoop/manager/OracleManager.java b/src/java/com/cloudera/sqoop/manager/OracleManager.java index 285c8c00..40d1156c 100644 --- a/src/java/com/cloudera/sqoop/manager/OracleManager.java +++ b/src/java/com/cloudera/sqoop/manager/OracleManager.java @@ -19,6 +19,7 @@ package com.cloudera.sqoop.manager; import java.io.IOException; +import java.lang.reflect.Method; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; @@ -26,16 +27,14 @@ import java.sql.Timestamp; import java.util.HashMap; import java.util.Map; -import java.lang.reflect.Method; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.mapreduce.OutputFormat; -import com.cloudera.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat; import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.mapreduce.JdbcExportJob; -import com.cloudera.sqoop.shims.ShimLoader; +import com.cloudera.sqoop.mapreduce.OracleExportOutputFormat; +import com.cloudera.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat; import com.cloudera.sqoop.util.ExportException; import com.cloudera.sqoop.util.ImportException; @@ -166,7 +165,7 @@ protected synchronized void finalize() throws Throwable { for 
(Connection c : connectionMap.values()) { c.close(); } - + super.finalize(); } } @@ -304,15 +303,9 @@ public void importTable(ImportJobContext context) public void exportTable(ExportJobContext context) throws IOException, ExportException { context.setConnManager(this); - try { - JdbcExportJob exportJob = new JdbcExportJob(context, null, null, - (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.OracleExportOutputFormat")); - exportJob.runExport(); - } catch (ClassNotFoundException cnfe) { - throw new ExportException("Could not start export; could not find class", - cnfe); - } + JdbcExportJob exportJob = new JdbcExportJob(context, null, null, + OracleExportOutputFormat.class); + exportJob.runExport(); } @Override @@ -376,7 +369,7 @@ private String dbToJavaType(int sqlType) { // return null if no java type was found for sqlType return null; } - + /** * Attempt to map sql type to hive type. * @param sqlType sql data type diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java rename to src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java rename to src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java diff --git 
a/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java b/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java index c728095c..94714cd5 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java +++ b/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java @@ -23,24 +23,22 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.OutputFormat; -import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; -import com.cloudera.sqoop.mapreduce.db.DBConfiguration; -import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat; import org.apache.hadoop.mapreduce.lib.db.DBWritable; +import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import com.cloudera.sqoop.SqoopOptions; -import com.cloudera.sqoop.manager.ConnManager; +import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.lib.LargeObjectLoader; -import com.cloudera.sqoop.shims.HadoopShim; -import com.cloudera.sqoop.shims.ShimLoader; +import com.cloudera.sqoop.manager.ConnManager; import com.cloudera.sqoop.manager.ImportJobContext; +import com.cloudera.sqoop.mapreduce.db.DBConfiguration; +import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat; /** * Actually runs a jdbc import job using the ORM files generated by the @@ -91,8 +89,7 @@ protected Class getMapperClass() { protected Class getOutputFormatClass() throws ClassNotFoundException { if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) { - return (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.RawKeyTextOutputFormat"); + return RawKeyTextOutputFormat.class; } else if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) { return 
SequenceFileOutputFormat.class; @@ -154,7 +151,7 @@ protected void configureInputFormat(Job job, String tableName, } LOG.debug("Using table class: " + tableClassName); - job.getConfiguration().set(HadoopShim.get().getDbInputClassProperty(), + job.getConfiguration().set(ConfigurationHelper.getDbInputClassProperty(), tableClassName); job.getConfiguration().setLong(LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY, diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/ExportInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/ExportInputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/ExportInputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/ExportInputFormat.java diff --git a/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java b/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java index c8a161a5..8f169ff6 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java +++ b/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java @@ -38,11 +38,10 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import com.cloudera.sqoop.SqoopOptions; +import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.lib.SqoopRecord; import com.cloudera.sqoop.manager.ExportJobContext; import com.cloudera.sqoop.orm.TableClassName; -import com.cloudera.sqoop.shims.HadoopShim; -import com.cloudera.sqoop.shims.ShimLoader; import com.cloudera.sqoop.util.ExportException; import com.cloudera.sqoop.util.PerfCounters; @@ -137,7 +136,7 @@ public static boolean isSequenceFiles(Configuration conf, Path p) } /** - * @param file a file to test. 
+ * @param file a file to test. * @return true if 'file' refers to a SequenceFile. */ private static boolean hasSequenceFileHeader(Path file, Configuration conf) { @@ -192,8 +191,7 @@ protected Class getInputFormatClass() throws ClassNotFoundException { Class configuredIF = super.getInputFormatClass(); if (null == configuredIF) { - return (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.ExportInputFormat"); + return ExportInputFormat.class; } else { return configuredIF; } @@ -204,8 +202,7 @@ protected Class getOutputFormatClass() throws ClassNotFoundException { Class configuredOF = super.getOutputFormatClass(); if (null == configuredOF) { - return (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.ExportOutputFormat"); + return ExportOutputFormat.class; } else { return configuredOF; } @@ -218,7 +215,7 @@ protected void configureMapper(Job job, String tableName, job.setMapperClass(getMapperClass()); // Concurrent writes of the same records would be problematic. - HadoopShim.get().setJobMapSpeculativeExecution(job, false); + ConfigurationHelper.setJobMapSpeculativeExecution(job, false); job.setMapOutputKeyClass(SqoopRecord.class); job.setMapOutputValueClass(NullWritable.class); @@ -249,13 +246,13 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException, perfCounters.addBytes(jobCounters.getGroup("FileSystemCounters") .findCounter("HDFS_BYTES_READ").getValue()); LOG.info("Transferred " + perfCounters.toString()); - long numRecords = HadoopShim.get().getNumMapInputRecords(job); + long numRecords = ConfigurationHelper.getNumMapInputRecords(job); LOG.info("Exported " + numRecords + " records."); } return success; } - + /** * Run an export job to dump a table from HDFS to a database. 
diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java diff --git a/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java b/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java index 362b3458..7767ddc3 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java +++ b/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java @@ -43,7 +43,6 @@ import com.cloudera.sqoop.lib.SqoopRecord; import com.cloudera.sqoop.manager.ConnManager; import com.cloudera.sqoop.manager.ImportJobContext; -import com.cloudera.sqoop.shims.ShimLoader; import com.cloudera.sqoop.util.ImportException; /** @@ -76,14 +75,13 @@ protected Class getMapperClass() { @Override protected Class getOutputFormatClass() throws ClassNotFoundException { - return (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.DelegatingOutputFormat"); + return DelegatingOutputFormat.class; } @Override protected void configureOutputFormat(Job job, String tableName, String tableClassName) throws ClassNotFoundException, IOException { - + // Use the DelegatingOutputFormat with the HBasePutProcessor. 
job.setOutputFormatClass(getOutputFormatClass()); diff --git a/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java b/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java index 09463310..b78d0e30 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java +++ b/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java @@ -37,9 +37,9 @@ import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.orm.TableClassName; -import com.cloudera.sqoop.shims.HadoopShim; import com.cloudera.sqoop.util.ImportException; import com.cloudera.sqoop.util.PerfCounters; +import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.manager.ImportJobContext; /** @@ -49,7 +49,7 @@ public class ImportJobBase extends JobBase { private ImportJobContext context; - + public static final Log LOG = LogFactory.getLog( ImportJobBase.class.getName()); @@ -76,7 +76,7 @@ public ImportJobBase(final SqoopOptions opts, @Override protected void configureOutputFormat(Job job, String tableName, String tableClassName) throws ClassNotFoundException, IOException { - + job.setOutputFormatClass(getOutputFormatClass()); if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) { @@ -115,7 +115,7 @@ protected boolean runJob(Job job) throws ClassNotFoundException, IOException, perfCounters.addBytes(jobCounters.getGroup("FileSystemCounters") .findCounter("HDFS_BYTES_WRITTEN").getValue()); LOG.info("Transferred " + perfCounters.toString()); - long numRecords = HadoopShim.get().getNumMapOutputRecords(job); + long numRecords = ConfigurationHelper.getNumMapOutputRecords(job); LOG.info("Retrieved " + numRecords + " records."); } return success; diff --git a/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java b/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java index 0cd9c1fd..85d3e82e 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java +++ b/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java @@ -32,7 +32,6 @@ 
import com.cloudera.sqoop.manager.ConnManager; import com.cloudera.sqoop.manager.ExportJobContext; -import com.cloudera.sqoop.shims.ShimLoader; /** * Run an update-based export using JDBC (JDBC-based UpdateOutputFormat). @@ -48,13 +47,7 @@ public class JdbcUpdateExportJob extends ExportJobBase { */ private static Class getUpdateOutputFormat() throws IOException { - try { - return (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.UpdateOutputFormat"); - } catch (ClassNotFoundException cnfe) { - throw new IOException("Could not load updating export OutputFormat", - cnfe); - } + return UpdateOutputFormat.class; } public JdbcUpdateExportJob(final ExportJobContext context) diff --git a/src/java/com/cloudera/sqoop/mapreduce/JobBase.java b/src/java/com/cloudera/sqoop/mapreduce/JobBase.java index 41448f33..0242bd04 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/JobBase.java +++ b/src/java/com/cloudera/sqoop/mapreduce/JobBase.java @@ -40,8 +40,8 @@ import com.cloudera.sqoop.SqoopOptions; +import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.manager.ConnManager; -import com.cloudera.sqoop.shims.HadoopShim; import com.cloudera.sqoop.util.ClassLoaderStack; import com.cloudera.sqoop.util.Jars; @@ -133,12 +133,11 @@ public void setOptions(SqoopOptions opts) { protected void cacheJars(Job job, ConnManager mgr) throws IOException { - Configuration conf = job.getConfiguration(); + Configuration conf = job.getConfiguration(); FileSystem fs = FileSystem.getLocal(conf); Set localUrls = new HashSet(); addToCache(Jars.getSqoopJarPath(), fs, localUrls); - addToCache(Jars.getShimJarPath(), fs, localUrls); if (null != mgr) { addToCache(Jars.getDriverClassJar(mgr), fs, localUrls); addToCache(Jars.getJarPathForClass(mgr.getClass()), fs, localUrls); @@ -159,7 +158,7 @@ protected void cacheJars(Job job, ConnManager mgr) LOG.warn("SQOOP_HOME is unset. 
May not be able to find " + "all job dependencies."); } - + // If we didn't put anything in our set, then there's nothing to cache. if (localUrls.isEmpty()) { return; @@ -273,7 +272,7 @@ protected int configureNumTasks(Job job) throws IOException { LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers."); } - HadoopShim.get().setJobNumMaps(job, numMapTasks); + ConfigurationHelper.setJobNumMaps(job, numMapTasks); job.setNumReduceTasks(0); return numMapTasks; } diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java b/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java index b2931195..88966043 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java @@ -21,23 +21,17 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; - import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; - import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import com.cloudera.sqoop.SqoopOptions; - -import com.cloudera.sqoop.shims.ShimLoader; import com.cloudera.sqoop.util.Jars; /** @@ -78,7 +72,7 @@ public boolean runMergeJob() throws IOException { String existingJar = options.getExistingJarName(); if (existingJar != null) { // User explicitly identified a jar path. 
- LOG.debug("Setting job jar to user-specified jar: " + existingJar); + LOG.debug("Setting job jar to user-specified jar: " + existingJar); job.getConfiguration().set("mapred.jar", existingJar); } else { // Infer it from the location of the specified class, if it's on the @@ -124,9 +118,7 @@ public boolean runMergeJob() throws IOException { job.setMapperClass(MergeRecordMapper.class); } else { job.setMapperClass(MergeTextMapper.class); - job.setOutputFormatClass((Class) - ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.RawKeyTextOutputFormat")); + job.setOutputFormatClass(RawKeyTextOutputFormat.class); } jobConf.set("mapred.output.key.class", userClassName); diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java index 66aba86a..bde6e531 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java +++ b/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java @@ -22,21 +22,17 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.NullWritable; -import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.OutputFormat; -import com.cloudera.sqoop.mapreduce.db.DBConfiguration; -import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat; import org.apache.hadoop.mapreduce.lib.db.DBWritable; import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.manager.ConnManager; -import com.cloudera.sqoop.manager.MySQLUtils; -import com.cloudera.sqoop.shims.ShimLoader; import com.cloudera.sqoop.manager.ImportJobContext; +import com.cloudera.sqoop.manager.MySQLUtils; +import com.cloudera.sqoop.mapreduce.db.DBConfiguration; +import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat; /** * Class that runs an import job using mysqldump in the mapper. 
@@ -48,11 +44,8 @@ public class MySQLDumpImportJob extends ImportJobBase { public MySQLDumpImportJob(final SqoopOptions opts, ImportJobContext context) throws ClassNotFoundException { - super(opts, MySQLDumpMapper.class, - (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.MySQLDumpInputFormat"), - (Class) ShimLoader.getShimClass( - "com.cloudera.sqoop.mapreduce.RawKeyTextOutputFormat"), context); + super(opts, MySQLDumpMapper.class, MySQLDumpInputFormat.class, + RawKeyTextOutputFormat.class, context); } /** diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java diff --git a/src/shims/common/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java similarity index 100% rename from src/shims/common/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java rename to src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java 
b/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java index 7a19b1b2..44e44dba 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java @@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * Implement DBSplitter over BigDecimal values. @@ -46,9 +46,9 @@ public List split(Configuration conf, ResultSet results, String lowClausePrefix = colName + " >= "; String highClausePrefix = colName + " < "; - + BigDecimal numSplits = new BigDecimal( - HadoopShim.get().getConfNumMaps(conf)); + ConfigurationHelper.getConfNumMaps(conf)); if (minVal == null && maxVal == null) { // Range is null to null. Return a null split accordingly. diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java index 523fd9a1..cff6110d 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java @@ -42,15 +42,15 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * A InputFormat that reads input data from an SQL table. *

- * DBInputFormat emits LongWritables containing the record number as - * key and DBWritables as value. - * - * The SQL query, and input class can be using one of the two + * DBInputFormat emits LongWritables containing the record number as + * key and DBWritables as value. + * + * The SQL query, and input class can be using one of the two * setInput methods. */ public class DBInputFormat @@ -71,7 +71,7 @@ public void write(DataOutput out) throws IOException { } @Override public void write(PreparedStatement arg0) throws SQLException { } } - + /** * A InputSplit that spans a set of rows. */ @@ -172,7 +172,7 @@ public void setConf(Configuration conf) { public Configuration getConf() { return dbConf.getConf(); } - + public DBConfiguration getDBConf() { return dbConf; } @@ -227,7 +227,7 @@ conf, getConnection(), getDBConf(), conditions, fieldNames, @Override /** {@inheritDoc} */ public RecordReader createRecordReader(InputSplit split, - TaskAttemptContext context) throws IOException, InterruptedException { + TaskAttemptContext context) throws IOException, InterruptedException { return createDBRecordReader((DBInputSplit) split, context.getConfiguration()); @@ -237,7 +237,7 @@ public RecordReader createRecordReader(InputSplit split, @Override public List getSplits(JobContext job) throws IOException { - ResultSet results = null; + ResultSet results = null; Statement statement = null; try { statement = connection.createStatement(); @@ -246,7 +246,7 @@ public List getSplits(JobContext job) throws IOException { results.next(); long count = results.getLong(1); - int chunks = HadoopShim.get().getJobNumMaps(job); + int chunks = ConfigurationHelper.getJobNumMaps(job); long chunkSize = (count / chunks); results.close(); @@ -285,14 +285,14 @@ public List getSplits(JobContext job) throws IOException { } } - /** Returns the query for getting the total number of rows, + /** Returns the query for getting the total number of rows, * subclasses can override this for custom behaviour.*/ 
protected String getCountQuery() { - + if(dbConf.getInputCountQuery() != null) { return dbConf.getInputCountQuery(); } - + StringBuilder query = new StringBuilder(); query.append("SELECT COUNT(*) FROM " + tableName); @@ -304,20 +304,20 @@ protected String getCountQuery() { /** * Initializes the map-part of the job with the appropriate input settings. - * + * * @param job The map-reduce job - * @param inputClass the class object implementing DBWritable, which is the + * @param inputClass the class object implementing DBWritable, which is the * Java object holding tuple fields. * @param tableName The table to read data from - * @param conditions The condition which to select data with, + * @param conditions The condition which to select data with, * eg. '(updated > 20070101 AND length > 0)' * @param orderBy the fieldNames in the orderBy clause. * @param fieldNames The field names in the table * @see #setInput(Job, Class, String, String) */ - public static void setInput(Job job, + public static void setInput(Job job, Class inputClass, - String tableName, String conditions, + String tableName, String conditions, String orderBy, String... fieldNames) { job.setInputFormatClass(DBInputFormat.class); DBConfiguration dbConf = new DBConfiguration(job.getConfiguration()); @@ -327,17 +327,17 @@ public static void setInput(Job job, dbConf.setInputConditions(conditions); dbConf.setInputOrderBy(orderBy); } - + /** * Initializes the map-part of the job with the appropriate input settings. - * + * * @param job The map-reduce job - * @param inputClass the class object implementing DBWritable, which is the + * @param inputClass the class object implementing DBWritable, which is the * Java object holding tuple fields. - * @param inputQuery the input query to select fields. Example : + * @param inputQuery the input query to select fields. 
Example : * "SELECT f1, f2, f3 FROM Mytable ORDER BY f1" - * @param inputCountQuery the input query that returns - * the number of records in the table. + * @param inputCountQuery the input query that returns + * the number of records in the table. * Example : "SELECT COUNT(f1) FROM Mytable" * @see #setInput(Job, Class, String, String, String, String...) */ diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java index cc022c07..a74f9b20 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java @@ -36,24 +36,24 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.StringUtils; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * A OutputFormat that sends the reduce output to a SQL table. - *

- * {@link DBOutputFormat} accepts <key,value> pairs, where - * key has a type extending DBWritable. Returned {@link RecordWriter} - * writes only the key to the database with a batch SQL query. - * + *

+ * {@link DBOutputFormat} accepts <key,value> pairs, where + * key has a type extending DBWritable. Returned {@link RecordWriter} + * writes only the key to the database with a batch SQL query. + * */ -public class DBOutputFormat +public class DBOutputFormat extends OutputFormat { private static final Log LOG = LogFactory.getLog(DBOutputFormat.class); - public void checkOutputSpecs(JobContext context) + public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {} - public OutputCommitter getOutputCommitter(TaskAttemptContext context) + public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException { return new FileOutputCommitter(FileOutputFormat.getOutputPath(context), context); @@ -62,7 +62,7 @@ public OutputCommitter getOutputCommitter(TaskAttemptContext context) /** * A RecordWriter that writes the reduce output to a SQL table. */ - public class DBRecordWriter + public class DBRecordWriter extends RecordWriter { private Connection connection; @@ -81,11 +81,11 @@ public DBRecordWriter(Connection connection public Connection getConnection() { return connection; } - + public PreparedStatement getStatement() { return statement; } - + @Override /** {@inheritDoc} */ public void close(TaskAttemptContext context) throws IOException { @@ -123,7 +123,7 @@ public void write(K key, V value) throws IOException { /** * Constructs the query used as the prepared statement to insert data. 
- * + * * @param table * the table to insert into * @param fieldNames @@ -163,20 +163,20 @@ public String constructQuery(String table, String[] fieldNames) { @Override /** {@inheritDoc} */ - public RecordWriter getRecordWriter(TaskAttemptContext context) + public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException { DBConfiguration dbConf = new DBConfiguration(context.getConfiguration()); String tableName = dbConf.getOutputTableName(); String[] fieldNames = dbConf.getOutputFieldNames(); - + if(fieldNames == null) { fieldNames = new String[dbConf.getOutputFieldCount()]; } - + try { Connection connection = dbConf.getConnection(); PreparedStatement statement = null; - + statement = connection.prepareStatement( constructQuery(tableName, fieldNames)); return new DBRecordWriter(connection, statement); @@ -188,12 +188,12 @@ public RecordWriter getRecordWriter(TaskAttemptContext context) /** * Initializes the reduce-part of the job with * the appropriate output settings. - * + * * @param job The job * @param tableName The table to insert data into * @param fieldNames The field names in the table. */ - public static void setOutput(Job job, String tableName, + public static void setOutput(Job job, String tableName, String... fieldNames) throws IOException { if(fieldNames.length > 0 && fieldNames[0] != null) { DBConfiguration dbConf = setOutput(job, tableName); @@ -201,34 +201,34 @@ public static void setOutput(Job job, String tableName, } else { if (fieldNames.length > 0) { setOutput(job, tableName, fieldNames.length); - } else { + } else { throw new IllegalArgumentException( "Field names must be greater than 0"); } } } - + /** - * Initializes the reduce-part of the job + * Initializes the reduce-part of the job * with the appropriate output settings. - * + * * @param job The job * @param tableName The table to insert data into * @param fieldCount the number of fields in the table. 
*/ - public static void setOutput(Job job, String tableName, + public static void setOutput(Job job, String tableName, int fieldCount) throws IOException { DBConfiguration dbConf = setOutput(job, tableName); dbConf.setOutputFieldCount(fieldCount); } - + private static DBConfiguration setOutput(Job job, String tableName) throws IOException { job.setOutputFormatClass(DBOutputFormat.class); - HadoopShim.get().setJobReduceSpeculativeExecution(job, false); + ConfigurationHelper.setJobReduceSpeculativeExecution(job, false); DBConfiguration dbConf = new DBConfiguration(job.getConfiguration()); - + dbConf.setOutputTableName(tableName); return dbConf; } diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java index e482edb0..6708ceed 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java @@ -42,7 +42,7 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * A InputFormat that reads input data from an SQL table. @@ -167,7 +167,7 @@ protected DBSplitter getSplitter(int sqlDataType) { /** {@inheritDoc} */ public List getSplits(JobContext job) throws IOException { - int targetNumTasks = HadoopShim.get().getJobNumMaps(job); + int targetNumTasks = ConfigurationHelper.getJobNumMaps(job); if (1 == targetNumTasks) { // There's no need to run a bounding vals query; just return a split // that separates nothing. This can be considerably more optimal for a @@ -313,9 +313,9 @@ conf, getConnection(), dbConf, dbConf.getInputConditions(), * We reuse the same field, but it's not strictly ordering it * -- just partitioning the results. 
*/ - public static void setInput(Job job, + public static void setInput(Job job, Class inputClass, - String tableName, String conditions, + String tableName, String conditions, String splitBy, String... fieldNames) { DBInputFormat.setInput(job, inputClass, tableName, conditions, splitBy, fieldNames); diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java index d6edc9f7..d4830d3d 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java @@ -27,11 +27,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * Implement DBSplitter over date/time values. @@ -55,7 +54,7 @@ public List split(Configuration conf, ResultSet results, String lowClausePrefix = colName + " >= "; String highClausePrefix = colName + " < "; - int numSplits = HadoopShim.get().getConfNumMaps(conf); + int numSplits = ConfigurationHelper.getConfNumMaps(conf); if (numSplits < 1) { numSplits = 1; } diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java index 65c87038..1023bc2c 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java @@ -29,7 +29,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * Implement DBSplitter over floating-point values. @@ -62,7 +62,7 @@ public List split(Configuration conf, ResultSet results, // Use this as a hint. May need an extra task if the size doesn't // divide cleanly. 
- int numSplits = HadoopShim.get().getConfNumMaps(conf); + int numSplits = ConfigurationHelper.getConfNumMaps(conf); double splitSize = (maxVal - minVal) / (double) numSplits; if (splitSize < MIN_INCREMENT) { diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java index bbafa780..cd3b5ece 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java @@ -26,7 +26,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * Implement DBSplitter over integer values. @@ -41,7 +41,7 @@ public List split(Configuration conf, ResultSet results, String lowClausePrefix = colName + " >= "; String highClausePrefix = colName + " < "; - int numSplits = HadoopShim.get().getConfNumMaps(conf); + int numSplits = ConfigurationHelper.getConfNumMaps(conf); if (numSplits < 1) { numSplits = 1; } diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java index 09afad41..79aa57c2 100644 --- a/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java +++ b/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java @@ -18,19 +18,18 @@ package com.cloudera.sqoop.mapreduce.db; +import java.math.BigDecimal; import java.sql.ResultSet; import java.sql.SQLException; -import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * Implement DBSplitter over text strings. 
@@ -88,7 +87,7 @@ public List split(Configuration conf, ResultSet results, // Use this as a hint. May need an extra task if the size doesn't // divide cleanly. - int numSplits = HadoopShim.get().getConfNumMaps(conf); + int numSplits = ConfigurationHelper.getConfNumMaps(conf); String lowClausePrefix = colName + " >= '"; String highClausePrefix = colName + " < '"; diff --git a/src/java/com/cloudera/sqoop/shims/HadoopShim.java b/src/java/com/cloudera/sqoop/shims/HadoopShim.java deleted file mode 100644 index f216666f..00000000 --- a/src/java/com/cloudera/sqoop/shims/HadoopShim.java +++ /dev/null @@ -1,147 +0,0 @@ -/** - * Licensed to Cloudera, Inc. under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Cloudera, Inc. licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.cloudera.sqoop.shims; - -import java.io.IOException; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.JobContext; -import org.apache.hadoop.mapreduce.MapContext; - -/** - * In order to be compatible with multiple versions of Hadoop, all parts - * of the Hadoop interface that are not cross-version compatible are - * encapsulated in an implementation of this class. 
Users should use - * the ShimLoader class as a factory to obtain an implementation of - * HadoopShims corresponding to the version of Hadoop currently on the - * classpath. - */ -public abstract class HadoopShim { - - protected HadoopShim() { - } - - /** - * Parse arguments in 'args' via the GenericOptionsParser and - * embed the results in the supplied configuration. - * @param conf the configuration to populate with generic options. - * @param args the arguments to process. - * @return the unused args to be passed to the application itself. - */ - public abstract String [] parseGenericOptions(Configuration conf, - String [] args) throws IOException; - - /** - * @return the number of mapper output records from a job using its counters. - */ - public abstract long getNumMapOutputRecords(Job job) - throws IOException, InterruptedException; - - /** - * @return the number of mapper input records from a job using its counters. - */ - public abstract long getNumMapInputRecords(Job job) - throws IOException, InterruptedException; - - /** - * @return the Configuration property identifying the current task id. - */ - public abstract String getTaskIdProperty(); - - /** - * @return the Configuration property identifying the job's local dir. - */ - public abstract String getJobLocalDirProperty(); - - /** - * Set the (hinted) number of map tasks for a job. - */ - public abstract void setJobNumMaps(Job job, int numMapTasks); - - /** - * Get the (hinted) number of map tasks for a job. - */ - public abstract int getJobNumMaps(JobContext job); - - /** - * Get the (hinted) number of map tasks for a job. - */ - public abstract int getConfNumMaps(Configuration conf); - - /** - * Set the mapper speculative execution property for a job. - */ - public abstract void setJobMapSpeculativeExecution(Job job, - boolean isEnabled); - - /** - * Set the reducer speculative execution property for a job. 
- */ - public abstract void setJobReduceSpeculativeExecution(Job job, - boolean isEnabled); - - /** - * Sets the Jobtracker address to use for a job. - */ - public abstract void setJobtrackerAddr(Configuration conf, String addr); - - /** - * Returns the Configuration property identifying a DBWritable to use. - */ - public abstract String getDbInputClassProperty(); - - /** - * Returns the Configuration property identifying the DB username. - */ - public abstract String getDbUsernameProperty(); - - /** - * Returns the Configuration property identifying the DB password. - */ - public abstract String getDbPasswordProperty(); - - /** - * Returns the Configuration property identifying the DB connect string. - */ - public abstract String getDbUrlProperty(); - - /** - * Returns the Configuration property identifying the DB input table. - */ - public abstract String getDbInputTableNameProperty(); - - /** - * Returns the Configuration property specifying WHERE conditions for the - * db table. - */ - public abstract String getDbInputConditionsProperty(); - - /** - * Returns a mock MapContext that has both an OutputCommitter and an - * InputSplit wired to the specified path. - * Used for testing LargeObjectLoader. - */ - public abstract MapContext getMapContextForIOPath( - Configuration conf, Path p); - - public static final synchronized HadoopShim get() { - return ShimLoader.getHadoopShim(null); - } -} diff --git a/src/java/com/cloudera/sqoop/shims/ShimLoader.java b/src/java/com/cloudera/sqoop/shims/ShimLoader.java deleted file mode 100644 index 4ef62842..00000000 --- a/src/java/com/cloudera/sqoop/shims/ShimLoader.java +++ /dev/null @@ -1,266 +0,0 @@ -/** - * Licensed to Cloudera, Inc. under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Cloudera, Inc. 
licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.cloudera.sqoop.shims; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.util.VersionInfo; -import com.cloudera.sqoop.util.ClassLoaderStack; - -/** - * Provides a service locator for the appropriate shim, dynamically chosen - * based on the Hadoop version in the classpath. - */ -public abstract class ShimLoader { - private static HadoopShim hadoopShim; - - public static final Log LOG = LogFactory.getLog(ShimLoader.class.getName()); - - /** - * Which directory Sqoop checks for shim jars. - */ - public static final String SHIM_JAR_DIR_PROPERTY = "sqoop.shim.jar.dir"; - - /** - * The names of the classes for shimming Hadoop. - * This list must be maintained in the same order as HADOOP_SHIM_MATCHES - */ - private static final List HADOOP_SHIM_CLASSES = - new ArrayList(); - - /** - * Patterns to match to identify which shim jar to load when shimming - * Hadoop. - * This list must be maintained in the same order as HADOOP_SHIM_MATCHES - */ - private static final List HADOOP_SHIM_JARS = - new ArrayList(); - - /** - * The regular expressions compared against the Hadoop version string - * when determining which shim class to load. 
- */ - private static final List HADOOP_SHIM_MATCHES = - new ArrayList(); - - static { - // These regular expressions will be evaluated in order until one matches. - - // CDH3 distribution has versions as follows (with nnn being build number): - // 0.20.2+nnn: for CDH3B2 release - // 0.20.3-CDH3-SNAPSHOT: for CDH3B3 development builds - // 0.20.3+nnn: expected for CDH3B3 release - HADOOP_SHIM_MATCHES.add("0.20.(2|3)((\\+[0-9]+)|(-CDH3-SNAPSHOT))"); - HADOOP_SHIM_CLASSES.add("com.cloudera.sqoop.shims.CDH3Shim"); - HADOOP_SHIM_JARS.add("sqoop-shim-cloudera-.*.jar"); - - // Apache 0.22 trunk. - // Version may have the form "0.22-SNAPSHOT" - HADOOP_SHIM_MATCHES.add("0.22-.*"); - HADOOP_SHIM_CLASSES.add("com.cloudera.sqoop.shims.Apache22HadoopShim"); - HADOOP_SHIM_JARS.add("sqoop-shim-apache-.*.jar"); - - // ... or "0.22.n-SNAPSHOT" - HADOOP_SHIM_MATCHES.add("0.22.\\d+-.*"); - HADOOP_SHIM_CLASSES.add("com.cloudera.sqoop.shims.Apache22HadoopShim"); - HADOOP_SHIM_JARS.add("sqoop-shim-apache-.*.jar"); - - // Apache 0.22 trunk snapshots often compile with "Unknown" version, - // so we default to guessing Apache in this case. - HADOOP_SHIM_MATCHES.add("Unknown"); - HADOOP_SHIM_CLASSES.add("com.cloudera.sqoop.shims.Apache22HadoopShim"); - HADOOP_SHIM_JARS.add("sqoop-shim-apache-.*.jar"); - - // Apache 0.21 uses the same shim jars as 0.22 - HADOOP_SHIM_MATCHES.add("0.21.\\d+(-.*)?"); - HADOOP_SHIM_CLASSES.add("com.cloudera.sqoop.shims.Apache22HadoopShim"); - HADOOP_SHIM_JARS.add("sqoop-shim-apache-.*.jar"); - - } - - /** - * Factory method to get an instance of HadoopShim based on the - * version of Hadoop on the classpath. - * @param conf an optional Configuration whose internal ClassLoader - * should be updated with the jar containing the HadoopShim. 
- */ - public static synchronized HadoopShim getHadoopShim(Configuration conf) { - if (hadoopShim == null) { - hadoopShim = loadShim(HADOOP_SHIM_MATCHES, HADOOP_SHIM_CLASSES, - HADOOP_SHIM_JARS, HadoopShim.class, conf); - } - return hadoopShim; - } - - /** - * Factory method to get an instance of HadoopShim based on the - * version of Hadoop on the classpath. - */ - public static synchronized HadoopShim getHadoopShim() { - return getHadoopShim(null); - } - - @SuppressWarnings("unchecked") - /** - * Actually load the shim for the current Hadoop version. - * @param matchExprs a list of regexes against which the current Hadoop - * version is compared. The first one to hit defines which class/jar to - * use. - * @param classNames a list in the same order as matchExprs. This defines - * what class name to load as the shim class if the Hadoop version matches - * matchExprs[i]. - * @param jarPatterns a list in the same order as matchExprs. This defines - * a pattern to select a jar file from which the shim classes should be - * loaded. - * @param xface the shim interface that the shim class must match. - * @param conf an optional Configuration whose context classloader should - * be updated to the current Thread's contextClassLoader after pushing a - * new ClassLoader on the stack to load this shim jar. 
- */ - private static T loadShim(List matchExprs, - List classNames, List jarPatterns, Class xface, - Configuration conf) { - String version = VersionInfo.getVersion(); - - LOG.debug("Loading shims for class : " + xface.getName()); - LOG.debug("Hadoop version: " + version); - - for (int i = 0; i < matchExprs.size(); i++) { - LOG.debug("Checking: " + matchExprs.get(i)); - if (version.matches(matchExprs.get(i))) { - String className = classNames.get(i); - String jarPattern = jarPatterns.get(i); - - if (LOG.isDebugEnabled()) { - LOG.debug("Version matched regular expression: " + matchExprs.get(i)); - LOG.debug("Trying to load class: " + className); - } - - // Test to see if the class is already on the classpath. - try { - // If we can load the shim directly, we just do so. In this case, - // there's no need to update the Configuration's classloader, - // because we didn't modify the classloader stack. - return getShimInstance(className, xface); - } catch (Exception e) { - // Not already present. We'll need to load a jar for this. - // Ignore this exception. - } - - try { - LOG.debug("Searching for jar matching: " + jarPattern); - loadMatchingShimJar(jarPattern, className); - LOG.debug("Loading shim from jar"); - T shim = getShimInstance(className, xface); - - if (null != conf) { - // Set the context classloader for the base Configuration to - // the current one, so we can load more classes from the shim jar. - conf.setClassLoader(Thread.currentThread().getContextClassLoader()); - } - - return shim; - } catch (Exception e) { - throw new RuntimeException("Could not load shim in class " - + className, e); - } - } - } - - throw new RuntimeException("Could not find appropriate Hadoop shim for " - + version); - } - - /** - * Check the current classloader to see if it can load the prescribed - * class name as an instance of 'xface'. If so, create an instance of - * the class and return it. - * @param className the shim class to attempt to instantiate. 
- * @param xface the interface it must implement. - * @return an instance of className. - */ - private static T getShimInstance(String className, Class xface) - throws ClassNotFoundException, InstantiationException, - IllegalAccessException { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - Class clazz = Class.forName(className, true, cl); - return xface.cast(clazz.newInstance()); - } - - /** - * Look through the shim directory for a jar matching 'jarPattern' - * and classload it. - * @param jarPattern a regular expression which the shim jar's filename - * must match. - * @param className a class to classload from the jar. - */ - private static void loadMatchingShimJar(String jarPattern, String className) - throws IOException { - String jarFilename; - - String shimDirName = System.getProperty(SHIM_JAR_DIR_PROPERTY, "."); - File shimDir = new File(shimDirName); - if (!shimDir.exists()) { - throw new IOException("No such shim directory: " + shimDirName); - } - - String [] candidates = shimDir.list(); - if (null == candidates) { - throw new IOException("Could not list shim directory: " + shimDirName); - } - - for (String candidate : candidates) { - if (candidate.matches(jarPattern)) { - LOG.debug("Found jar matching pattern " + jarPattern + ": " - + candidate); - File jarFile = new File(shimDir, candidate); - String jarFileName = jarFile.toString(); - ClassLoaderStack.addJarFile(jarFileName, className); - LOG.debug("Successfully pushed classloader for jar: " + jarFileName); - return; - } - } - - throw new IOException("Could not load shim jar for pattern: " - + jarPattern); - } - - private ShimLoader() { - // prevent instantiation - } - - /** - * Given the name of a class, try to load the shim jars and return the Class - * object referring to that class. - * @param className a class to load out of the shim jar - * @return the class object loaded from the shim jar for the given class. 
- */ - public static Class getShimClass(String className) - throws ClassNotFoundException { - getHadoopShim(); // Make sure shims are loaded. - return (Class) Class.forName(className, - true, Thread.currentThread().getContextClassLoader()); - } -} diff --git a/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java b/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java index 72f31191..5bc54be7 100644 --- a/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java +++ b/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java @@ -41,7 +41,6 @@ import com.cloudera.sqoop.lib.DelimiterSet; import com.cloudera.sqoop.manager.ConnManager; import com.cloudera.sqoop.metastore.JobData; -import com.cloudera.sqoop.shims.ShimLoader; /** * Layer on top of SqoopTool that provides some basic common code @@ -77,8 +76,8 @@ public abstract class BaseSqoopTool extends SqoopTool { public static final String HIVE_HOME_ARG = "hive-home"; public static final String WAREHOUSE_DIR_ARG = "warehouse-dir"; public static final String TARGET_DIR_ARG = "target-dir"; - public static final String APPEND_ARG = "append"; - + public static final String APPEND_ARG = "append"; + public static final String FMT_SEQUENCEFILE_ARG = "as-sequencefile"; public static final String FMT_TEXTFILE_ARG = "as-textfile"; public static final String HIVE_IMPORT_ARG = "hive-import"; @@ -170,10 +169,6 @@ protected void setManager(ConnManager mgr) { * @return true on success, false on failure. */ protected boolean init(SqoopOptions sqoopOpts) { - - // Make sure shim jar is classloaded early. - ShimLoader.getHadoopShim(sqoopOpts.getConf()); - // Get the connection to the database. 
try { JobData data = new JobData(sqoopOpts, this); @@ -227,7 +222,7 @@ protected boolean hasUnrecognizedArgs(String [] argv, int offset, int len) { LOG.error("Error parsing arguments for " + getToolName() + ":"); printedBanner = true; } - LOG.error("Unrecognized argument: " + argv[i]); + LOG.error("Unrecognized argument: " + argv[i]); unrecognized = true; } } @@ -537,7 +532,7 @@ protected RelatedOptions getHBaseOptions() { } - + /** * Apply common command-line to the state. */ @@ -650,7 +645,7 @@ protected void applyOutputFormatOptions(CommandLine in, SqoopOptions out) in.getOptionValue(ESCAPED_BY_ARG))); out.setExplicitDelims(true); } - + if (in.hasOption(MYSQL_DELIMITERS_ARG)) { out.setOutputEncloseRequired(false); out.setFieldsTerminatedBy(','); diff --git a/src/java/com/cloudera/sqoop/tool/SqoopTool.java b/src/java/com/cloudera/sqoop/tool/SqoopTool.java index 751d2bf9..d8452780 100644 --- a/src/java/com/cloudera/sqoop/tool/SqoopTool.java +++ b/src/java/com/cloudera/sqoop/tool/SqoopTool.java @@ -36,7 +36,7 @@ import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException; import com.cloudera.sqoop.cli.SqoopParser; import com.cloudera.sqoop.cli.ToolOptions; -import com.cloudera.sqoop.shims.ShimLoader; +import com.cloudera.sqoop.config.ConfigurationHelper; /** * Base class for Sqoop subprograms (e.g., SqoopImport, SqoopExport, etc.) @@ -158,7 +158,7 @@ protected void setToolName(String name) { * configureOptions()/applyOptions(). * @return an integer return code for external programs to consume. 0 * represents success; nonzero means failure. - */ + */ public abstract int run(SqoopOptions options); /** @@ -246,7 +246,7 @@ public SqoopOptions parseArguments(String [] args, String [] toolArgs = args; // args after generic parser is done. 
if (useGenericOptions) { try { - toolArgs = ShimLoader.getHadoopShim().parseGenericOptions( + toolArgs = ConfigurationHelper.parseGenericOptions( out.getConf(), args); } catch (IOException ioe) { ParseException pe = new ParseException( diff --git a/src/java/com/cloudera/sqoop/util/Jars.java b/src/java/com/cloudera/sqoop/util/Jars.java index 8c3d62b4..24aefbbc 100644 --- a/src/java/com/cloudera/sqoop/util/Jars.java +++ b/src/java/com/cloudera/sqoop/util/Jars.java @@ -27,7 +27,6 @@ import org.apache.commons.logging.LogFactory; import com.cloudera.sqoop.manager.ConnManager; -import com.cloudera.sqoop.shims.HadoopShim; /** * Utility class; returns the locations of various jars. @@ -47,17 +46,6 @@ public static String getSqoopJarPath() { return getJarPathForClass(Jars.class); } - /** - * @return the path to the currently-loaded shim jar. - */ - public static String getShimJarPath() { - HadoopShim h = HadoopShim.get(); - if (null == h) { - return null; - } - return getJarPathForClass(h.getClass()); - } - /** * Return the jar file path that contains a particular class. * Method mostly cloned from o.a.h.mapred.JobConf.findContainingJar(). diff --git a/src/java/com/cloudera/sqoop/util/TaskId.java b/src/java/com/cloudera/sqoop/util/TaskId.java index e9eb4dfc..baabb1e7 100644 --- a/src/java/com/cloudera/sqoop/util/TaskId.java +++ b/src/java/com/cloudera/sqoop/util/TaskId.java @@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configuration; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationConstants; /** * Utility class; returns task attempt Id of the current job @@ -52,7 +52,8 @@ public static String get(Configuration conf, String defaultVal) { * task attempt can be stored. 
*/ public static File getLocalWorkPath(Configuration conf) throws IOException { - String tmpDir = conf.get(HadoopShim.get().getJobLocalDirProperty(), + String tmpDir = conf.get( + ConfigurationConstants.PROP_JOB_LOCAL_DIRECTORY, "/tmp/"); // Create a local subdir specific to this task attempt. diff --git a/src/scripts/hudson/run-code-quality.sh b/src/scripts/hudson/run-code-quality.sh index 58281bc6..bc77e8c0 100755 --- a/src/scripts/hudson/run-code-quality.sh +++ b/src/scripts/hudson/run-code-quality.sh @@ -39,7 +39,7 @@ fi # Run main compilation step. -${ANT} clean jar-all-shims findbugs javadoc cobertura checkstyle \ +${ANT} clean jar-all findbugs javadoc cobertura checkstyle \ -Divy.home=$IVY_HOME -Dhadoop.dist=${COMPILE_HADOOP_DIST} \ -Dcobertura.home=${COBERTURA_HOME} -Dcobertura.format=xml \ -Dfindbugs.home=${FINDBUGS_HOME} \ diff --git a/src/shims/apache/com/cloudera/sqoop/shims/Apache22HadoopShim.java b/src/shims/apache/com/cloudera/sqoop/shims/Apache22HadoopShim.java deleted file mode 100644 index 6f338eb3..00000000 --- a/src/shims/apache/com/cloudera/sqoop/shims/Apache22HadoopShim.java +++ /dev/null @@ -1,134 +0,0 @@ -/** - * Licensed to Cloudera, Inc. under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Cloudera, Inc. licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.cloudera.sqoop.shims; - -import java.io.IOException; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapreduce.Counters; -import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.JobContext; -import org.apache.hadoop.mapreduce.MapContext; -import org.apache.hadoop.mapreduce.OutputCommitter; -import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.TaskCounter; -import org.apache.hadoop.mapreduce.TaskType; -import org.apache.hadoop.mapreduce.lib.input.FileSplit; -import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; -import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; -import org.apache.hadoop.mapreduce.task.MapContextImpl; -import org.apache.hadoop.mrunit.mapreduce.mock.MockReporter; - -/** - * Hadoop Shim for Apache 0.22. - */ -public class Apache22HadoopShim extends CommonHadoopShim { - @Override - public long getNumMapOutputRecords(Job job) - throws IOException, InterruptedException { - return job.getCounters().findCounter( - TaskCounter.MAP_OUTPUT_RECORDS).getValue(); - } - - @Override - public long getNumMapInputRecords(Job job) - throws IOException, InterruptedException { - return job.getCounters().findCounter( - TaskCounter.MAP_INPUT_RECORDS).getValue(); - } - - @Override - public String getTaskIdProperty() { - return JobContext.TASK_ID; - } - - @Override - public String getJobLocalDirProperty() { - return JobContext.JOB_LOCAL_DIR; - } - - @Override - public void setJobNumMaps(Job job, int numMapTasks) { - job.getConfiguration().setInt(JobContext.NUM_MAPS, numMapTasks); - } - - @Override - public int getJobNumMaps(JobContext job) { - return job.getConfiguration().getInt(JobContext.NUM_MAPS, 1); - } - - @Override - public int getConfNumMaps(Configuration conf) { - return conf.getInt(JobContext.NUM_MAPS, 1); - } - - @Override - public void 
setJobMapSpeculativeExecution(Job job, boolean isEnabled) { - job.setMapSpeculativeExecution(isEnabled); - } - - @Override - public void setJobReduceSpeculativeExecution(Job job, boolean isEnabled) { - job.setReduceSpeculativeExecution(isEnabled); - } - - @Override - public void setJobtrackerAddr(Configuration conf, String addr) { - conf.set(JTConfig.JT_IPC_ADDRESS, "local"); - } - - private static class MockMapContextWithCommitter - extends MapContextImpl { - private Configuration conf; - private Path path; - - public MockMapContextWithCommitter(Configuration c, Path p) { - super(c, new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0), - null, null, null, new MockReporter(new Counters()), null); - - this.conf = c; - this.path = p; - } - - @Override - public InputSplit getInputSplit() { - return new FileSplit(new Path(path, "inputFile"), 0, 0, new String[0]); - } - - @Override - public Configuration getConfiguration() { - return conf; - } - - @Override - public OutputCommitter getOutputCommitter() { - try { - return new FileOutputCommitter(path, this); - } catch (IOException ioe) { - return null; - } - } - } - - @Override - public MapContext getMapContextForIOPath(Configuration conf, Path p) { - return new MockMapContextWithCommitter(conf, p); - } -} diff --git a/src/shims/common/com/cloudera/sqoop/shims/CommonHadoopShim.java b/src/shims/common/com/cloudera/sqoop/shims/CommonHadoopShim.java deleted file mode 100644 index c5caad9b..00000000 --- a/src/shims/common/com/cloudera/sqoop/shims/CommonHadoopShim.java +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Licensed to Cloudera, Inc. under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Cloudera, Inc. licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.cloudera.sqoop.shims; - -import java.io.IOException; - -import org.apache.hadoop.conf.Configuration; -import com.cloudera.sqoop.mapreduce.db.DBConfiguration; -import org.apache.hadoop.util.GenericOptionsParser; - -/** - * Contains code which belongs in all Hadoop shims which is syntactically - * identical in both, but needs to be recompiled against multiple different - * Hadoop versions (e.g., references to 'final static String' fields). - */ -public abstract class CommonHadoopShim extends HadoopShim { - - @Override - public String [] parseGenericOptions(Configuration conf, String [] args) - throws IOException { - // This needs to be shimmed because in Apache Hadoop this can throw - // an IOException, but it does not do so in CDH. We just mandate in - // this method that an IOException is possible. 
- GenericOptionsParser genericParser = new GenericOptionsParser( - conf, args); - return genericParser.getRemainingArgs(); - } - - @Override - public String getDbInputClassProperty() { - return DBConfiguration.INPUT_CLASS_PROPERTY; - } - - @Override - public String getDbUsernameProperty() { - return DBConfiguration.USERNAME_PROPERTY; - } - - @Override - public String getDbPasswordProperty() { - return DBConfiguration.PASSWORD_PROPERTY; - } - - @Override - public String getDbUrlProperty() { - return DBConfiguration.URL_PROPERTY; - } - - @Override - public String getDbInputTableNameProperty() { - return DBConfiguration.INPUT_TABLE_NAME_PROPERTY; - } - - @Override - public String getDbInputConditionsProperty() { - return DBConfiguration.INPUT_CONDITIONS_PROPERTY; - } -} - diff --git a/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java b/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java index 4346d08e..a7f299c3 100644 --- a/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java +++ b/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java @@ -18,7 +18,9 @@ package com.cloudera.sqoop.lib; -import java.io.*; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; import java.sql.ResultSet; import java.sql.SQLException; @@ -29,7 +31,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.MapContext; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import com.cloudera.sqoop.shims.HadoopShim; + +import com.cloudera.sqoop.testutil.MockObjectFactory; import com.cloudera.sqoop.testutil.MockResultSet; /** @@ -61,7 +64,7 @@ public void setUp() throws IOException, InterruptedException { * getInputSplit() to determine where to read our source data from--the same * directory. We are repurposing the same context for both output and input. 
*/ - mapContext = HadoopShim.get().getMapContextForIOPath(conf, outDir); + mapContext = MockObjectFactory.getMapContextForIOPath(conf, outDir); loader = new LargeObjectLoader(mapContext.getConfiguration(), FileOutputFormat.getWorkOutputPath(mapContext)); } diff --git a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java b/src/test/com/cloudera/sqoop/orm/TestParseMethods.java index 58df3e60..acef3880 100644 --- a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java +++ b/src/test/com/cloudera/sqoop/orm/TestParseMethods.java @@ -32,7 +32,7 @@ import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException; -import com.cloudera.sqoop.shims.HadoopShim; +import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.testutil.CommonArgs; import com.cloudera.sqoop.testutil.HsqldbTestServer; import com.cloudera.sqoop.testutil.ImportJobTestCase; @@ -50,7 +50,7 @@ public class TestParseMethods extends ImportJobTestCase { * Create the argv to pass to Sqoop. * @return the argv as an array of strings. */ - private String [] getArgv(boolean includeHadoopFlags, String fieldTerminator, + private String [] getArgv(boolean includeHadoopFlags, String fieldTerminator, String lineTerminator, String encloser, String escape, boolean encloserRequired) { @@ -118,7 +118,7 @@ public void runParseTest(String fieldTerminator, String lineTerminator, job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName); // use local mode in the same JVM. 
- HadoopShim.get().setJobtrackerAddr(job, "local"); + ConfigurationHelper.setJobtrackerAddr(job, "local"); job.set("fs.default.name", "file:///"); String warehouseDir = getWarehouseDir(); @@ -203,7 +203,7 @@ public void testNumericTypes() throws IOException { "92104916282869291837672829102857271948687.287475322", "true", }; - + createTableWithColTypes(types, vals); runParseTest(",", "\\n", "\\\'", "\\", false); } diff --git a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java index 72104fe6..f6d24748 100644 --- a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java +++ b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java @@ -39,7 +39,6 @@ import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.manager.ConnManager; import com.cloudera.sqoop.metastore.JobData; -import com.cloudera.sqoop.shims.ShimLoader; import com.cloudera.sqoop.tool.ImportTool; import junit.framework.TestCase; @@ -168,7 +167,6 @@ protected SqoopOptions getSqoopOptions(Configuration conf) { @Before public void setUp() { - ShimLoader.getHadoopShim(); incrementTableNum(); if (!isLog4jConfigured) { diff --git a/src/shims/cloudera/com/cloudera/sqoop/shims/CDH3Shim.java b/src/test/com/cloudera/sqoop/testutil/MockObjectFactory.java similarity index 53% rename from src/shims/cloudera/com/cloudera/sqoop/shims/CDH3Shim.java rename to src/test/com/cloudera/sqoop/testutil/MockObjectFactory.java index 12122be1..e8bca5f6 100644 --- a/src/shims/cloudera/com/cloudera/sqoop/shims/CDH3Shim.java +++ b/src/test/com/cloudera/sqoop/testutil/MockObjectFactory.java @@ -15,7 +15,8 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.cloudera.sqoop.shims; + +package com.cloudera.sqoop.testutil; import java.io.IOException; @@ -23,8 +24,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.MapContext; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.TaskAttemptID; @@ -33,65 +32,17 @@ import org.apache.hadoop.mrunit.mapreduce.mock.MockReporter; /** - * Hadoop Shim for CDH3 (based on 0.20.2). + * Allows the creation of various mock objects for testing purposes. */ -public class CDH3Shim extends CommonHadoopShim { - @Override - public long getNumMapOutputRecords(Job job) - throws IOException, InterruptedException { - return job.getCounters().findCounter( - "org.apache.hadoop.mapred.Task$Counter", - "MAP_OUTPUT_RECORDS").getValue(); - } +public final class MockObjectFactory { - @Override - public long getNumMapInputRecords(Job job) - throws IOException, InterruptedException { - return job.getCounters().findCounter( - "org.apache.hadoop.mapred.Task$Counter", - "MAP_INPUT_RECORDS").getValue(); - } - - @Override - public String getTaskIdProperty() { - return "mapred.task.id"; - } - - @Override - public String getJobLocalDirProperty() { - return "job.local.dir"; - } - - @Override - public void setJobNumMaps(Job job, int numMapTasks) { - job.getConfiguration().setInt("mapred.map.tasks", numMapTasks); - } - - @Override - public int getJobNumMaps(JobContext job) { - return job.getConfiguration().getInt("mapred.map.tasks", 1); - } - - @Override - public int getConfNumMaps(Configuration conf) { - return conf.getInt("mapred.map.tasks", 1); - } - - @Override - public void setJobMapSpeculativeExecution(Job job, boolean isEnabled) { - job.getConfiguration().setBoolean( - "mapred.map.tasks.speculative.execution", isEnabled); - } - - @Override - public void 
setJobReduceSpeculativeExecution(Job job, boolean isEnabled) { - job.getConfiguration().setBoolean( - "mapred.reduce.tasks.speculative.execution", isEnabled); - } - - @Override - public void setJobtrackerAddr(Configuration conf, String addr) { - conf.set("mapred.job.tracker", addr); + /** + * Returns a mock MapContext that has both an OutputCommitter and an + * InputSplit wired to the specified path. + * Used for testing LargeObjectLoader. + */ + public static MapContext getMapContextForIOPath(Configuration conf, Path p) { + return new MockMapContextWithCommitter(conf, p); } private static class MockMapContextWithCommitter @@ -101,7 +52,7 @@ private static class MockMapContextWithCommitter public MockMapContextWithCommitter(Configuration c, Path p) { super(c, new TaskAttemptID("jt", 0, true, 0, 0), - null, null, null, new MockReporter(new Counters()), null); + null, null, null, new MockReporter(new Counters()), null); this.path = p; this.conf = c; @@ -127,8 +78,7 @@ public Configuration getConfiguration() { } } - @Override - public MapContext getMapContextForIOPath(Configuration conf, Path p) { - return new MockMapContextWithCommitter(conf, p); + private MockObjectFactory() { + // Disallow explicit object creation + } }