Author: arvind
Date: Wed Dec 7 18:33:08 2011
New Revision: 1211583

URL: http://svn.apache.org/viewvc?rev=1211583&view=rev
Log:
SQOOP-397. Make Sqoop work with Hadoop 0.23

(Bilung Lee via Arvind Prabhakar)

Modified:
    incubator/sqoop/trunk/COMPILING.txt
    incubator/sqoop/trunk/build.xml
    incubator/sqoop/trunk/ivy.xml
    incubator/sqoop/trunk/ivy/libraries.properties
    incubator/sqoop/trunk/src/test/com/cloudera/sqoop/TestCompression.java
    incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
    incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
    incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
    incubator/sqoop/trunk/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java

Modified: incubator/sqoop/trunk/COMPILING.txt
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/COMPILING.txt?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/COMPILING.txt (original)
+++ incubator/sqoop/trunk/COMPILING.txt Wed Dec 7 18:33:08 2011
@@ -55,9 +55,9 @@ If you want to build everything (includi
 +ant package+. This will appear in the +build/sqoop-(version)/+ directory.
 
-Sqoop is built against the latest Hadoop distribution available from Cloudera.
-These dependencies are obtained via IVY which downloads the necessary binaries
-from Cloudera maven repository.
+By default, this version of Sqoop is built against Hadoop 0.23, available
+from the Apache Maven repository. These dependencies are obtained via Ivy,
+which downloads the necessary binaries.
 
 == Testing Sqoop
 
@@ -274,3 +274,12 @@ will allow you to edit Sqoop sources in
 dependencies correctly resolved. To compile the jars, you should still use
 ant.
+
+== Using a specific version of Hadoop
+
+Sqoop now defaults to building against Hadoop 0.23 from the Apache Maven
+repository. To build against the previous version, Hadoop 0.20, run:
+
+++++
+ant test -Dhadoopversion=20
+++++

Modified: incubator/sqoop/trunk/build.xml
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/build.xml?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/build.xml (original)
+++ incubator/sqoop/trunk/build.xml Wed Dec 7 18:33:08 2011
@@ -24,6 +24,13 @@
     xmlns:artifact="urn:maven-artifact-ant"
     xmlns:ivy="antlib:org.apache.ivy.ant">
@@ -150,10 +157,22 @@
   -->
@@ -184,13 +203,6 @@
@@ -1062,20 +1074,20 @@
@@ -1083,7 +1095,7 @@
       pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
       sync="true" />
+      conf="hadoop${hadoopversion}test" />

Modified: incubator/sqoop/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/ivy.xml?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/ivy.xml (original)
+++ incubator/sqoop/trunk/ivy.xml Wed Dec 7 18:33:08 2011
@@ -36,14 +36,13 @@ under the License.
+      rev="${hadoop-core.version}" conf="hadoop20->default"/>
+      rev="${hadoop-core.version}" conf="hadoop20test->default"/>
+      rev="${hbase.version}" conf="hbase->default">
@@ -121,9 +137,12 @@ under the License.
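The build.xml and ivy.xml hunks above survive only as attribute fragments in this copy of the diff, but together with the COMPILING.txt change they indicate that a hadoopversion property now selects the Ivy configuration (hadoop20, hadoop23, and their test variants), with 0.23 as the default. One way to confirm which Hadoop actually ended up on the classpath after switching -Dhadoopversion is to ask Hadoop's own VersionInfo class, the same class the guarded tests below rely on. A minimal sketch; the class name PrintHadoopVersion is hypothetical and not part of this patch:

import org.apache.hadoop.util.VersionInfo;

/** Hypothetical helper: reports the Hadoop version Ivy resolved onto the classpath. */
public class PrintHadoopVersion {
  public static void main(String[] args) {
    // With -Dhadoopversion=20 this should report a 0.20.x release;
    // with the default configuration it should report a 0.23.x release.
    System.out.println("Hadoop version: " + VersionInfo.getVersion());
    System.out.println("Build version:  " + VersionInfo.getBuildVersion());
  }
}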
Modified: incubator/sqoop/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/ivy/libraries.properties?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/ivy/libraries.properties (original)
+++ incubator/sqoop/trunk/ivy/libraries.properties Wed Dec 7 18:33:08 2011
@@ -28,15 +28,14 @@
 commons-io.version=1.4
 commons-lang.version=2.4
 commons-logging.version=1.0.4
-# Cloudera Distribution dependency version
-hadoop-core.cloudera.version=0.20.2-cdh3u1
-
+hadoop-core.version=0.20.2-cdh3u1
+hadoop-common.version=0.23.0-SNAPSHOT
 hbase.version=0.90.3-cdh3u1
 zookeeper.version=3.3.3-cdh3u1
 hsqldb.version=1.8.0.10
-ivy.version=2.0.0-rc2
+ivy.version=2.1.0
 junit.version=4.5

Modified: incubator/sqoop/trunk/src/test/com/cloudera/sqoop/TestCompression.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/test/com/cloudera/sqoop/TestCompression.java?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/test/com/cloudera/sqoop/TestCompression.java (original)
+++ incubator/sqoop/trunk/src/test/com/cloudera/sqoop/TestCompression.java Wed Dec 7 18:33:08 2011
@@ -163,6 +163,7 @@ public class TestCompression extends Imp
 
     if (codec == null) {
       codec = new GzipCodec();
+      ReflectionUtils.setConf(codec, getConf());
     }
     Path p = new Path(getDataFilePath().toString()
         + codec.getDefaultExtension());
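The TestCompression change supplies a Configuration to a codec that was created with new. GzipCodec normally receives its Configuration when built through ReflectionUtils.newInstance, and on newer Hadoop releases a codec left without one can fail when it later consults the configuration (for example, to decide whether native zlib is available). A small sketch of the two construction paths under that assumption; the class and method names (CodecConfigSketch and friends) are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecConfigSketch {
  public static CompressionCodec newConfiguredGzipCodec(Configuration conf) {
    // Preferred path: ReflectionUtils.newInstance injects the Configuration
    // into any Configurable codec as part of construction.
    return ReflectionUtils.newInstance(GzipCodec.class, conf);
  }

  public static CompressionCodec directGzipCodec(Configuration conf) {
    // Direct construction leaves the codec without a Configuration, which is
    // what the TestCompression fix works around; setConf supplies it afterwards.
    GzipCodec codec = new GzipCodec();
    ReflectionUtils.setConf(codec, conf);
    return codec;
  }
}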
Modified: incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java (original)
+++ incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java Wed Dec 7 18:33:08 2011
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
 
 import org.junit.After;
 import org.junit.Before;
@@ -120,6 +121,9 @@ public abstract class HBaseTestCase exte
   @Override
   @Before
   public void setUp() {
+    if (!isHadoop20()) {
+      return;
+    }
     HBaseTestCase.recordTestBuildDataProperty();
     try {
       startMaster();
@@ -143,6 +147,9 @@ public abstract class HBaseTestCase exte
   @Override
   @After
   public void tearDown() {
+    if (!isHadoop20()) {
+      return;
+    }
     try {
       shutdown();
     } catch (Exception e) {
@@ -173,4 +180,8 @@ public abstract class HBaseTestCase exte
       table.close();
     }
   }
+
+  protected boolean isHadoop20() {
+    return VersionInfo.getVersion().startsWith("0.20");
+  }
 }

Modified: incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java (original)
+++ incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java Wed Dec 7 18:33:08 2011
@@ -29,6 +29,9 @@ public class TestHBaseImport extends HBa
 
   @Test
   public void testBasicUsage() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     // Create the HBase table in Sqoop as we run the job.
     String [] argv = getArgv(true, "BasicUsage", "BasicColFam", true, null);
     String [] types = { "INT", "INT" };
@@ -40,6 +43,9 @@ public class TestHBaseImport extends HBa
 
   @Test
   public void testMissingTableFails() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     // Test that if the table doesn't exist, we fail unless we
     // explicitly create the table.
     String [] argv = getArgv(true, "MissingTable", "MissingFam", false, null);
@@ -56,6 +62,9 @@ public class TestHBaseImport extends HBa
 
   @Test
   public void testOverwriteSucceeds() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     // Test that we can create a table and then import immediately
     // back on top of it without problem.
     String [] argv = getArgv(true, "OverwriteT", "OverwriteF", true, null);
@@ -71,6 +80,9 @@ public class TestHBaseImport extends HBa
 
   @Test
   public void testStrings() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     String [] argv = getArgv(true, "stringT", "stringF", true, null);
     String [] types = { "INT", "VARCHAR(32)" };
     String [] vals = { "0", "'abc'" };
@@ -81,6 +93,9 @@ public class TestHBaseImport extends HBa
 
   @Test
   public void testNulls() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     String [] argv = getArgv(true, "nullT", "nullF", true, null);
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "null" };
@@ -96,6 +111,9 @@ public class TestHBaseImport extends HBa
 
   @Test
   public void testExitFailure() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "43" };
     createTableWithColTypes(types, vals);

Modified: incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java (original)
+++ incubator/sqoop/trunk/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java Wed Dec 7 18:33:08 2011
@@ -29,6 +29,9 @@ public class TestHBaseQueryImport extend
 
   @Test
   public void testImportFromQuery() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "43" };
     createTableWithColTypes(types, vals);
@@ -47,6 +50,9 @@ public class TestHBaseQueryImport extend
 
   @Test
   public void testExitFailure() throws IOException {
+    if (!isHadoop20()) {
+      return;
+    }
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "43" };
     createTableWithColTypes(types, vals);
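Each guarded test above returns early when the classpath is not Hadoop 0.20, so the HBase-backed cases quietly report success on 0.23, presumably because the pinned HBase and ZooKeeper versions target 0.20-based distributions. With the JUnit 4.5 already declared in ivy/libraries.properties, and assuming the tests ran under a JUnit 4 runner, the same skip could be expressed with org.junit.Assume so the case is reported as skipped rather than passed. This is an alternative sketch, not what the patch does, and GuardedTestSketch is a hypothetical class:

import static org.junit.Assume.assumeTrue;

import org.apache.hadoop.util.VersionInfo;
import org.junit.Test;

public class GuardedTestSketch {
  @Test
  public void testBasicUsage() {
    // Skip (rather than silently pass) when we are not running on Hadoop 0.20.
    assumeTrue(VersionInfo.getVersion().startsWith("0.20"));
    // ... the HBase import assertions would follow here ...
  }
}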
Modified: incubator/sqoop/trunk/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java?rev=1211583&r1=1211582&r2=1211583&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java (original)
+++ incubator/sqoop/trunk/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java Wed Dec 7 18:33:08 2011
@@ -35,6 +35,7 @@ import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.db.*;
 import org.apache.hadoop.mapreduce.lib.output.*;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
 
 /**
  * Test aspects of DataDrivenDBInputFormat.
@@ -82,11 +83,17 @@ public class TestDataDrivenDBInputFormat
   }
 
   public void setUp() throws Exception {
+    if (!isHadoop20()) {
+      return;
+    }
     initialize(DRIVER_CLASS, DB_URL);
     super.setUp();
   }
 
   public void tearDown() throws Exception {
+    if (!isHadoop20()) {
+      return;
+    }
     super.tearDown();
     shutdown();
   }
@@ -165,6 +172,9 @@ public class TestDataDrivenDBInputFormat
   }
 
   public void testDateSplits() throws Exception {
+    if (!isHadoop20()) {
+      return;
+    }
     Statement s = connection.createStatement();
     final String DATE_TABLE = "datetable";
     final String COL = "foo";
@@ -219,4 +229,8 @@ public class TestDataDrivenDBInputFormat
       s.close();
     }
   }
+
+  protected boolean isHadoop20() {
+    return VersionInfo.getVersion().startsWith("0.20");
+  }
 }
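The isHadoop20() helper is now defined twice, once in HBaseTestCase and once in TestDataDrivenDBInputFormat. If more test classes end up needing the same guard, the check could live in a single small utility; HadoopVersionUtil below is a hypothetical name, not something this patch adds:

import org.apache.hadoop.util.VersionInfo;

/** Hypothetical shared home for the version check used by the guarded tests. */
public final class HadoopVersionUtil {
  private HadoopVersionUtil() { }

  /** Returns true when the Hadoop on the classpath is a 0.20-based release. */
  public static boolean isHadoop20() {
    return VersionInfo.getVersion().startsWith("0.20");
  }
}

Callers would then write if (!HadoopVersionUtil.isHadoop20()) { return; } exactly as the guarded tests above do.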