crunch-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jwi...@apache.org
Subject [9/19] git commit: CRUNCH-17: Split out Crunch integration tests. Contributed by Matthias Friedrich.
Date Sat, 14 Jul 2012 17:28:53 GMT
CRUNCH-17: Split out Crunch integration tests. Contributed by Matthias Friedrich.


Project: http://git-wip-us.apache.org/repos/asf/incubator-crunch/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-crunch/commit/42c9e4e5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-crunch/tree/42c9e4e5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-crunch/diff/42c9e4e5

Branch: refs/heads/master
Commit: 42c9e4e53946ad1f4e0a73f0882d3ef03479c655
Parents: f699409
Author: jwills <jwills@apache.org>
Authored: Fri Jul 13 14:12:10 2012 -0700
Committer: jwills <jwills@apache.org>
Committed: Fri Jul 13 14:12:10 2012 -0700

----------------------------------------------------------------------
 crunch/pom.xml                                     |   54 +
 .../it/java/org/apache/crunch/CollectionsIT.java   |  112 +
 crunch/src/it/java/org/apache/crunch/MapsIT.java   |   93 +
 .../it/java/org/apache/crunch/MaterializeIT.java   |  107 +
 .../java/org/apache/crunch/MaterializeToMapIT.java |   78 +
 .../java/org/apache/crunch/MultipleOutputIT.java   |  110 +
 .../org/apache/crunch/PCollectionGetSizeIT.java    |  154 +
 .../java/org/apache/crunch/PTableKeyValueIT.java   |  107 +
 .../src/it/java/org/apache/crunch/PageRankIT.java  |  166 +
 .../it/java/org/apache/crunch/TermFrequencyIT.java |  134 +
 .../src/it/java/org/apache/crunch/TextPairIT.java  |   69 +
 crunch/src/it/java/org/apache/crunch/TfIdfIT.java  |  233 +
 .../org/apache/crunch/TupleNClassCastBugIT.java    |   95 +
 .../java/org/apache/crunch/WordCountHBaseIT.java   |  207 +
 .../src/it/java/org/apache/crunch/WordCountIT.java |  172 +
 .../crunch/impl/mem/MemPipelineFileWritingIT.java  |   53 +
 .../crunch/impl/mr/collect/UnionCollectionIT.java  |  161 +
 .../apache/crunch/io/CompositePathIterableIT.java  |   81 +
 .../crunch/io/avro/AvroFileSourceTargetIT.java     |  153 +
 .../org/apache/crunch/io/avro/AvroReflectIT.java   |  115 +
 .../it/java/org/apache/crunch/lib/AggregateIT.java |  233 +
 .../java/org/apache/crunch/lib/AvroTypeSortIT.java |  148 +
 .../it/java/org/apache/crunch/lib/CogroupIT.java   |  126 +
 .../src/it/java/org/apache/crunch/lib/SetIT.java   |  115 +
 .../src/it/java/org/apache/crunch/lib/SortIT.java  |  334 +
 .../apache/crunch/lib/SpecificAvroGroupByIT.java   |  139 +
 .../apache/crunch/lib/join/FullOuterJoinIT.java    |   51 +
 .../org/apache/crunch/lib/join/InnerJoinIT.java    |   51 +
 .../org/apache/crunch/lib/join/JoinTester.java     |  107 +
 .../apache/crunch/lib/join/LeftOuterJoinIT.java    |   51 +
 .../org/apache/crunch/lib/join/MapsideJoinIT.java  |  119 +
 .../crunch/lib/join/MultiAvroSchemaJoinIT.java     |  116 +
 .../apache/crunch/lib/join/RightOuterJoinIT.java   |   51 +
 crunch/src/it/resources/customers.txt              |    4 +
 crunch/src/it/resources/docs.txt                   |    6 +
 crunch/src/it/resources/employee.avro              |    9 +
 crunch/src/it/resources/letters.txt                |    2 +
 crunch/src/it/resources/log4j.properties           |   11 +
 crunch/src/it/resources/maugham.txt                |29112 +++++++++++++++
 crunch/src/it/resources/orders.txt                 |    4 +
 crunch/src/it/resources/person.avro                |    9 +
 crunch/src/it/resources/set1.txt                   |    4 +
 crunch/src/it/resources/set2.txt                   |    3 +
 crunch/src/it/resources/shakes.txt                 | 3667 ++
 crunch/src/it/resources/urls.txt                   |   11 +
 .../java/org/apache/crunch/CollectionsTest.java    |  112 -
 .../src/test/java/org/apache/crunch/MapsTest.java  |   93 -
 .../java/org/apache/crunch/MaterializeTest.java    |  107 -
 .../org/apache/crunch/MaterializeToMapTest.java    |   78 -
 .../java/org/apache/crunch/MultipleOutputTest.java |  110 -
 .../org/apache/crunch/PCollectionGetSizeTest.java  |  154 -
 .../java/org/apache/crunch/PTableKeyValueTest.java |  107 -
 .../test/java/org/apache/crunch/PageRankTest.java  |  166 -
 .../src/test/java/org/apache/crunch/TFIDFTest.java |  233 -
 .../java/org/apache/crunch/TermFrequencyTest.java  |  134 -
 .../test/java/org/apache/crunch/TextPairTest.java  |   69 -
 .../org/apache/crunch/TupleNClassCastBugTest.java  |   95 -
 .../java/org/apache/crunch/WordCountHBaseTest.java |  207 -
 .../test/java/org/apache/crunch/WordCountTest.java |  172 -
 .../impl/mem/MemPipelineFileWritingTest.java       |   53 -
 .../impl/mr/collect/UnionCollectionTest.java       |  161 -
 .../crunch/io/CompositePathIterableTest.java       |   81 -
 .../crunch/io/avro/AvroFileSourceTargetTest.java   |  153 -
 .../org/apache/crunch/io/avro/AvroReflectTest.java |  115 -
 .../java/org/apache/crunch/lib/AggregateTest.java  |  232 -
 .../org/apache/crunch/lib/AvroTypeSortTest.java    |  148 -
 .../java/org/apache/crunch/lib/CogroupTest.java    |  126 -
 .../test/java/org/apache/crunch/lib/SetTest.java   |  115 -
 .../test/java/org/apache/crunch/lib/SortTest.java  |  333 -
 .../apache/crunch/lib/SpecificAvroGroupByTest.java |  139 -
 .../apache/crunch/lib/join/FullOuterJoinTest.java  |   51 -
 .../org/apache/crunch/lib/join/InnerJoinTest.java  |   51 -
 .../org/apache/crunch/lib/join/JoinTester.java     |  107 -
 .../apache/crunch/lib/join/LeftOuterJoinTest.java  |   51 -
 .../apache/crunch/lib/join/MapsideJoinTest.java    |  119 -
 .../crunch/lib/join/MultiAvroSchemaJoinTest.java   |  116 -
 .../apache/crunch/lib/join/RightOuterJoinTest.java |   51 -
 crunch/src/test/resources/customers.txt            |    4 -
 crunch/src/test/resources/docs.txt                 |    6 -
 crunch/src/test/resources/employee.avro            |    9 -
 crunch/src/test/resources/letters.txt              |    2 -
 crunch/src/test/resources/log4j.properties         |   11 -
 crunch/src/test/resources/maugham.txt              |29112 ---------------
 crunch/src/test/resources/orders.txt               |    4 -
 crunch/src/test/resources/person.avro              |    9 -
 crunch/src/test/resources/set1.txt                 |    4 -
 crunch/src/test/resources/set2.txt                 |    3 -
 crunch/src/test/resources/shakes.txt               | 3667 --
 crunch/src/test/resources/urls.txt                 |   11 -
 89 files changed, 36937 insertions(+), 36881 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/pom.xml
----------------------------------------------------------------------
diff --git a/crunch/pom.xml b/crunch/pom.xml
index 8f207d9..7b542be 100644
--- a/crunch/pom.xml
+++ b/crunch/pom.xml
@@ -163,6 +163,60 @@ under the License.
           </execution>
         </executions>
       </plugin>
+      <!-- We put slow-running tests into src/it and run them during the
+           integration-test phase using the failsafe plugin. This way
+           developers can run unit tests conveniently from the IDE or via
+           "mvn package" from the command line without triggering time
+           consuming integration tests. -->
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>1.7</version>
+        <executions>
+          <execution>
+            <id>add-test-source</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>add-test-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/it/java</source>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>add-test-resource</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>add-test-resource</goal>
+            </goals>
+            <configuration>
+              <resources>
+                  <resource>
+                    <directory>src/it/resources</directory>
+                  </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <version>2.12</version>
+        <configuration>
+          <testSourceDirectory>src/it/java</testSourceDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>integration-test</goal>
+              <goal>verify</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/CollectionsIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/CollectionsIT.java b/crunch/src/it/java/org/apache/crunch/CollectionsIT.java
new file mode 100644
index 0000000..d66624e
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/CollectionsIT.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Collection;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mem.MemPipeline;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
+@SuppressWarnings("serial")
+public class CollectionsIT {
+  
+  public static class AggregateStringListFn implements CombineFn.Aggregator<Collection<String>> {
+    private final Collection<String> rtn = Lists.newArrayList();
+    
+    @Override
+    public void reset() {
+      rtn.clear();
+    }
+    
+    @Override
+    public void update(Collection<String> values) {
+      rtn.addAll(values);
+    }      
+    
+    @Override
+    public Iterable<Collection<String>> results() {
+      return ImmutableList.of(rtn);
+    }
+  }
+  
+  public static PTable<String, Collection<String>> listOfCharcters(PCollection<String> lines, PTypeFamily typeFamily) {
+     
+    return lines.parallelDo(new DoFn<String, Pair<String, Collection<String>>>() {
+      @Override
+      public void process(String line, Emitter<Pair<String, Collection<String>>> emitter) {
+        for (String word : line.split("\\s+")) {
+          Collection<String> characters = Lists.newArrayList();
+          for(char c : word.toCharArray()) {
+            characters.add(String.valueOf(c));
+          }
+          emitter.emit(Pair.of(word, characters));
+        }
+      }
+    }, typeFamily.tableOf(typeFamily.strings(), typeFamily.collections(typeFamily.strings())))
+    .groupByKey()
+    .combineValues(CombineFn.<String, Collection<String>>aggregator(new AggregateStringListFn()));
+  }
+  
+  @Test
+  public void testWritables() throws IOException {
+    run(new MRPipeline(CollectionsIT.class), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testAvro() throws IOException {
+    run(new MRPipeline(CollectionsIT.class), AvroTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testInMemoryWritables() throws IOException {
+    run(MemPipeline.getInstance(), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testInMemoryAvro() throws IOException {
+    run(MemPipeline.getInstance(), AvroTypeFamily.getInstance());
+  }
+  
+  public void run(Pipeline pipeline, PTypeFamily typeFamily) throws IOException {
+	String shakesInputPath = FileHelper.createTempCopyOf("shakes.txt");
+    
+    PCollection<String> shakespeare = pipeline.readTextFile(shakesInputPath);
+    Iterable<Pair<String, Collection<String>>> lines = listOfCharcters(shakespeare, typeFamily).materialize();
+    
+    boolean passed = false;
+    for (Pair<String, Collection<String>> line : lines) {
+      if(line.first().startsWith("yellow")) {
+        passed = true;
+        break;
+      }
+    }
+    pipeline.done();
+    assertTrue(passed);
+  }  
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/MapsIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/MapsIT.java b/crunch/src/it/java/org/apache/crunch/MapsIT.java
new file mode 100644
index 0000000..cedb9a3
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/MapsIT.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import java.util.Map;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+
+public class MapsIT {
+
+  @Test
+  public void testWritables() throws Exception {
+	run(WritableTypeFamily.getInstance());
+  }
+  
+  @Test
+  public void testAvros() throws Exception {
+	run(AvroTypeFamily.getInstance());
+  }
+  
+  public static void run(PTypeFamily typeFamily) throws Exception {
+	Pipeline pipeline = new MRPipeline(MapsIT.class);
+    String shakesInputPath = FileHelper.createTempCopyOf("shakes.txt");
+    PCollection<String> shakespeare = pipeline.readTextFile(shakesInputPath);
+    Iterable<Pair<String, Map<String, Long>>> output = shakespeare.parallelDo(
+      new DoFn<String, Pair<String, Map<String, Long>>>() {
+	    @Override
+	    public void process(String input,
+		    Emitter<Pair<String, Map<String, Long>>> emitter) {
+		  String last = null;
+		  for (String word : input.toLowerCase().split("\\W+")) {
+		    if (!word.isEmpty()) {
+			  String firstChar = word.substring(0, 1);
+		      if (last != null) {
+		    	Map<String, Long> cc = ImmutableMap.of(firstChar, 1L);
+			    emitter.emit(Pair.of(last, cc));
+		      }
+		      last = firstChar;
+		    }
+		  }
+	    }
+      }, typeFamily.tableOf(typeFamily.strings(), typeFamily.maps(typeFamily.longs())))
+      .groupByKey()
+      .combineValues(new CombineFn<String, Map<String, Long>>() {
+	    @Override
+	    public void process(Pair<String, Iterable<Map<String, Long>>> input,
+		    Emitter<Pair<String, Map<String, Long>>> emitter) {
+		  Map<String, Long> agg = Maps.newHashMap();
+		  for (Map<String, Long> in : input.second()) {
+		    for (Map.Entry<String, Long> e : in.entrySet()) {
+			  if (!agg.containsKey(e.getKey())) {
+			    agg.put(e.getKey(), e.getValue());
+			  } else {
+			    agg.put(e.getKey(), e.getValue() + agg.get(e.getKey()));
+			  }
+		    }
+		  }
+		  emitter.emit(Pair.of(input.first(), agg));
+	    }
+	  }).materialize();
+    boolean passed = false;
+    for (Pair<String, Map<String, Long>> v : output) {
+      if (v.first() == "k" && v.second().get("n") == 8L) {
+    	passed = true;
+    	break;
+      }
+    }
+    pipeline.done();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/MaterializeIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/MaterializeIT.java b/crunch/src/it/java/org/apache/crunch/MaterializeIT.java
new file mode 100644
index 0000000..95e239d
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/MaterializeIT.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mem.MemPipeline;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.collect.Lists;
+
+public class MaterializeIT {
+
+	/** Filter that rejects everything. */
+	@SuppressWarnings("serial")
+	private static class FalseFilterFn extends FilterFn<String> {
+
+		@Override
+		public boolean accept(final String input) {
+			return false;
+		}
+	}
+
+	@Test
+	public void testMaterializeInput_Writables() throws IOException {
+		runMaterializeInput(new MRPipeline(MaterializeIT.class), WritableTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeInput_Avro() throws IOException {
+		runMaterializeInput(new MRPipeline(MaterializeIT.class), AvroTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeInput_InMemoryWritables() throws IOException {
+		runMaterializeInput(MemPipeline.getInstance(), WritableTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeInput_InMemoryAvro() throws IOException {
+		runMaterializeInput(MemPipeline.getInstance(), AvroTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeEmptyIntermediate_Writables() throws IOException {
+		runMaterializeEmptyIntermediate(new MRPipeline(MaterializeIT.class),
+				WritableTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeEmptyIntermediate_Avro() throws IOException {
+		runMaterializeEmptyIntermediate(new MRPipeline(MaterializeIT.class),
+				AvroTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeEmptyIntermediate_InMemoryWritables() throws IOException {
+		runMaterializeEmptyIntermediate(MemPipeline.getInstance(), WritableTypeFamily.getInstance());
+	}
+
+	@Test
+	public void testMaterializeEmptyIntermediate_InMemoryAvro() throws IOException {
+		runMaterializeEmptyIntermediate(MemPipeline.getInstance(), AvroTypeFamily.getInstance());
+	}
+
+	public void runMaterializeInput(Pipeline pipeline, PTypeFamily typeFamily) throws IOException {
+		List<String> expectedContent = Lists.newArrayList("b", "c", "a", "e");
+		String inputPath = FileHelper.createTempCopyOf("set1.txt");
+
+		PCollection<String> lines = pipeline.readTextFile(inputPath);
+		assertEquals(expectedContent, Lists.newArrayList(lines.materialize()));
+		pipeline.done();
+	}
+
+	public void runMaterializeEmptyIntermediate(Pipeline pipeline, PTypeFamily typeFamily)
+			throws IOException {
+		String inputPath = FileHelper.createTempCopyOf("set1.txt");
+		PCollection<String> empty = pipeline.readTextFile(inputPath).filter(new FalseFilterFn());
+
+		assertTrue(Lists.newArrayList(empty.materialize()).isEmpty());
+		pipeline.done();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/MaterializeToMapIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/MaterializeToMapIT.java b/crunch/src/it/java/org/apache/crunch/MaterializeToMapIT.java
new file mode 100644
index 0000000..6215dd9
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/MaterializeToMapIT.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mem.MemPipeline;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import com.google.common.collect.ImmutableList;
+
+public class MaterializeToMapIT {
+  
+  static final ImmutableList<Pair<Integer,String>> kvPairs = 
+      ImmutableList.of(
+          Pair.of(0, "a"),
+          Pair.of(1, "b"),
+          Pair.of(2, "c"),
+          Pair.of(3, "e"));
+  
+  public void assertMatches(Map<Integer,String> m) {
+    for (Integer k : m.keySet()) {
+      System.out.println(k + " " + kvPairs.get(k).second() + " " + m.get(k));
+      assertTrue(kvPairs.get(k).second().equals(m.get(k)));
+    }
+  }
+  
+  @Test
+  public void testMemMaterializeToMap() {
+    assertMatches(MemPipeline.tableOf(kvPairs).materializeToMap());
+  }
+  
+  private static class Set1Mapper extends MapFn<String,Pair<Integer,String>> {
+    @Override
+    public Pair<Integer, String> map(String input) {
+      
+      int k = -1;
+      if (input.equals("a")) k = 0;
+      else if (input.equals("b")) k = 1;
+      else if (input.equals("c")) k = 2;
+      else if (input.equals("e")) k = 3;
+      return Pair.of(k, input);
+    }
+  }
+  
+  @Test
+  public void testMRMaterializeToMap() throws IOException {
+    Pipeline p = new MRPipeline(MaterializeToMapIT.class);
+    String inputFile = FileHelper.createTempCopyOf("set1.txt");
+    PCollection<String> c = p.readTextFile(inputFile);
+    PTypeFamily tf = c.getTypeFamily();
+    PTable<Integer,String> t = c.parallelDo(new Set1Mapper(), tf.tableOf(tf.ints(), tf.strings()));
+    Map<Integer, String> m = t.materializeToMap();
+    assertMatches(m);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/MultipleOutputIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/MultipleOutputIT.java b/crunch/src/it/java/org/apache/crunch/MultipleOutputIT.java
new file mode 100644
index 0000000..63120e5
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/MultipleOutputIT.java
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.At;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.io.Files;
+
+public class MultipleOutputIT {
+  
+  public static PCollection<String> evenCountLetters(PCollection<String> words, PTypeFamily typeFamily) {
+    return words.parallelDo("even", new FilterFn<String>(){
+
+        @Override
+        public boolean accept(String input) {
+            return input.length() % 2 == 0;
+        }}, typeFamily.strings());
+  }
+  
+  public static PCollection<String> oddCountLetters(PCollection<String> words, PTypeFamily typeFamily) {
+      return words.parallelDo("odd", new FilterFn<String>(){
+
+        @Override
+        public boolean accept(String input) {
+            return input.length() % 2 != 0;
+        }}, typeFamily.strings());
+       
+    }
+  
+  public static PTable<String, Long> substr(PTable<String, Long> ptable) {
+	return ptable.parallelDo(new DoFn<Pair<String, Long>, Pair<String, Long>>() {
+	  public void process(Pair<String, Long> input,
+		  Emitter<Pair<String, Long>> emitter) {
+		if (input.first().length() > 0) {
+		  emitter.emit(Pair.of(input.first().substring(0, 1), input.second()));
+		}
+	  }      
+    }, ptable.getPTableType());
+  }
+  
+  @Test
+  public void testWritables() throws IOException {
+    run(new MRPipeline(MultipleOutputIT.class), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testAvro() throws IOException {
+    run(new MRPipeline(MultipleOutputIT.class), AvroTypeFamily.getInstance());
+  }
+ 
+  
+  public void run(Pipeline pipeline, PTypeFamily typeFamily) throws IOException {
+	String inputPath = FileHelper.createTempCopyOf("letters.txt");
+	File outputEven = FileHelper.createOutputPath();
+	File outputOdd = FileHelper.createOutputPath();
+	String outputPathEven = outputEven.getAbsolutePath();
+	String outputPathOdd = outputOdd.getAbsolutePath();
+	
+    PCollection<String> words = pipeline.read(
+         At.textFile(inputPath, typeFamily.strings()));
+    
+    PCollection<String> evenCountWords = evenCountLetters(words, typeFamily);
+    PCollection<String> oddCountWords = oddCountLetters(words, typeFamily);
+    pipeline.writeTextFile(evenCountWords, outputPathEven);
+    pipeline.writeTextFile(oddCountWords, outputPathOdd);
+    
+    pipeline.done();
+   
+    checkFileContents(outputPathEven, Arrays.asList("bb"));
+    checkFileContents(outputPathOdd, Arrays.asList("a"));
+   
+	outputEven.deleteOnExit();
+	outputOdd.deleteOnExit();
+  }  
+  
+  private void checkFileContents(String filePath, List<String> expected) throws IOException{
+    File outputFile = new File(filePath, "part-m-00000");
+    List<String> lines = Files.readLines(outputFile, Charset.defaultCharset());
+    assertEquals(expected, lines);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/PCollectionGetSizeIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/PCollectionGetSizeIT.java b/crunch/src/it/java/org/apache/crunch/PCollectionGetSizeIT.java
new file mode 100644
index 0000000..14a3b3f
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/PCollectionGetSizeIT.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.apache.crunch.io.At.sequenceFile;
+import static org.apache.crunch.io.At.textFile;
+import static org.apache.crunch.types.writable.Writables.strings;
+import static com.google.common.collect.Lists.newArrayList;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertThat;
+
+import java.io.IOException;
+
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import org.apache.crunch.impl.mem.MemPipeline;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.test.FileHelper;
+
+/**
+ * Integration tests for PCollection#getSize() and materialize() on empty
+ * inputs and empty intermediate collections, exercising both the MapReduce
+ * (MRPipeline) and in-memory (MemPipeline) implementations.
+ */
+public class PCollectionGetSizeIT {
+
+    // Paths are re-created fresh for every test by setUp().
+    private String emptyInputPath;
+    private String nonEmptyInputPath;
+    private String outputPath;
+
+    /** Filter that rejects everything. */
+    @SuppressWarnings("serial")
+    private static class FalseFilterFn extends FilterFn<String> {
+
+        @Override
+        public boolean accept(final String input) {
+            return false;
+        }
+    }
+
+    @Before
+    public void setUp() throws IOException {
+        emptyInputPath = FileHelper.createTempCopyOf("emptyTextFile.txt");
+        nonEmptyInputPath = FileHelper.createTempCopyOf("set1.txt");
+        outputPath = FileHelper.createOutputPath().getAbsolutePath();
+    }
+
+    @Test
+    public void testGetSizeOfEmptyInput_MRPipeline() throws IOException {
+        testCollectionGetSizeOfEmptyInput(new MRPipeline(this.getClass()));
+    }
+
+    @Test
+    public void testGetSizeOfEmptyInput_MemPipeline() throws IOException {
+        testCollectionGetSizeOfEmptyInput(MemPipeline.getInstance());
+    }
+
+    // An empty input file should report a size of exactly zero bytes.
+    private void testCollectionGetSizeOfEmptyInput(Pipeline pipeline) throws IOException {
+
+        assertThat(pipeline.read(textFile(emptyInputPath)).getSize(), is(0L));
+    }
+
+    @Test
+    public void testMaterializeEmptyInput_MRPipeline() throws IOException {
+        testMaterializeEmptyInput(new MRPipeline(this.getClass()));
+    }
+
+    // NOTE(review): "Imput" is a typo for "Input" in this test name.
+    @Test
+    public void testMaterializeEmptyImput_MemPipeline() throws IOException {
+        testMaterializeEmptyInput(MemPipeline.getInstance());
+    }
+
+    // Materializing an empty input should yield an iterator with no elements.
+    private void testMaterializeEmptyInput(Pipeline pipeline) throws IOException {
+        assertThat(newArrayList(pipeline.readTextFile(emptyInputPath).materialize().iterator()).size(), is(0));
+    }
+
+    @Test
+    public void testGetSizeOfEmptyIntermediatePCollection_MRPipeline() throws IOException {
+
+        PCollection<String> emptyIntermediate = createPesistentEmptyIntermediate(new MRPipeline(this.getClass()));
+
+        assertThat(emptyIntermediate.getSize(), is(0L));
+    }
+
+    @Test
+    @Ignore("GetSize of a DoCollection is only an estimate based on scale factor, so we can't count on it being reported as 0")
+    public void testGetSizeOfEmptyIntermediatePCollection_NoSave_MRPipeline() throws IOException {
+
+        PCollection<String> data = new MRPipeline(this.getClass()).readTextFile(nonEmptyInputPath);
+
+        PCollection<String> emptyPCollection = data.filter(new FalseFilterFn());
+
+        assertThat(emptyPCollection.getSize(), is(0L));
+    }
+
+    @Test
+    public void testGetSizeOfEmptyIntermediatePCollection_MemPipeline() {
+
+        PCollection<String> emptyIntermediate = createPesistentEmptyIntermediate(MemPipeline.getInstance());
+
+        assertThat(emptyIntermediate.getSize(), is(0L));
+    }
+
+    @Test
+    public void testMaterializeOfEmptyIntermediatePCollection_MRPipeline() throws IOException {
+
+        PCollection<String> emptyIntermediate = createPesistentEmptyIntermediate(new MRPipeline(this.getClass()));
+
+        assertThat(newArrayList(emptyIntermediate.materialize()).size(), is(0));
+    }
+
+    @Test
+    public void testMaterializeOfEmptyIntermediatePCollection_MemPipeline() {
+
+        PCollection<String> emptyIntermediate = createPesistentEmptyIntermediate(MemPipeline.getInstance());
+
+        assertThat(newArrayList(emptyIntermediate.materialize()).size(), is(0));
+    }
+
+    // Filters a non-empty input down to nothing, persists the (empty) result
+    // as a sequence file, then reads it back as a new PCollection.
+    // NOTE(review): "Pesistent" is a typo for "Persistent" in this method name
+    // (private, so safe to rename in a follow-up change).
+    private PCollection<String> createPesistentEmptyIntermediate(Pipeline pipeline) {
+
+        PCollection<String> data = pipeline.readTextFile(nonEmptyInputPath);
+
+        PCollection<String> emptyPCollection = data.filter(new FalseFilterFn());
+
+        emptyPCollection.write(sequenceFile(outputPath, strings()));
+
+        pipeline.run();
+
+        return pipeline.read(sequenceFile(outputPath, strings()));
+    }
+
+    // getSize() on a non-existent input is expected to fail fast.
+    @Test(expected = IllegalStateException.class)
+    public void testExpectExceptionForGettingSizeOfNonExistingFile_MRPipeline() throws IOException {
+        new MRPipeline(this.getClass()).readTextFile("non_existing.file").getSize();
+    }
+
+    @Test(expected = IllegalStateException.class)
+    public void testExpectExceptionForGettingSizeOfNonExistingFile_MemPipeline() {
+        MemPipeline.getInstance().readTextFile("non_existing.file").getSize();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/PTableKeyValueIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/PTableKeyValueIT.java b/crunch/src/it/java/org/apache/crunch/PTableKeyValueIT.java
new file mode 100644
index 0000000..197dfa9
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/PTableKeyValueIT.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+
+import junit.framework.Assert;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.At;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.collect.Lists;
+
+/**
+ * Parameterized integration test verifying PTable#keys() and PTable#values():
+ * runs once per type family (Writable and Avro), builds a table mapping each
+ * input line's uppercase form to the line itself, and checks that the
+ * materialized keys and values correspond pairwise.
+ */
+@RunWith(value = Parameterized.class)
+public class PTableKeyValueIT implements Serializable {
+
+	private static final long serialVersionUID = 4374227704751746689L;
+
+	// transient: the test instance is Serializable (for the anonymous DoFn),
+	// but these fields are per-run state and must not be serialized.
+	private transient PTypeFamily typeFamily;
+	private transient MRPipeline pipeline;
+	private transient String inputFile;
+
+	@Before
+	public void setUp() throws IOException {
+		pipeline = new MRPipeline(PTableKeyValueIT.class);
+		inputFile = FileHelper.createTempCopyOf("set1.txt");
+	}
+
+	@After
+	public void tearDown() {
+		pipeline.done();
+	}
+
+	public PTableKeyValueIT(PTypeFamily typeFamily) {
+		this.typeFamily = typeFamily;
+	}
+
+	// One run per type family supported by Crunch.
+	@Parameters
+	public static Collection<Object[]> data() {
+		Object[][] data = new Object[][] {
+				{ WritableTypeFamily.getInstance() },
+				{ AvroTypeFamily.getInstance() } };
+		return Arrays.asList(data);
+	}
+
+	@Test
+	public void testKeysAndValues() throws Exception {
+
+		PCollection<String> collection = pipeline.read(At.textFile(inputFile,
+				typeFamily.strings()));
+
+		// Key = uppercased line, value = original line, so each key can be
+		// checked against its value below.
+		PTable<String, String> table = collection.parallelDo(
+				new DoFn<String, Pair<String, String>>() {
+
+					@Override
+					public void process(String input,
+							Emitter<Pair<String, String>> emitter) {
+						emitter.emit(Pair.of(input.toUpperCase(), input));
+
+					}
+				}, typeFamily.tableOf(typeFamily.strings(),
+						typeFamily.strings()));
+
+		PCollection<String> keys = table.keys();
+		PCollection<String> values = table.values();
+
+		ArrayList<String> keyList = Lists.newArrayList(keys.materialize()
+				.iterator());
+		ArrayList<String> valueList = Lists.newArrayList(values.materialize()
+				.iterator());
+
+		// NOTE(review): this assumes keys() and values() materialize in the
+		// same order — confirm that guarantee holds for both pipelines.
+		Assert.assertEquals(keyList.size(), valueList.size());
+		for (int i = 0; i < keyList.size(); i++) {
+			Assert.assertEquals(keyList.get(i), valueList.get(i).toUpperCase());
+		}
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/PageRankIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/PageRankIT.java b/crunch/src/it/java/org/apache/crunch/PageRankIT.java
new file mode 100644
index 0000000..8eaa49b
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/PageRankIT.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mem.MemPipeline;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.lib.Aggregate;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PType;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.avro.Avros;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import org.apache.crunch.util.PTypes;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+/**
+ * Integration test running an iterative, damped PageRank computation over a
+ * small URL link graph (urls.txt), with the per-page state serialized through
+ * several different PTypes (Avro reflection, JSON, Smile/BSON) and both the
+ * MapReduce and in-memory pipelines.
+ */
+public class PageRankIT {
+
+  // Mutable per-page PageRank state; public fields and the no-arg constructor
+  // are presumably required for Avro reflection / JSON mapping — confirm.
+  public static class PageRankData {
+	public float score;
+	public float lastScore;
+	public List<String> urls;
+	
+	public PageRankData() { }
+	
+	public PageRankData(float score, float lastScore, Iterable<String> urls) {
+	  this.score = score;
+	  this.lastScore = lastScore;
+	  this.urls = Lists.newArrayList(urls);
+	}
+	
+	// Next iteration's state: newScore becomes current, current becomes last.
+	public PageRankData next(float newScore) {
+	  return new PageRankData(newScore, score, urls);
+	}
+	
+	// Score contributed to each outbound link (divided evenly).
+	// NOTE(review): divides by urls.size() — a page with no outbound links
+	// would divide by zero; confirm the fixture has none.
+	public float propagatedScore() {
+	  return score / urls.size();
+	}
+	
+	@Override
+	public String toString() {
+	  return score + " " + lastScore + " " + urls;
+	}
+  }
+  
+  @Test public void testAvroReflect() throws Exception {
+	PTypeFamily tf = AvroTypeFamily.getInstance();
+	PType<PageRankData> prType = Avros.reflects(PageRankData.class);
+    run(new MRPipeline(PageRankIT.class), prType, tf);	
+  }
+  
+  @Test public void testAvroMReflectInMemory() throws Exception {
+    PTypeFamily tf = AvroTypeFamily.getInstance();
+    PType<PageRankData> prType = Avros.reflects(PageRankData.class);
+    run(MemPipeline.getInstance(), prType, tf);        
+  }
+  
+  @Test public void testAvroJSON() throws Exception {
+	PTypeFamily tf = AvroTypeFamily.getInstance();
+	PType<PageRankData> prType = PTypes.jsonString(PageRankData.class, tf);
+    run(new MRPipeline(PageRankIT.class), prType, tf);
+  }
+
+  @Test public void testAvroBSON() throws Exception {
+	PTypeFamily tf = AvroTypeFamily.getInstance();
+	PType<PageRankData> prType = PTypes.smile(PageRankData.class, tf);
+    run(new MRPipeline(PageRankIT.class), prType, tf);
+  }
+  
+  @Test public void testWritablesJSON() throws Exception {
+	PTypeFamily tf = WritableTypeFamily.getInstance();
+	PType<PageRankData> prType = PTypes.jsonString(PageRankData.class, tf);
+    run(new MRPipeline(PageRankIT.class), prType, tf);
+  }
+
+  @Test public void testWritablesBSON() throws Exception {
+	PTypeFamily tf = WritableTypeFamily.getInstance();
+	PType<PageRankData> prType = PTypes.smile(PageRankData.class, tf);
+    run(new MRPipeline(PageRankIT.class), prType, tf);
+  }
+  
+  // One damped PageRank iteration: propagate each page's score to its
+  // outbound links, then combine the incoming contributions per page with
+  // damping factor d (new score = d + (1 - d) * sum of contributions).
+  public static PTable<String, PageRankData> pageRank(PTable<String, PageRankData> input, final float d) {
+    PTypeFamily ptf = input.getTypeFamily();
+    PTable<String, Float> outbound = input.parallelDo(
+        new DoFn<Pair<String, PageRankData>, Pair<String, Float>>() {
+          @Override
+          public void process(Pair<String, PageRankData> input, Emitter<Pair<String, Float>> emitter) {
+            PageRankData prd = input.second();
+            for (String link : prd.urls) {
+              emitter.emit(Pair.of(link, prd.propagatedScore()));
+            }
+          }
+        }, ptf.tableOf(ptf.strings(), ptf.floats()));
+    
+    // Cogroup pairs each page's current state with its incoming scores.
+    // getOnlyElement assumes every linked-to page also appears as a key in
+    // the input table — holds for the urls.txt fixture.
+    return input.cogroup(outbound).parallelDo(
+        new MapFn<Pair<String, Pair<Collection<PageRankData>, Collection<Float>>>, Pair<String, PageRankData>>() {
+              @Override
+              public Pair<String, PageRankData> map(Pair<String, Pair<Collection<PageRankData>, Collection<Float>>> input) {
+                PageRankData prd = Iterables.getOnlyElement(input.second().first());
+                Collection<Float> propagatedScores = input.second().second();
+                float sum = 0.0f;
+                for (Float s : propagatedScores) {
+                  sum += s;
+                }
+                return Pair.of(input.first(), prd.next(d + (1.0f - d)*sum));
+              }
+            }, input.getPTableType());
+  }
+  
+  // Builds the initial score table from tab-separated (url, outbound-url)
+  // lines, then iterates pageRank until the largest per-page score change
+  // drops below 0.01, asserting the final delta.
+  public static void run(Pipeline pipeline, PType<PageRankData> prType, PTypeFamily ptf) throws Exception {
+    String urlInput = FileHelper.createTempCopyOf("urls.txt");
+    PTable<String, PageRankData> scores = pipeline.readTextFile(urlInput)
+        .parallelDo(new MapFn<String, Pair<String, String>>() {
+          @Override
+          public Pair<String, String> map(String input) {
+            String[] urls = input.split("\\t");
+            return Pair.of(urls[0], urls[1]);
+          }
+        }, ptf.tableOf(ptf.strings(), ptf.strings()))
+        .groupByKey()
+        .parallelDo(new MapFn<Pair<String, Iterable<String>>, Pair<String, PageRankData>>() {
+              @Override
+              public Pair<String, PageRankData> map(Pair<String, Iterable<String>> input) {
+                return Pair.of(input.first(), new PageRankData(1.0f, 0.0f, input.second()));
+              }
+            }, ptf.tableOf(ptf.strings(), prType));
+    
+    // NOTE(review): delta is a boxed Float; if the materialized max were ever
+    // empty, Iterables.getFirst(..., null) would return null and unboxing in
+    // the loop condition would throw NPE — confirm this can't happen here.
+    Float delta = 1.0f;
+    while (delta > 0.01) {
+      scores = pageRank(scores, 0.5f);
+      scores.materialize().iterator(); // force the write
+      delta = Iterables.getFirst(Aggregate.max(
+          scores.parallelDo(new MapFn<Pair<String, PageRankData>, Float>() {
+            @Override
+            public Float map(Pair<String, PageRankData> input) {
+              PageRankData prd = input.second();
+              return Math.abs(prd.score - prd.lastScore);
+            }
+          }, ptf.floats())).materialize(), null);
+    }
+    assertEquals(0.0048, delta, 0.001);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/TermFrequencyIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/TermFrequencyIT.java b/crunch/src/it/java/org/apache/crunch/TermFrequencyIT.java
new file mode 100644
index 0000000..d3c877b
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/TermFrequencyIT.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mem.MemPipeline;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.At;
+import org.apache.crunch.io.ReadableSourceTarget;
+import org.apache.crunch.lib.Aggregate;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+
+/**
+ * Integration test computing per-document term frequencies from docs.txt
+ * (tab-separated "title\ttext" lines), optionally applying an extra transform
+ * stage, and verifying one known (word, document) count in the output.
+ */
+@SuppressWarnings("serial")
+public class TermFrequencyIT implements Serializable {  
+  
+  @Test
+  public void testTermFrequencyWithNoTransform() throws IOException {
+    run(new MRPipeline(TermFrequencyIT.class), WritableTypeFamily.getInstance(), false);
+  }
+  
+  @Test
+  public void testTermFrequencyWithTransform() throws IOException {
+    run(new MRPipeline(TermFrequencyIT.class), WritableTypeFamily.getInstance(), true);
+  }
+  
+  @Test
+  public void testTermFrequencyNoTransformInMemory() throws IOException {
+    run(MemPipeline.getInstance(), WritableTypeFamily.getInstance(), false);  
+  }
+
+  @Test
+  public void testTermFrequencyWithTransformInMemory() throws IOException {
+    run(MemPipeline.getInstance(), WritableTypeFamily.getInstance(), true);
+  }
+  
+
+  // Shared driver; transformTF additionally re-keys the counts by word and
+  // writes that second output.
+  public void run(Pipeline pipeline, PTypeFamily typeFamily, boolean transformTF) throws IOException {
+    String input = FileHelper.createTempCopyOf("docs.txt");
+    
+    File transformedOutput = FileHelper.createOutputPath();
+    File tfOutput = FileHelper.createOutputPath();
+    
+    PCollection<String> docs = pipeline.readTextFile(input);
+    
+    PTypeFamily ptf = docs.getTypeFamily();
+    
+    /*
+     * Input: String
+     * Input title  text
+     * 
+     * Output: PTable<Pair<String, String>, Long> 
+     * Pair<Pair<word, title>, count in title>
+     */
+    PTable<Pair<String, String>, Long> tf = Aggregate.count(docs.parallelDo("term document frequency",
+        new DoFn<String, Pair<String, String>>() {
+      @Override
+      public void process(String doc, Emitter<Pair<String, String>> emitter) {
+        String[] kv = doc.split("\t");
+        String title = kv[0];
+        String text = kv[1];
+        for (String word : text.split("\\W+")) {
+          if(word.length() > 0) {
+            Pair<String, String> pair = Pair.of(word.toLowerCase(), title);
+            emitter.emit(pair);
+          }
+        }
+      }
+    }, ptf.pairs(ptf.strings(), ptf.strings())));
+    
+    if(transformTF) {
+      /*
+       * Input: Pair<Pair<String, String>, Long>
+       * Pair<Pair<word, title>, count in title>
+       * 
+       * Output: PTable<String, Pair<String, Long>>
+       * PTable<word, Pair<title, count in title>>
+       */
+      PTable<String, Pair<String, Long>> wordDocumentCountPair = tf.parallelDo("transform wordDocumentPairCount",
+          new MapFn<Pair<Pair<String, String>, Long>, Pair<String, Pair<String, Long>>>() {
+            @Override
+            public Pair<String, Pair<String, Long>> map(Pair<Pair<String, String>, Long> input) {
+              Pair<String, String> wordDocumentPair = input.first();            
+              return Pair.of(wordDocumentPair.first(), Pair.of(wordDocumentPair.second(), input.second()));
+            }
+        }, ptf.tableOf(ptf.strings(), ptf.pairs(ptf.strings(), ptf.longs())));
+      
+      pipeline.writeTextFile(wordDocumentCountPair, transformedOutput.getAbsolutePath());
+    }
+    
+    SourceTarget<String> st = At.textFile(tfOutput.getAbsolutePath());
+    pipeline.write(tf, st);
+    
+    pipeline.run();
+    
+    // test the case we should see
+    // NOTE(review): unchecked cast — relies on At.textFile returning a
+    // ReadableSourceTarget implementation; confirm.
+    Iterable<String> lines = ((ReadableSourceTarget<String>) st).read(pipeline.getConfiguration());
+    boolean passed = false;
+    for (String line : lines) {
+      // "well" occurs exactly once in document A: a count of 0 is a failure,
+      // a count of 1 is the expected result.
+      if ("[well,A]\t0".equals(line)) {
+        fail("Found " + line + " but well is in Document A 1 time");
+      }
+      if ("[well,A]\t1".equals(line)) {
+        passed = true;
+      }
+    }
+    assertTrue(passed);
+    pipeline.done();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/TextPairIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/TextPairIT.java b/crunch/src/it/java/org/apache/crunch/TextPairIT.java
new file mode 100644
index 0000000..a42b6d5
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/TextPairIT.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.From;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.writable.Writables;
+
+/**
+ * Integration test for pairs of Writable strings: duplicates each word of
+ * shakes.txt into a (CANARY, word) pair and verifies the canary constant
+ * survives a round trip through the pipeline.
+ */
+public class TextPairIT  {
+
+  @Test
+  public void testWritables() throws IOException {
+    run(new MRPipeline(TextPairIT.class));
+  }
+  
+  // Sentinel value emitted as every pair's first element; checked on readback.
+  private static final String CANARY = "Writables.STRING_TO_TEXT";
+  
+  // Splits each line into words and emits a (CANARY, word) pair per word.
+  public static PCollection<Pair<String, String>> wordDuplicate(PCollection<String> words) {
+    return words.parallelDo("my word duplicator", new DoFn<String, Pair<String, String>>() {
+      public void process(String line, Emitter<Pair<String, String>> emitter) {
+        for (String word : line.split("\\W+")) {
+          if(word.length() > 0) {
+            Pair<String, String> pair = Pair.of(CANARY, word);
+            emitter.emit(pair);
+          }
+        }
+      }
+    }, Writables.pairs(Writables.strings(), Writables.strings()));
+  }
+  
+  public void run(Pipeline pipeline) throws IOException {
+    String input = FileHelper.createTempCopyOf("shakes.txt");
+        
+    PCollection<String> shakespeare = pipeline.read(From.textFile(input));
+    Iterable<Pair<String, String>> lines = pipeline.materialize(wordDuplicate(shakespeare));    
+    boolean passed = false;
+    for (Pair<String, String> line : lines) {
+      if (line.first().contains(CANARY)) {
+        passed = true;
+        break;
+      }
+    }
+    
+    pipeline.done();
+    assertTrue(passed);
+  }  
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/TfIdfIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/TfIdfIT.java b/crunch/src/it/java/org/apache/crunch/TfIdfIT.java
new file mode 100644
index 0000000..2472449
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/TfIdfIT.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static com.google.common.io.Resources.getResource;
+import static com.google.common.io.Resources.newInputStreamSupplier;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.charset.Charset;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+
+import org.apache.crunch.fn.MapKeysFn;
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.seq.SeqFileSourceTarget;
+import org.apache.crunch.lib.Aggregate;
+import org.apache.crunch.lib.Join;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.collect.Lists;
+import com.google.common.io.Files;
+
+/**
+ * Integration test computing TF-IDF scores over docs.txt (tab-separated
+ * "title\ttext" lines), checking both a lowercase and an uppercased result,
+ * and exercising single-run vs. multi-run pipeline execution.
+ */
+@SuppressWarnings("serial")
+public class TfIdfIT implements Serializable {  
+  // total number of documents, should calculate
+  protected static final double N = 2;
+  
+  @Test
+  public void testWritablesSingleRun() throws IOException {
+    run(new MRPipeline(TfIdfIT.class), WritableTypeFamily.getInstance(), true);
+  }
+
+  @Test
+  public void testWritablesMultiRun() throws IOException {
+    run(new MRPipeline(TfIdfIT.class), WritableTypeFamily.getInstance(), false);
+  }
+
+  /**
+   * This method should generate a TF-IDF score for the input.
+   *
+   * Builds per-(word, title) term frequencies, persists them to termFreqPath
+   * as a side output, derives per-word document counts, joins the two, and
+   * returns per-word collections of (title, tfidf) pairs.
+   */
+  public PTable<String, Collection<Pair<String, Double>>>  generateTFIDF(PCollection<String> docs,
+      Path termFreqPath, PTypeFamily ptf) throws IOException {    
+    
+    /*
+     * Input: String
+     * Input title  text
+     * 
+     * Output: PTable<Pair<String, String>, Long> 
+     * Pair<Pair<word, title>, count in title>
+     */
+    PTable<Pair<String, String>, Long> tf = Aggregate.count(docs.parallelDo("term document frequency",
+        new DoFn<String, Pair<String, String>>() {
+      @Override
+      public void process(String doc, Emitter<Pair<String, String>> emitter) {
+        String[] kv = doc.split("\t");
+        String title = kv[0];
+        String text = kv[1];
+        for (String word : text.split("\\W+")) {
+          if(word.length() > 0) {
+            Pair<String, String> pair = Pair.of(word.toLowerCase(), title);
+            emitter.emit(pair);
+          }
+        }
+      }
+    }, ptf.pairs(ptf.strings(), ptf.strings())));
+    
+    tf.write(new SeqFileSourceTarget<Pair<Pair<String, String>, Long>>(termFreqPath, tf.getPType()));
+    
+    /*
+     * Input: Pair<Pair<String, String>, Long>
+     * Pair<Pair<word, title>, count in title>
+     * 
+     * Output: PTable<String, Long>
+     * PTable<word, # of docs containing word>
+     */
+    PTable<String, Long> n = Aggregate.count(tf.parallelDo("little n (# of docs contain word)",  
+        new DoFn<Pair<Pair<String, String>, Long>, String>() {
+      @Override
+      public void process(Pair<Pair<String, String>, Long> input,
+          Emitter<String> emitter) {
+        emitter.emit(input.first().first());
+      }
+    }, ptf.strings()));
+    
+    /*
+     * Input: Pair<Pair<String, String>, Long>
+     * Pair<Pair<word, title>, count in title>
+     * 
+     * Output: PTable<String, Pair<String, Long>>
+     * PTable<word, Pair<title, count in title>>
+     */
+    // NOTE(review): this DoFn buffers by key and assumes all records for the
+    // same word arrive consecutively (it flushes on key change) — confirm the
+    // upstream Aggregate.count output guarantees that ordering.
+    PTable<String, Collection<Pair<String, Long>>> wordDocumentCountPair = tf.parallelDo("transform wordDocumentPairCount",
+        new DoFn<Pair<Pair<String, String>, Long>, Pair<String, Collection<Pair<String, Long>>>>() {
+          Collection<Pair<String, Long>> buffer;
+          String key;
+          @Override
+          public void process(Pair<Pair<String, String>, Long> input,
+        	  Emitter<Pair<String, Collection<Pair<String, Long>>>> emitter) {
+            Pair<String, String> wordDocumentPair = input.first();
+            if(!wordDocumentPair.first().equals(key)) {
+              flush(emitter);
+              key = wordDocumentPair.first();
+              buffer = Lists.newArrayList();
+            }
+            buffer.add(Pair.of(wordDocumentPair.second(), input.second()));            
+          }
+          // Emits the buffered (title, count) pairs for the current key, if any.
+          protected void flush(Emitter<Pair<String, Collection<Pair<String, Long>>>> emitter) {
+            if(buffer != null) {
+              emitter.emit(Pair.of(key, buffer));
+              buffer = null;
+            }
+          }
+          @Override
+          public void cleanup(Emitter<Pair<String, Collection<Pair<String, Long>>>> emitter) {
+            flush(emitter);
+          }
+      }, ptf.tableOf(ptf.strings(), ptf.collections(ptf.pairs(ptf.strings(), ptf.longs()))));
+
+    PTable<String, Pair<Long, Collection<Pair<String, Long>>>> joinedResults = Join.join(n, wordDocumentCountPair);
+
+    /*
+     * Input: Pair<String, Pair<Long, Collection<Pair<String, Long>>>
+     * Pair<word, Pair<# of docs containing word, Collection<Pair<title, term frequency>>>
+     * 
+     * Output: Pair<String, Collection<Pair<String, Double>>>
+     * Pair<word, Collection<Pair<title, tfidf>>>
+     */
+    return joinedResults.parallelDo("calculate tfidf",
+        new MapFn<Pair<String, Pair<Long, Collection<Pair<String, Long>>>>, Pair<String, Collection<Pair<String, Double>>>>() {
+          @Override
+          public Pair<String, Collection<Pair<String, Double>>> map(Pair<String, Pair<Long, Collection<Pair<String, Long>>>> input) {
+            Collection<Pair<String, Double>> tfidfs = Lists.newArrayList();
+            String word = input.first();
+            double n = input.second().first();
+            // idf = ln(total docs / docs containing word); N is the class constant.
+            double idf = Math.log(N / n);
+            for(Pair<String, Long> tf : input.second().second()) {
+              double tfidf = tf.second() * idf;
+              tfidfs.add(Pair.of(tf.first(), tfidf));
+            }
+            return Pair.of(word, tfidfs);
+          }
+      
+    }, ptf.tableOf(ptf.strings(), ptf.collections(ptf.pairs(ptf.strings(), ptf.doubles()))));
+  }
+  
+  // Shared driver; singleRun=false forces an intermediate pipeline.run()
+  // before the uppercasing stage is added.
+  public void run(Pipeline pipeline, PTypeFamily typeFamily, boolean singleRun) throws IOException {
+    // NOTE(review): createTempFile's suffix is used verbatim — "txt" (no dot)
+    // yields names like "docs123txt"; harmless here but likely unintended.
+    File input = File.createTempFile("docs", "txt");
+    input.deleteOnExit();
+    Files.copy(newInputStreamSupplier(getResource("docs.txt")), input);
+    
+    String outputPath1 = getOutput();
+    String outputPath2 = getOutput();
+    
+    Path tfPath = new Path(getOutput("termfreq"));
+    
+    PCollection<String> docs = pipeline.readTextFile(input.getAbsolutePath());
+        
+    PTable<String, Collection<Pair<String, Double>>> results =
+        generateTFIDF(docs, tfPath, typeFamily);
+    pipeline.writeTextFile(results, outputPath1);
+    if (!singleRun) {
+      pipeline.run();
+    }
+    
+    PTable<String, Collection<Pair<String, Double>>> uppercased = results.parallelDo(
+        new MapKeysFn<String, String, Collection<Pair<String, Double>>>() {
+          @Override
+          public String map(String k1) {
+            return k1.toUpperCase();
+          } 
+        }, results.getPTableType());
+    pipeline.writeTextFile(uppercased, outputPath2);
+    pipeline.done();
+    
+    // Check the lowercase version...
+    // 0.6931471805599453 == ln(2), the idf for a word appearing in 1 of 2 docs.
+    File outputFile = new File(outputPath1, "part-r-00000");
+    outputFile.deleteOnExit();
+    List<String> lines = Files.readLines(outputFile, Charset.defaultCharset());
+    boolean passed = false;
+    for (String line : lines) {
+      if (line.startsWith("the") && line.contains("B,0.6931471805599453")) {
+        passed = true;
+        break;
+      }
+    }
+    assertTrue(passed);
+    
+    // ...and the uppercase version
+    outputFile = new File(outputPath2, "part-r-00000");
+    outputFile.deleteOnExit();
+    lines = Files.readLines(outputFile, Charset.defaultCharset());
+    passed = false;
+    for (String line : lines) {
+      if (line.startsWith("THE") && line.contains("B,0.6931471805599453")) {
+        passed = true;
+        break;
+      }
+    }
+    assertTrue(passed);
+  }
+  
+  public static String getOutput() throws IOException {
+    return getOutput("output");
+  }
+  
+  // Reserves a fresh temp path by creating then deleting a temp file.
+  // NOTE(review): delete-then-reuse is racy; fine for a single-process test.
+  public static String getOutput(String prefix) throws IOException {
+    File output = File.createTempFile(prefix, "");
+    String path = output.getAbsolutePath();
+    output.delete();
+    return path;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/TupleNClassCastBugIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/TupleNClassCastBugIT.java b/crunch/src/it/java/org/apache/crunch/TupleNClassCastBugIT.java
new file mode 100644
index 0000000..9e65244
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/TupleNClassCastBugIT.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static com.google.common.io.Resources.getResource;
+import static com.google.common.io.Resources.newInputStreamSupplier;
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.List;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.io.Files;
+
+/**
+ * Regression test for a ClassCastException that occurred when TupleN values
+ * were grouped and re-emitted during the reduce phase.
+ */
+public class TupleNClassCastBugIT {
+
+  /**
+   * Maps each tab-separated input line to a (docId, TupleN) pair, groups by
+   * docId, and re-emits the tuples unchanged. The round trip through
+   * groupByKey is what used to trigger the ClassCastException.
+   */
+  public static PCollection<TupleN> mapGroupDo(PCollection<String> lines, PTypeFamily ptf) {
+    PTable<String, TupleN> mapped = lines.parallelDo(new MapFn<String, Pair<String, TupleN>>() {
+
+      @Override
+      public Pair<String, TupleN> map(String line) {
+        String[] columns = line.split("\\t");
+        String docId = columns[0];
+        String docLine = columns[1];
+        return Pair.of(docId, new TupleN(docId, docLine));
+      }
+    }, ptf.tableOf(ptf.strings(), ptf.tuples(ptf.strings(), ptf.strings())));
+    return mapped.groupByKey().parallelDo(new DoFn<Pair<String, Iterable<TupleN>>, TupleN>() {
+      @Override
+      public void process(Pair<String, Iterable<TupleN>> input, Emitter<TupleN> tupleNEmitter) {
+        for (TupleN tuple : input.second()) {
+          tupleNEmitter.emit(tuple);
+        }
+      }
+    }, ptf.tuples(ptf.strings(), ptf.strings()));
+  }
+
+  @Test
+  public void testWritables() throws IOException {
+    run(new MRPipeline(TupleNClassCastBugIT.class), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testAvro() throws IOException {
+    run(new MRPipeline(TupleNClassCastBugIT.class), AvroTypeFamily.getInstance());
+  }
+
+  /**
+   * Runs the pipeline over the bundled docs.txt resource and verifies that the
+   * expected number of records survived the map/group/do round trip.
+   */
+  public void run(Pipeline pipeline, PTypeFamily typeFamily) throws IOException {
+    File input = File.createTempFile("docs", "txt");
+    input.deleteOnExit();
+    Files.copy(newInputStreamSupplier(getResource("docs.txt")), input);
+
+    File output = File.createTempFile("output", "");
+    String outputPath = output.getAbsolutePath();
+    output.delete();
+    // Register cleanup up front so the output survives only for the duration
+    // of the JVM even when the assertion below fails.
+    output.deleteOnExit();
+
+    PCollection<String> docLines = pipeline.readTextFile(input.getAbsolutePath());
+    pipeline.writeTextFile(mapGroupDo(docLines, typeFamily), outputPath);
+    pipeline.done();
+
+    // We are not directly testing the output; we are looking for a
+    // ClassCastException thrown in a different thread during the reduce phase.
+    // If all is well the file exists and has six lines; otherwise the bug is
+    // present.
+    File outputFile = new File(output, "part-r-00000");
+    List<String> lines = Files.readLines(outputFile, Charset.defaultCharset());
+    assertEquals(6, lines.size());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/WordCountHBaseIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/WordCountHBaseIT.java b/crunch/src/it/java/org/apache/crunch/WordCountHBaseIT.java
new file mode 100644
index 0000000..1fa922c
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/WordCountHBaseIT.java
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Random;
+import java.util.jar.JarEntry;
+import java.util.jar.JarOutputStream;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapred.TaskAttemptContext;
+import org.apache.hadoop.filecache.DistributedCache;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.hbase.HBaseSourceTarget;
+import org.apache.crunch.io.hbase.HBaseTarget;
+import org.apache.crunch.lib.Aggregate;
+import org.apache.crunch.types.writable.Writables;
+import org.apache.crunch.util.DistCache;
+import com.google.common.io.ByteStreams;
+
+/**
+ * End-to-end word count over HBase: reads words from one HBase table,
+ * aggregates counts with Crunch, and writes Puts into a second table,
+ * all against an in-process HBase mini cluster.
+ */
+public class WordCountHBaseIT {
+  protected static final Log LOG = LogFactory.getLog(WordCountHBaseIT.class);
+
+  // Both tables use the same single column family name "cf".
+  private static final byte[] COUNTS_COLFAM = Bytes.toBytes("cf");
+  private static final byte[] WORD_COLFAM = Bytes.toBytes("cf");
+
+  private HBaseTestingUtility hbaseTestUtil = new HBaseTestingUtility();
+  
+  /**
+   * Extracts the word from each scanned row, counts occurrences, and converts
+   * each (word, count) pair into a Put targeting the counts column family.
+   */
+  @SuppressWarnings("serial")
+  public static PCollection<Put> wordCount(PTable<ImmutableBytesWritable, Result> words) {
+    PTable<String, Long> counts = Aggregate.count(words.parallelDo(
+        new DoFn<Pair<ImmutableBytesWritable, Result>, String>() {
+          @Override
+          public void process(Pair<ImmutableBytesWritable, Result> row, Emitter<String> emitter) {
+            byte[] word = row.second().getValue(WORD_COLFAM, null);
+            if (word != null) {
+              emitter.emit(Bytes.toString(word));
+            }
+          }
+        }, words.getTypeFamily().strings()));
+
+    return counts.parallelDo("convert to put",
+        new DoFn<Pair<String, Long>, Put>() {
+          @Override
+          public void process(Pair<String, Long> input, Emitter<Put> emitter) {
+            Put put = new Put(Bytes.toBytes(input.first()));
+            put.add(COUNTS_COLFAM, null,
+                Bytes.toBytes(input.second()));
+            emitter.emit(put);
+          }
+
+        }, Writables.writables(Put.class));
+  }
+
+  /**
+   * Starts ZK, HBase, and MapReduce mini clusters. On Hadoop 2.x
+   * (detected via TaskAttemptContext being an interface) the test classes and
+   * dependency jars are additionally pushed onto the distributed classpath.
+   */
+  @SuppressWarnings("deprecation")
+  @Before
+  public void setUp() throws Exception {
+    Configuration conf = hbaseTestUtil.getConfiguration();
+    File tmpDir = File.createTempFile("logdir", "");
+    tmpDir.delete();
+    tmpDir.mkdir();
+    tmpDir.deleteOnExit();
+    conf.set("hadoop.log.dir", tmpDir.getAbsolutePath());
+    conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
+    // Disable the web UIs so parallel test runs don't fight over ports.
+    conf.setInt("hbase.master.info.port", -1);
+    conf.setInt("hbase.regionserver.info.port", -1);
+
+    hbaseTestUtil.startMiniZKCluster();
+    hbaseTestUtil.startMiniCluster();
+    hbaseTestUtil.startMiniMapReduceCluster(1);
+    
+    // For Hadoop-2.0.0, we have to do a bit more work.
+    if (TaskAttemptContext.class.isInterface()) {
+      conf = hbaseTestUtil.getConfiguration();
+      FileSystem fs = FileSystem.get(conf);
+      Path tmpPath = new Path("target", "WordCountHBaseTest-tmpDir");
+      FileSystem localFS = FileSystem.getLocal(conf);
+      for (FileStatus jarFile : localFS.listStatus(new Path("target/lib/"))) {
+        Path target = new Path(tmpPath, jarFile.getPath().getName());
+        fs.copyFromLocalFile(jarFile.getPath(), target);
+        DistributedCache.addFileToClassPath(target, conf, fs);
+      }
+    
+      // Create a programmatic container for this test's classes. These must
+      // match the compiled class names of THIS class (it was renamed from
+      // WordCountHBaseTest to WordCountHBaseIT), otherwise the MR tasks
+      // cannot load the DoFns.
+      JarOutputStream jos = new JarOutputStream(new FileOutputStream("WordCountHBaseTest.jar"));
+      File baseDir = new File("target/test-classes");
+      String prefix = "org/apache/crunch/";
+      jarUp(jos, baseDir, prefix + "WordCountHBaseIT.class");
+      jarUp(jos, baseDir, prefix + "WordCountHBaseIT$1.class");
+      jarUp(jos, baseDir, prefix + "WordCountHBaseIT$2.class");
+      jos.close();
+
+      Path target = new Path(tmpPath, "WordCountHBaseTest.jar");
+      fs.copyFromLocalFile(true, new Path("WordCountHBaseTest.jar"), target);
+      DistributedCache.addFileToClassPath(target, conf, fs);
+    }
+  }
+  
+  /** Adds a single class file under baseDir to the jar, keyed by its path. */
+  private void jarUp(JarOutputStream jos, File baseDir, String classDir) throws IOException {
+    File file = new File(baseDir, classDir);
+    JarEntry e = new JarEntry(classDir);
+    e.setTime(file.lastModified());
+    jos.putNextEntry(e);
+    FileInputStream in = new FileInputStream(file);
+    try {
+      ByteStreams.copy(in, jos);
+    } finally {
+      in.close();
+    }
+    jos.closeEntry();
+  }
+  
+  @Test
+  public void testWordCount() throws IOException {
+    run(new MRPipeline(WordCountHBaseIT.class, hbaseTestUtil.getConfiguration()));
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    hbaseTestUtil.shutdownMiniMapReduceCluster();
+    hbaseTestUtil.shutdownMiniCluster();
+    hbaseTestUtil.shutdownMiniZKCluster();
+  }
+  
+  /**
+   * Loads a few words into a randomly-named input table, runs the pipeline,
+   * and verifies the counts written to the output table.
+   */
+  public void run(Pipeline pipeline) throws IOException {
+    
+    Random rand = new Random();
+    int postFix = Math.abs(rand.nextInt());
+    String inputTableName = "crunch_words_" + postFix;
+    String outputTableName = "crunch_counts_" + postFix;
+
+    try {
+      
+      HTable inputTable = hbaseTestUtil.createTable(Bytes.toBytes(inputTableName),
+          WORD_COLFAM);
+      HTable outputTable = hbaseTestUtil.createTable(Bytes.toBytes(outputTableName),
+          COUNTS_COLFAM);
+  
+      int key = 0;
+      key = put(inputTable, key, "cat");
+      key = put(inputTable, key, "cat");
+      key = put(inputTable, key, "dog");
+      Scan scan = new Scan();
+      scan.addColumn(WORD_COLFAM, null);
+      HBaseSourceTarget source = new HBaseSourceTarget(inputTableName, scan);
+      PTable<ImmutableBytesWritable, Result> shakespeare = pipeline.read(source);
+      pipeline.write(wordCount(shakespeare), new HBaseTarget(outputTableName));
+      pipeline.done();
+      
+      assertIsLong(outputTable, "cat", 2);
+      assertIsLong(outputTable, "dog", 1);    
+    } finally {
+      // No per-test table cleanup needed: the tables are randomly named and
+      // the whole mini cluster is torn down in tearDown().
+    }
+  }
+  
+  /** Writes value into the word column of row `key` and returns the next key. */
+  protected int put(HTable table, int key, String value) throws IOException {
+    Put put = new Put(Bytes.toBytes(key));
+    put.add(WORD_COLFAM, null, Bytes.toBytes(value));    
+    table.put(put);
+    return key + 1;
+  }
+  
+  /** Asserts that row `key` holds the long count `i` in the counts family. */
+  protected void assertIsLong(HTable table, String key, long i) throws IOException {
+    Get get = new Get(Bytes.toBytes(key));
+    get.addColumn(COUNTS_COLFAM, null);
+    Result result = table.get(get);
+    
+    byte[] rawCount = result.getValue(COUNTS_COLFAM, null);
+    assertTrue(rawCount != null);
+    assertEquals(i, Bytes.toLong(rawCount));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/WordCountIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/WordCountIT.java b/crunch/src/it/java/org/apache/crunch/WordCountIT.java
new file mode 100644
index 0000000..fce5d65
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/WordCountIT.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.List;
+
+import org.junit.Test;
+
+import org.apache.crunch.impl.mr.MRPipeline;
+import org.apache.crunch.io.At;
+import org.apache.crunch.io.To;
+import org.apache.crunch.lib.Aggregate;
+import org.apache.crunch.test.FileHelper;
+import org.apache.crunch.types.PTypeFamily;
+import org.apache.crunch.types.avro.AvroTypeFamily;
+import org.apache.crunch.types.writable.WritableTypeFamily;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.io.Files;
+
+/**
+ * Word count integration tests over the Shakespeare corpus, exercising both
+ * the Writable and Avro type families, optional second MR stages, counters,
+ * and top-k aggregation.
+ */
+public class WordCountIT {
+
+  // Counter incremented once per occurrence of the word "and".
+  enum WordCountStats {
+    ANDS
+  }
+
+  /** Splits lines on whitespace and counts each word, tallying "and"s. */
+  public static PTable<String, Long> wordCount(PCollection<String> words, PTypeFamily typeFamily) {
+    return Aggregate.count(words.parallelDo(new DoFn<String, String>() {
+
+      @Override
+      public void process(String line, Emitter<String> emitter) {
+        for (String word : line.split("\\s+")) {
+          emitter.emit(word);
+          if ("and".equals(word)) {
+            increment(WordCountStats.ANDS);
+          }
+        }
+      }
+    }, typeFamily.strings()));
+  }
+
+  /** Re-keys each non-empty word to its first character, keeping the count. */
+  public static PTable<String, Long> substr(PTable<String, Long> ptable) {
+    return ptable.parallelDo(new DoFn<Pair<String, Long>, Pair<String, Long>>() {
+
+      @Override
+      public void process(Pair<String, Long> input, Emitter<Pair<String, Long>> emitter) {
+        if (input.first().length() > 0) {
+          emitter.emit(Pair.of(input.first().substring(0, 1), input.second()));
+        }
+      }
+    }, ptable.getPTableType());
+  }
+
+  // Flags toggled by individual @Test methods before delegating to run().
+  private boolean runSecond = false;
+  private boolean useToOutput = false;
+
+  @Test
+  public void testWritables() throws IOException {
+    run(new MRPipeline(WordCountIT.class), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testWritablesWithSecond() throws IOException {
+    runSecond = true;
+    run(new MRPipeline(WordCountIT.class), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testWritablesWithSecondUseToOutput() throws IOException {
+    runSecond = true;
+    useToOutput = true;
+    run(new MRPipeline(WordCountIT.class), WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testAvro() throws IOException {
+    run(new MRPipeline(WordCountIT.class), AvroTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testAvroWithSecond() throws IOException {
+    runSecond = true;
+    run(new MRPipeline(WordCountIT.class), AvroTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testWithTopWritable() throws IOException {
+    runWithTop(WritableTypeFamily.getInstance());
+  }
+
+  @Test
+  public void testWithTopAvro() throws IOException {
+    runWithTop(AvroTypeFamily.getInstance());
+  }
+
+  /** Verifies the five most frequent tokens (and counts) in shakes.txt. */
+  public static void runWithTop(PTypeFamily tf) throws IOException {
+    Pipeline pipeline = new MRPipeline(WordCountIT.class);
+    String inputPath = FileHelper.createTempCopyOf("shakes.txt");
+
+    PCollection<String> shakespeare = pipeline.read(At.textFile(inputPath, tf.strings()));
+    PTable<String, Long> wordCount = wordCount(shakespeare, tf);
+    List<Pair<String, Long>> top5 = Lists.newArrayList(Aggregate.top(wordCount, 5, true)
+        .materialize());
+    assertEquals(
+        ImmutableList.of(Pair.of("", 1470L), Pair.of("the", 620L), Pair.of("and", 427L),
+            Pair.of("of", 396L), Pair.of("to", 367L)), top5);
+  }
+
+  /**
+   * Runs the word count pipeline, optionally chaining a second stage
+   * (substring re-key plus combine) and optionally writing via To.textFile,
+   * then checks stage counts, the ANDS counter, and a known output line.
+   */
+  public void run(Pipeline pipeline, PTypeFamily typeFamily) throws IOException {
+    String inputPath = FileHelper.createTempCopyOf("shakes.txt");
+    File output = FileHelper.createOutputPath();
+    String outputPath = output.getAbsolutePath();
+
+    PCollection<String> shakespeare = pipeline.read(At.textFile(inputPath, typeFamily.strings()));
+    PTable<String, Long> wordCount = wordCount(shakespeare, typeFamily);
+    if (useToOutput) {
+      wordCount.write(To.textFile(outputPath));
+    } else {
+      pipeline.writeTextFile(wordCount, outputPath);
+    }
+
+    if (runSecond) {
+      File substrCount = File.createTempFile("substr", "");
+      String substrPath = substrCount.getAbsolutePath();
+      substrCount.delete();
+      PTable<String, Long> we = substr(wordCount).groupByKey().combineValues(
+          CombineFn.<String> SUM_LONGS());
+      pipeline.writeTextFile(we, substrPath);
+    }
+    PipelineResult res = pipeline.done();
+    assertTrue(res.succeeded());
+    List<PipelineResult.StageResult> stageResults = res.getStageResults();
+    if (runSecond) {
+      assertEquals(2, stageResults.size());
+    } else {
+      assertEquals(1, stageResults.size());
+      assertEquals(427, stageResults.get(0).getCounterValue(WordCountStats.ANDS));
+    }
+
+    // Spot-check a known count from the corpus in the reducer output.
+    File outputFile = new File(outputPath, "part-r-00000");
+    List<String> lines = Files.readLines(outputFile, Charset.defaultCharset());
+    boolean passed = false;
+    for (String line : lines) {
+      if (line.startsWith("Macbeth\t28")) {
+        passed = true;
+        break;
+      }
+    }
+    assertTrue(passed);
+    output.deleteOnExit();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-crunch/blob/42c9e4e5/crunch/src/it/java/org/apache/crunch/impl/mem/MemPipelineFileWritingIT.java
----------------------------------------------------------------------
diff --git a/crunch/src/it/java/org/apache/crunch/impl/mem/MemPipelineFileWritingIT.java b/crunch/src/it/java/org/apache/crunch/impl/mem/MemPipelineFileWritingIT.java
new file mode 100644
index 0000000..b725558
--- /dev/null
+++ b/crunch/src/it/java/org/apache/crunch/impl/mem/MemPipelineFileWritingIT.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.crunch.impl.mem;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.crunch.PCollection;
+import org.apache.crunch.Pipeline;
+import com.google.common.base.Charsets;
+import com.google.common.collect.ImmutableList;
+import com.google.common.io.Files;
+
+import org.junit.Test;
+
+/** Verifies that the in-memory pipeline writes text output to disk. */
+public class MemPipelineFileWritingIT {
+  @Test
+  public void testMemPipelineFileWriter() throws Exception {
+    // Reserve a unique directory path for the pipeline to create itself.
+    File outDir = Files.createTempDir();
+    outDir.delete();
+
+    Pipeline pipeline = MemPipeline.getInstance();
+    PCollection<String> lines = MemPipeline.collectionOf("hello", "world");
+    pipeline.writeTextFile(lines, outDir.getAbsolutePath());
+    pipeline.done();
+
+    assertTrue(outDir.exists());
+    File[] written = outDir.listFiles();
+    assertTrue(written != null && written.length > 0);
+    // Every non-hidden part file must contain exactly the two input lines.
+    for (File partFile : written) {
+      if (partFile.getName().startsWith(".")) {
+        continue;
+      }
+      List<String> contents = Files.readLines(partFile, Charsets.UTF_8);
+      assertEquals(ImmutableList.of("hello", "world"), contents);
+    }
+  }
+}


Mime
View raw message