beam-commits mailing list archives

From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Work logged] (BEAM-3912) Add batching support for HadoopOutputFormatIO
Date Fri, 07 Sep 2018 07:41:00 GMT

     [ https://issues.apache.org/jira/browse/BEAM-3912?focusedWorklogId=142074&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-142074 ]

ASF GitHub Bot logged work on BEAM-3912:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 07/Sep/18 07:40
            Start Date: 07/Sep/18 07:40
    Worklog Time Spent: 10m 
      Work Description: echauchot commented on a change in pull request #6306: [BEAM-3912] Add HadoopOutputFormatIO support
URL: https://github.com/apache/beam/pull/6306#discussion_r215871896
 
 

 ##########
 File path: sdks/java/io/hadoop-output-format/src/test/java/org/apache/beam/sdk/io/hadoop/outputformat/HadoopOutputFormatIOTest.java
 ##########
 @@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.beam.sdk.io.hadoop.outputformat;
+
+import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.io.hadoop.SerializableConfiguration;
+import org.apache.beam.sdk.io.hadoop.inputformat.Employee;
+import org.apache.beam.sdk.io.hadoop.inputformat.TestEmployeeDataSet;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link HadoopOutputFormatIO}. */
+@RunWith(JUnit4.class)
+public class HadoopOutputFormatIOTest {
+  private static SerializableConfiguration serConf;
+
+  @Rule public final transient TestPipeline p = TestPipeline.create();
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void setUp() {
+    EmployeeOutputFormat.initWrittenOutput();
+    serConf = loadTestConfiguration(EmployeeOutputFormat.class, Text.class, Employee.class);
+  }
+
+  private static SerializableConfiguration loadTestConfiguration(
+      Class<?> outputFormatClassName, Class<?> keyClass, Class<?> valueClass) {
+    Configuration conf = new Configuration();
+    conf.setClass("mapreduce.job.outputformat.class", outputFormatClassName, OutputFormat.class);
+    conf.setClass("mapreduce.job.outputformat.key.class", keyClass, Object.class);
+    conf.setClass("mapreduce.job.outputformat.value.class", valueClass, Object.class);
+    return new SerializableConfiguration(conf);
+  }
+
+  @Test
+  public void testWriteBuildsCorrectly() {
+    HadoopOutputFormatIO.Write<Text, Employee> write =
+        HadoopOutputFormatIO.<Text, Employee>write().withConfiguration(serConf.get());
+
+    assertEquals(serConf.get(), write.getConfiguration().get());
+    assertEquals(EmployeeOutputFormat.class, write.getOutputFormatClass().getRawType());
+    assertEquals(Text.class, write.getOutputFormatKeyClass().getRawType());
+    assertEquals(Employee.class, write.getOutputFormatValueClass().getRawType());
+  }
+
+  /**
+   * This test validates that {@link HadoopOutputFormatIO.Write Write} transform object creation
+   * fails with a null configuration. The {@link
+   * HadoopOutputFormatIO.Write#withConfiguration(Configuration) withConfiguration(Configuration)}
+   * method checks whether the configuration is null and throws an exception if it is.
+   */
+  @Test
+  public void testWriteObjectCreationFailsIfConfigurationIsNull() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Configuration can not be null");
+    HadoopOutputFormatIO.<Text, Employee>write().withConfiguration(null);
+  }
+
+  /**
+   * This test validates the behavior of {@link
+   * HadoopOutputFormatIO.Write#withConfiguration(Configuration) withConfiguration(Configuration)}
+   * when the Hadoop OutputFormat class is not provided by the user in the configuration.
+   */
+  @Test
+  public void testWriteValidationFailsMissingOutputFormatInConf() {
+    Configuration configuration = new Configuration();
+    configuration.setClass(HadoopOutputFormatIO.OUTPUTFORMAT_KEY_CLASS, Text.class, Object.class);
+    configuration.setClass(
+        HadoopOutputFormatIO.OUTPUTFORMAT_VALUE_CLASS, Employee.class, Object.class);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Configuration must contain \"mapreduce.job.outputformat.class\"");
+    HadoopOutputFormatIO.<Text, Employee>write().withConfiguration(configuration);
+  }
+
+  /**
+   * This test validates the behavior of {@link
+   * HadoopOutputFormatIO.Write#withConfiguration(Configuration) withConfiguration(Configuration)}
+   * when the key class is not provided by the user in the configuration.
+   */
+  @Test
+  public void testWriteValidationFailsMissingKeyClassInConf() {
 
 Review comment:
   no objections of course, the more tests the better. It was just a question
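   For context, here is a minimal usage sketch of the transform under test, assembled only from
   the write()/withConfiguration() calls and the mapreduce.job.outputformat.* keys exercised in
   the test above; the TextOutputFormat sink, the output directory property, and the explicit
   WritableCoder are illustrative assumptions rather than part of this PR:

   import org.apache.beam.sdk.Pipeline;
   import org.apache.beam.sdk.coders.KvCoder;
   import org.apache.beam.sdk.io.hadoop.WritableCoder;
   import org.apache.beam.sdk.io.hadoop.outputformat.HadoopOutputFormatIO;
   import org.apache.beam.sdk.transforms.Create;
   import org.apache.beam.sdk.values.KV;
   import org.apache.hadoop.conf.Configuration;
   import org.apache.hadoop.io.Text;
   import org.apache.hadoop.mapreduce.OutputFormat;
   import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

   public class HadoopOutputFormatIOUsageSketch {
     public static void main(String[] args) {
       // Hadoop configuration naming the OutputFormat and its key/value classes,
       // mirroring the keys set by loadTestConfiguration() in the test above.
       Configuration conf = new Configuration();
       conf.setClass("mapreduce.job.outputformat.class", TextOutputFormat.class, OutputFormat.class);
       conf.setClass("mapreduce.job.outputformat.key.class", Text.class, Object.class);
       conf.setClass("mapreduce.job.outputformat.value.class", Text.class, Object.class);
       // Hypothetical output directory for the FileOutputFormat-based sink.
       conf.set("mapreduce.output.fileoutputformat.outputdir", "/tmp/beam-hadoop-output");

       Pipeline p = Pipeline.create();
       p.apply(
               Create.of(KV.of(new Text("key"), new Text("value")))
                   .withCoder(KvCoder.of(WritableCoder.of(Text.class), WritableCoder.of(Text.class))))
           .apply(HadoopOutputFormatIO.<Text, Text>write().withConfiguration(conf));
       p.run().waitUntilFinish();
     }
   }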

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 142074)
    Time Spent: 5h 10m  (was: 5h)

> Add batching support for HadoopOutputFormatIO
> ---------------------------------------------
>
>                 Key: BEAM-3912
>                 URL: https://issues.apache.org/jira/browse/BEAM-3912
>             Project: Beam
>          Issue Type: Sub-task
>          Components: io-java-hadoop
>            Reporter: Alexey Romanenko
>            Assignee: Alexey Romanenko
>            Priority: Minor
>          Time Spent: 5h 10m
>  Remaining Estimate: 0h
>




--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
