drill-dev mailing list archives

From GitBox <...@apache.org>
Subject [GitHub] [drill] arina-ielchiieva commented on a change in pull request #1962: DRILL-7554: Convert LTSV Format Plugin to EVF
Date Tue, 04 Feb 2020 09:57:05 GMT
arina-ielchiieva commented on a change in pull request #1962: DRILL-7554: Convert LTSV Format Plugin to EVF
URL: https://github.com/apache/drill/pull/1962#discussion_r374571592
 
 

 ##########
 File path: contrib/format-ltsv/src/test/java/org/apache/drill/exec/store/ltsv/TestLTSVRecordReader.java
 ##########
 @@ -17,84 +17,190 @@
  */
 package org.apache.drill.exec.store.ltsv;
 
+import org.apache.drill.categories.RowSetTests;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.exec.ExecTest;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
 import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.store.dfs.FileSystemConfig;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.store.dfs.ZipCodec;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
+import org.apache.drill.test.QueryBuilder;
+import org.apache.drill.test.rowSet.RowSetComparison;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
-import java.util.HashMap;
-import java.util.Map;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Paths;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+@Category(RowSetTests.class)
 public class TestLTSVRecordReader extends ClusterTest {
 
   @BeforeClass
   public static void setup() throws Exception {
-    startCluster(ClusterFixture.builder(dirTestWatcher));
-
-    DrillbitContext context = cluster.drillbit().getContext();
-    FileSystemConfig original = (FileSystemConfig) context.getStorage().getPlugin("cp").getConfig();
-    Map<String, FormatPluginConfig> newFormats = new HashMap<>(original.getFormats());
-    newFormats.put("ltsv", new LTSVFormatPluginConfig());
-    FileSystemConfig pluginConfig = new FileSystemConfig(original.getConnection(), original.getConfig(), original.getWorkspaces(), newFormats);
-    pluginConfig.setEnabled(true);
-    context.getStorage().createOrUpdate("cp", pluginConfig, true);
+    ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher));
+
+    LTSVFormatPluginConfig formatConfig = new LTSVFormatPluginConfig();
+    cluster.defineFormat("cp", "ltsv", formatConfig);
+
+    // Needed for compressed file unit test
+    dirTestWatcher.copyResourceToRoot(Paths.get("ltsv/"));
   }
 
   @Test
   public void testWildcard() throws Exception {
-    testBuilder()
-      .sqlQuery("SELECT * FROM cp.`simple.ltsv`")
-      .unOrdered()
-      .baselineColumns("host", "forwardedfor", "req", "status", "size", "referer", "ua", "reqtime", "apptime", "vhost")
-      .baselineValues("xxx.xxx.xxx.xxx", "-", "GET /v1/xxx HTTP/1.1", "200", "4968", "-", "Java/1.8.0_131", "2.532", "2.532", "api.example.com")
-      .baselineValues("xxx.xxx.xxx.xxx", "-", "GET /v1/yyy HTTP/1.1", "200", "412", "-", "Java/1.8.0_201", "3.580", "3.580", "api.example.com")
-      .go();
+    String sql = "SELECT * FROM cp.`ltsv/simple.ltsv`";
+    QueryBuilder q = client.queryBuilder().sql(sql);
+    RowSet results = q.rowSet();
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+      .addNullable("host",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("forwardedfor",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("req",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("status",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("size",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("referer",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("ua",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("reqtime",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("apptime",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("vhost",  TypeProtos.MinorType.VARCHAR)
+      .buildSchema();
+
+    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+      .addRow("xxx.xxx.xxx.xxx", "-", "GET /v1/xxx HTTP/1.1", "200", "4968", "-", "Java/1.8.0_131", "2.532", "2.532", "api.example.com")
+      .addRow("xxx.xxx.xxx.xxx", "-", "GET /v1/yyy HTTP/1.1", "200", "412", "-", "Java/1.8.0_201", "3.580", "3.580", "api.example.com")
+      .build();
+
+    new RowSetComparison(expected).verifyAndClearAll(results);
   }
 
   @Test
   public void testSelectColumns() throws Exception {
-    testBuilder()
-      .sqlQuery("SELECT ua, reqtime FROM cp.`simple.ltsv`")
-      .unOrdered()
-      .baselineColumns("ua", "reqtime")
-      .baselineValues("Java/1.8.0_131", "2.532")
-      .baselineValues("Java/1.8.0_201", "3.580")
-      .go();
+    String sql = "SELECT ua, reqtime FROM cp.`ltsv/simple.ltsv`";
+
+    QueryBuilder q = client.queryBuilder().sql(sql);
+    RowSet results = q.rowSet();
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+      .addNullable("ua",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("reqtime",  TypeProtos.MinorType.VARCHAR)
+      .buildSchema();
+
+    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+      .addRow("Java/1.8.0_131", "2.532")
+      .addRow("Java/1.8.0_201", "3.580")
+      .build();
+
+    new RowSetComparison(expected).verifyAndClearAll(results);
   }
 
   @Test
   public void testQueryWithConditions() throws Exception {
-    testBuilder()
-      .sqlQuery("SELECT * FROM cp.`simple.ltsv` WHERE reqtime > 3.0")
-      .unOrdered()
-      .baselineColumns("host", "forwardedfor", "req", "status", "size", "referer", "ua", "reqtime", "apptime", "vhost")
-      .baselineValues("xxx.xxx.xxx.xxx", "-", "GET /v1/yyy HTTP/1.1", "200", "412", "-", "Java/1.8.0_201", "3.580", "3.580", "api.example.com")
-      .go();
+    String sql = "SELECT * FROM cp.`ltsv/simple.ltsv` WHERE reqtime > 3.0";
+
+    QueryBuilder q = client.queryBuilder().sql(sql);
+    RowSet results = q.rowSet();
+    TupleMetadata expectedSchema = new SchemaBuilder()
+      .addNullable("host",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("forwardedfor",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("req",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("status",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("size",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("referer",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("ua",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("reqtime",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("apptime",  TypeProtos.MinorType.VARCHAR)
+      .addNullable("vhost",  TypeProtos.MinorType.VARCHAR)
+      .buildSchema();
+
+    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+      .addRow("xxx.xxx.xxx.xxx", "-", "GET /v1/yyy HTTP/1.1", "200", "412", "-", "Java/1.8.0_201", "3.580", "3.580", "api.example.com")
+      .build();
+
+    new RowSetComparison(expected).verifyAndClearAll(results);
   }
 
   @Test
   public void testSkipEmptyLines() throws Exception {
-    assertEquals(2, queryBuilder().sql("SELECT * FROM cp.`emptylines.ltsv`").run().recordCount());
+    assertEquals(2, queryBuilder().sql("SELECT * FROM cp.`ltsv/emptylines.ltsv`").run().recordCount());
   }
 
   @Test
   public void testReadException() throws Exception {
     try {
-      run("SELECT * FROM cp.`invalid.ltsv`");
+      run("SELECT * FROM cp.`ltsv/invalid.ltsv`");
       fail();
     } catch (UserException e) {
       assertEquals(UserBitShared.DrillPBError.ErrorType.DATA_READ, e.getErrorType());
-      assertTrue(e.getMessage().contains("Failure while reading messages from /invalid.ltsv. Record reader was at record: 1"));
+      assertTrue(e.getMessage().contains("Invalid LTSV format at line 1: time:30/Nov/2016:00:55:08 +0900"));
     }
   }
 
+  @Test
+  public void testSerDe() throws Exception {
+    String sql = "SELECT COUNT(*) as cnt FROM cp.`ltsv/simple.ltsv`";
+    String plan = queryBuilder().sql(sql).explainJson();
+    long cnt = queryBuilder().physical(plan).singletonLong();
+    assertEquals("Counts should match", 2L, cnt);
+  }
+
+  @Test
 
 Review comment:
   Also, please add a test with a provided schema.
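
   For example, a provided-schema test might look like the sketch below. This is illustrative only: it assumes the EVF-based reader honors a schema passed in through Drill's schema table function (`schema => 'inline=(...)'`, available since Drill 1.17) and applies the standard string-to-INT conversion; the test name and the choice of the `status` column are hypothetical.

   @Test
   public void testProvidedSchema() throws Exception {
     // Hypothetical sketch: relies on the provided-schema table function and on
     // the EVF framework converting the LTSV string value to the declared INT type.
     String sql = "SELECT `status` FROM table(cp.`ltsv/simple.ltsv` " +
         "(type => 'ltsv', schema => 'inline=(`status` INT)'))";
     RowSet results = client.queryBuilder().sql(sql).rowSet();

     // With the provided schema, `status` should come back as a nullable INT
     // instead of the default nullable VARCHAR.
     TupleMetadata expectedSchema = new SchemaBuilder()
       .addNullable("status", TypeProtos.MinorType.INT)
       .buildSchema();

     RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
       .addRow(200)
       .addRow(200)
       .build();

     new RowSetComparison(expected).verifyAndClearAll(results);
   }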

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services
