avro-commits mailing list archives

From th...@apache.org
Subject [20/36] avro git commit: [merge] merge with latest changes
Date Fri, 19 May 2017 17:12:34 GMT
[merge] merge with latest changes

Project: http://git-wip-us.apache.org/repos/asf/avro/repo
Commit: http://git-wip-us.apache.org/repos/asf/avro/commit/884fbabd
Tree: http://git-wip-us.apache.org/repos/asf/avro/tree/884fbabd
Diff: http://git-wip-us.apache.org/repos/asf/avro/diff/884fbabd

Branch: refs/heads/master
Commit: 884fbabd3a07e6442dac9a43f34eb9ca0e5a0cbc
Parents: c3384e4
Author: Zoltan Farkas <zolyfarkas@yahoo.com>
Authored: Sun Apr 2 11:28:00 2017 -0400
Committer: Zoltan Farkas <zolyfarkas@yahoo.com>
Committed: Sun Apr 2 11:28:00 2017 -0400

----------------------------------------------------------------------
 CHANGES.txt                                     |  30 ++
 doc/src/content/xdocs/gettingstartedjava.xml    |   2 +-
 lang/c++/CMakeLists.txt                         |  27 +-
 lang/c++/FindSnappy.cmake                       |  54 +++
 lang/c++/README                                 |  32 +-
 lang/c++/api/DataFile.hh                        |   9 +-
 lang/c++/api/GenericDatum.hh                    |  32 +-
 lang/c++/api/Specific.hh                        |  24 ++
 lang/c++/impl/Compiler.cc                       |  37 +-
 lang/c++/impl/DataFile.cc                       | 116 +++++-
 lang/c++/impl/Generic.cc                        |   4 -
 lang/c++/impl/avrogencpp.cc                     |  42 +-
 lang/c++/impl/json/JsonIO.cc                    |  12 +-
 lang/c++/impl/json/JsonIO.hh                    |   8 +-
 lang/c++/impl/parsing/Symbol.hh                 |   3 +
 lang/c++/jsonschemas/bigrecord                  |   4 +
 lang/c++/jsonschemas/bigrecord_r                |   6 +-
 lang/c++/jsonschemas/crossref                   |  28 ++
 lang/c++/jsonschemas/padded_record              |  14 +
 lang/c++/jsonschemas/primitivetypes             |  15 +
 lang/c++/jsonschemas/tree1                      |  25 ++
 lang/c++/jsonschemas/tree2                      |  18 +
 lang/c++/test/AvrogencppTests.cc                |   4 +
 lang/c++/test/CodecTests.cc                     |  74 +++-
 lang/c++/test/DataFileTests.cc                  |  40 +-
 lang/c++/test/JsonTests.cc                      |   3 +
 lang/c/src/codec.c                              |   3 +
 .../src/main/java/org/apache/avro/Schema.java   |   3 +
 .../java/org/apache/avro/SchemaBuilder.java     |   4 +
 .../java/org/apache/avro/file/BZip2Codec.java   |   2 +-
 .../java/org/apache/avro/file/DeflateCodec.java |   2 +-
 .../java/org/apache/avro/file/NullCodec.java    |   2 +-
 .../java/org/apache/avro/file/SnappyCodec.java  |   2 +-
 .../main/java/org/apache/avro/file/XZCodec.java |   2 +-
 .../avro/specific/SpecificRecordBase.java       |   2 +-
 .../apache/avro/util/WeakIdentityHashMap.java   |   6 +
 .../apache/avro/util/internal/JacksonUtils.java |   3 +-
 .../test/java/org/apache/avro/TestFixed.java    |  21 +-
 .../java/compiler/src/test/idl/input/cycle.avdl |   2 +-
 .../compiler/src/test/idl/output/cycle.avpr     |   3 +-
 .../avro/ipc/TestNettyServerWithCallbacks.java  |  35 +-
 lang/java/pom.xml                               |  60 ++-
 lang/java/tools/pom.xml                         |   5 -
 .../java/org/apache/avro/tool/ConcatTool.java   |  28 +-
 .../main/java/org/apache/avro/tool/Util.java    |  39 +-
 .../tools/src/main/resources/log4j.properties   |  22 +
 .../java/org/apache/avro/tool/TestCatTool.java  |  14 +
 .../org/apache/avro/tool/TestConcatTool.java    |  79 ++++
 lang/ruby/Manifest                              |   2 +
 lang/ruby/lib/avro.rb                           |   1 +
 lang/ruby/lib/avro/schema.rb                    |  39 +-
 lang/ruby/lib/avro/schema_validator.rb          | 196 +++++++++
 lang/ruby/test/test_schema.rb                   |  56 +--
 lang/ruby/test/test_schema_validator.rb         | 402 +++++++++++++++++++
 54 files changed, 1439 insertions(+), 259 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index fbb1c2f..bd03204 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -40,6 +40,14 @@ Trunk (not yet released)
     AVRO-1897: Fix build issues due to VERSION.txt newline, avro-tools.
     (Suraj Acharya via blue)
 
+    AVRO-1993: C++ Byte ordering macro does not work on FreeBSD (thiru)
+
+    AVRO-1975: Upgrade java dependencies (gabor)
+
+    AVRO-1960: Add log4j properties for avro-tools
+
+    AVRO-1748. Add Snappy Compression to C++ DataFile (J. Langley via thiru)
+
   BUG FIXES
 
     AVRO-1741: Python3: Fix error when codec is not in the header.
@@ -117,6 +125,28 @@ Trunk (not yet released)
     AVRO-1954: Java: Schema.Field.defaultVal() generates: Unknown datum type
     (Nandor Kollar via tomwhite)
 
+    AVRO-1930: JsonParser doesn't handle integer scientific notation (Pietro Cerutti via thiru)
+
+    AVRO-1912: C++ Resolving Decoding doesn't work if element removed from record in array. (via thiru)
+
+    AVRO-1866. JsonNullFormatter fwd-declared as class, defined as struct ( Pietro Cerutti via thiru)
+
+    AVRO-1750. GenericDatum API behavior breaking change (thiru)
+
+    AVRO-1995: JSON Parser does not properly check current state (Victor Mota via thiru)
+
+    AVRO-1216. Setting precision for the output stream (John McClean via thiru)
+
+    AVRO-1813: Incorrect link to build instructions in Java Getting Started (Pietro Menna via gabor)
+
+    AVRO-1937: C++ generator for recursive structure crashes (thiru)
+
+    AVRO-1892. C++ library cannot parse unions with default values (Hua Zhang via thiru)
+
+    AVRO-1994. C++ Code Generator Generates Invalid Code if Field is of type Null (Darryl Green via thiru)
+
+    AVRO-1997. Avro Field.defaultVal broken for Fixed fields. (Zoltan Farkasi via thiru)
+
 Avro 1.8.1 (14 May 2016)
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/doc/src/content/xdocs/gettingstartedjava.xml
----------------------------------------------------------------------
diff --git a/doc/src/content/xdocs/gettingstartedjava.xml b/doc/src/content/xdocs/gettingstartedjava.xml
index ea760b3..6474ebc 100644
--- a/doc/src/content/xdocs/gettingstartedjava.xml
+++ b/doc/src/content/xdocs/gettingstartedjava.xml
@@ -93,7 +93,7 @@
       <p>
         You may also build the required Avro jars from source.  Building Avro is
         beyond the scope of this guide; see the <a
-        href="https://cwiki.apache.org/AVRO/build-documentation.html">Build
+        href="https://cwiki.apache.org/AVRO/Build+Documentation">Build
         Documentation</a> page in the wiki for more information.
       </p>
     </section>

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/lang/c++/CMakeLists.txt b/lang/c++/CMakeLists.txt
index e8efe86..be39215 100644
--- a/lang/c++/CMakeLists.txt
+++ b/lang/c++/CMakeLists.txt
@@ -36,6 +36,7 @@ set (AVRO_VERSION_MAJOR ${AVRO_VERSION})
 set (AVRO_VERSION_MINOR "0")
 
 project (Avro-cpp)
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR})
 
 if (WIN32 AND NOT CYGWIN AND NOT MSYS)
 add_definitions (/EHa)
@@ -61,6 +62,17 @@ endif ()
 find_package (Boost 1.38 REQUIRED
     COMPONENTS filesystem system program_options iostreams)
 
+find_package(Snappy)
+if (SNAPPY_FOUND)
+    set(SNAPPY_PKG libsnappy)
+    add_definitions(-DSNAPPY_CODEC_AVAILABLE)
+    message("Enabled snappy codec")
+else (SNAPPY_FOUND)
+    set(SNAPPY_PKG "")
+    set(SNAPPY_LIBRARIES "")
+    message("Disabled snappy codec. libsnappy not found.")
+endif (SNAPPY_FOUND)
+
 add_definitions (${Boost_LIB_DIAGNOSTIC_DEFINITIONS})
 
 include_directories (api ${CMAKE_CURRENT_BINARY_DIR} ${Boost_INCLUDE_DIRS})
@@ -98,11 +110,11 @@ set_target_properties (avrocpp PROPERTIES
 set_target_properties (avrocpp_s PROPERTIES
     VERSION ${AVRO_VERSION_MAJOR}.${AVRO_VERSION_MINOR})
 
-target_link_libraries (avrocpp ${Boost_LIBRARIES})
+target_link_libraries (avrocpp ${Boost_LIBRARIES} ${SNAPPY_LIBRARIES})
 
 add_executable (precompile test/precompile.cc)
 
-target_link_libraries (precompile avrocpp_s ${Boost_LIBRARIES})
+target_link_libraries (precompile avrocpp_s ${Boost_LIBRARIES} ${SNAPPY_LIBRARIES})
 
 macro (gen file ns)
     add_custom_command (OUTPUT ${file}.hh
@@ -125,15 +137,19 @@ gen (union_conflict uc)
 gen (recursive rec)
 gen (reuse ru)
 gen (circulardep cd)
+gen (tree1 tr1)
+gen (tree2 tr2)
+gen (crossref cr)
+gen (primitivetypes pt)
 
 add_executable (avrogencpp impl/avrogencpp.cc)
-target_link_libraries (avrogencpp avrocpp_s ${Boost_LIBRARIES})
+target_link_libraries (avrogencpp avrocpp_s ${Boost_LIBRARIES} ${SNAPPY_LIBRARIES})
 
 enable_testing()
 
 macro (unittest name)
     add_executable (${name} test/${name}.cc)
-    target_link_libraries (${name} avrocpp ${Boost_LIBRARIES})
+    target_link_libraries (${name} avrocpp ${Boost_LIBRARIES} ${SNAPPY_LIBRARIES})
     add_test (NAME ${name} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
         COMMAND ${CMAKE_CURRENT_BINARY_DIR}/${name})
 endmacro (unittest)
@@ -153,7 +169,8 @@ unittest (CompilerTests)
 add_dependencies (AvrogencppTests bigrecord_hh bigrecord_r_hh bigrecord2_hh
     tweet_hh
     union_array_union_hh union_map_union_hh union_conflict_hh
-    recursive_hh reuse_hh circulardep_hh empty_record_hh)
+    recursive_hh reuse_hh circulardep_hh tree1_hh tree2_hh crossref_hh
+    primitivetypes_hh empty_record_hh)
 
 include (InstallRequiredSystemLibraries)
 

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/FindSnappy.cmake
----------------------------------------------------------------------
diff --git a/lang/c++/FindSnappy.cmake b/lang/c++/FindSnappy.cmake
new file mode 100644
index 0000000..e9053af
--- /dev/null
+++ b/lang/c++/FindSnappy.cmake
@@ -0,0 +1,54 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# Tries to find Snappy headers and libraries.
+#
+# Usage of this module as follows:
+#
+#  find_package(Snappy)
+#
+# Variables used by this module, they can change the default behaviour and need
+# to be set before calling find_package:
+#
+#  SNAPPY_ROOT_DIR  Set this variable to the root installation of
+#                    Snappy if the module has problems finding
+#                    the proper installation path.
+#
+# Variables defined by this module:
+#
+#  SNAPPY_FOUND              System has Snappy libs/headers
+#  SNAPPY_LIBRARIES          The Snappy libraries
+#  SNAPPY_INCLUDE_DIR        The location of Snappy headers
+
+find_path(SNAPPY_INCLUDE_DIR
+    NAMES snappy.h
+    HINTS ${SNAPPY_ROOT_DIR}/include)
+
+find_library(SNAPPY_LIBRARIES
+    NAMES snappy
+    HINTS ${SNAPPY_ROOT_DIR}/lib)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Snappy DEFAULT_MSG
+    SNAPPY_LIBRARIES
+    SNAPPY_INCLUDE_DIR)
+
+mark_as_advanced(
+    SNAPPY_ROOT_DIR
+    SNAPPY_LIBRARIES
+    SNAPPY_INCLUDE_DIR)

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/README
----------------------------------------------------------------------
diff --git a/lang/c++/README b/lang/c++/README
index 7a79971..f1e9b66 100644
--- a/lang/c++/README
+++ b/lang/c++/README
@@ -23,18 +23,21 @@ The C++ port is thus far incomplete.  Currently, it contains:
    objects of the same data types, and the code to serialize and parse
    it.
 
-What's missing: Defaults are not yet supported.  And the file and rpc
-containers are not yet implemented. Documentation, sparse.
+What's missing: Rpc containers are not yet implemented. Documentation is sparse.
 
 INSTRUCTIONS
 
-To compile requires boost headers, and the boost regex library.
-Additionally, to generate the avro spec compiler requires flex and bison.
+Pre-requisites:
+
+To compile requires boost headers, and the boost regex library. Optionally, it requires Snappy compression library. If Snappy is available, it builds support for Snappy compression and skips it otherwise. (Please see your OS-specific instructions on how to install Boost and Snappy for your OS).
+
 To build one requires cmake 2.6 or later.
 
-To generate a Makefile under Unix or Cygwin use:
+To generate a Makefile under Unix, MacOS (using GNU) or Cygwin use:
 
-cmake -G "Unix Makefiles"
+mkdir build
+cd build
+cmake -G "Unix Makefiles" ..
 
 If it doesn't work, either you are missing some packages (boost, flex or bison),
 or you need to help configure locate them.
@@ -42,11 +45,7 @@ or you need to help configure locate them.
 If the Makefile is configured correctly, then you can make and run tests:
 
     make
-    ./build/unittest
-    ./build/buffertest
-    ./build/testgentest
-    ./build/CodecTests
-    ./build/StreamTests
+    ctest
 
 To install
 
@@ -54,6 +53,17 @@ To install
 
 and then untar the generated .tar.gz file.
 
+To build and test on MacOS (using Xcode)
+
+mkdir build.mac
+cd build.mac
+cmake -G Xcode
+
+xcodebuild -configuration Release
+ctest -C Release
+
+If debug version is required, replace 'Release' above with 'Debug'.
+
 Note: The LICENSE and NOTICE files in the lang/c++ source directory are used to
 build the binary distribution. The LICENSE and NOTICE information for the Avro
 C++ source distribution is in the root directory.

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/api/DataFile.hh
----------------------------------------------------------------------
diff --git a/lang/c++/api/DataFile.hh b/lang/c++/api/DataFile.hh
index 98779b6..bff3097 100644
--- a/lang/c++/api/DataFile.hh
+++ b/lang/c++/api/DataFile.hh
@@ -40,7 +40,12 @@ namespace avro {
 /** Specify type of compression to use when writing data files. */
 enum Codec {
   NULL_CODEC,
-  DEFLATE_CODEC
+  DEFLATE_CODEC,
+
+#ifdef SNAPPY_CODEC_AVAILABLE
+  SNAPPY_CODEC
+#endif
+
 };
 
 /**
@@ -185,7 +190,7 @@ class AVRO_DECL DataFileReaderBase : boost::noncopyable {
     // for compressed buffer
     boost::scoped_ptr<boost::iostreams::filtering_istream> os_;
     std::vector<char> compressed_;
-
+    std::string uncompressed;
     void readHeader();
 
     bool readDataBlock();
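
The new SNAPPY_CODEC enumerator is only defined when the build found libsnappy
(SNAPPY_CODEC_AVAILABLE). A minimal usage sketch, modelled on the testSnappy
case added to DataFileTests.cc further down; the file name, schema and record
count here are illustrative and not part of the patch:

    #include <stdint.h>
    #include "DataFile.hh"
    #include "Compiler.hh"
    #include "Specific.hh"

    int main() {
    #ifdef SNAPPY_CODEC_AVAILABLE
        avro::ValidSchema schema = avro::compileJsonSchemaFromString("\"int\"");
        {
            // 16 KB sync interval, Snappy block compression
            avro::DataFileWriter<int32_t> writer("ints.avro", schema,
                                                 16 * 1024, avro::SNAPPY_CODEC);
            for (int32_t i = 0; i < 1000; ++i)
                writer.write(i);
            writer.close();
        }
        {
            avro::DataFileReader<int32_t> reader("ints.avro", schema);
            int32_t v;
            while (reader.read(v)) {
                // consume v
            }
        }
    #endif
        return 0;
    }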

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/api/GenericDatum.hh
----------------------------------------------------------------------
diff --git a/lang/c++/api/GenericDatum.hh b/lang/c++/api/GenericDatum.hh
index 5efcb7f..2b1b3a4 100644
--- a/lang/c++/api/GenericDatum.hh
+++ b/lang/c++/api/GenericDatum.hh
@@ -66,18 +66,14 @@ public:
     /**
      * The avro data type this datum holds.
      */
-    Type type() const {
-        return type_;
-    }
+    Type type() const;
 
     /**
      * Returns the value held by this datum.
      * T The type for the value. This must correspond to the
      * avro type returned by type().
      */
-    template<typename T> const T& value() const {
-        return *boost::any_cast<T>(&value_);
-    }
+    template<typename T> const T& value() const;
 
     /**
      * Returns the reference to the value held by this datum, which
@@ -88,9 +84,7 @@ public:
      * T The type for the value. This must correspond to the
      * avro type returned by type().
      */
-    template<typename T> T& value() {
-        return *boost::any_cast<T>(&value_);
-    }
+    template<typename T> T& value();
 
     /**
      * Returns true if and only if this datum is a union.
@@ -153,7 +147,7 @@ public:
     GenericDatum(const NodePtr& schema, const T& v) :
         type_(schema->type()) {
         init(schema);
-        value<T>() = v;
+        *boost::any_cast<T>(&value_) = v;
     }
 
     /**
@@ -493,6 +487,24 @@ public:
     }
 };
 
+inline Type GenericDatum::type() const {
+    return (type_ == AVRO_UNION) ?
+        boost::any_cast<GenericUnion>(&value_)->datum().type() :
+        type_;
+}
+
+template<typename T> T& GenericDatum::value() {
+    return (type_ == AVRO_UNION) ?
+        boost::any_cast<GenericUnion>(&value_)->datum().value<T>() :
+        *boost::any_cast<T>(&value_);
+}
+
+template<typename T> const T& GenericDatum::value() const {
+    return (type_ == AVRO_UNION) ?
+        boost::any_cast<GenericUnion>(&value_)->datum().value<T>() :
+        *boost::any_cast<T>(&value_);
+}
+
 inline size_t GenericDatum::unionBranch() const {
     return boost::any_cast<GenericUnion>(&value_)->currentBranch();
 }
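
The inline definitions above are the behavioral change tracked as AVRO-1750 in
CHANGES.txt: for a datum holding a union, type() and value<T>() now resolve to
the currently selected branch instead of reporting AVRO_UNION, while isUnion()
and unionBranch() still describe the union itself. A small sketch of the
resulting API; the schema string and value are illustrative:

    #include <stdint.h>
    #include <iostream>
    #include "Compiler.hh"
    #include "GenericDatum.hh"

    int main() {
        avro::ValidSchema s =
            avro::compileJsonSchemaFromString("[\"null\", \"int\"]");
        avro::GenericDatum d(s.root());       // datum holding the union

        d.selectBranch(1);                    // select the "int" branch
        d.value<int32_t>() = 42;              // resolves through the union

        std::cout << d.isUnion()                         // still a union: 1
                  << " " << d.unionBranch()              // branch 1
                  << " " << (d.type() == avro::AVRO_INT) // 1, not AVRO_UNION
                  << " " << d.value<int32_t>()           // 42
                  << std::endl;
        return 0;
    }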

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/api/Specific.hh
----------------------------------------------------------------------
diff --git a/lang/c++/api/Specific.hh b/lang/c++/api/Specific.hh
index ef50318..0a00fb3 100644
--- a/lang/c++/api/Specific.hh
+++ b/lang/c++/api/Specific.hh
@@ -25,6 +25,7 @@
 #include <algorithm>
 
 #include "boost/array.hpp"
+#include "boost/blank.hpp"
 
 #include "Config.hh"
 #include "Encoder.hh"
@@ -46,6 +47,8 @@
  */
 namespace avro {
 
+typedef boost::blank null;
+
 template <typename T> void encode(Encoder& e, const T& t);
 template <typename T> void decode(Decoder& d, T& t);
 
@@ -290,6 +293,27 @@ template <typename T> struct codec_traits<std::map<std::string, T> > {
 };
 
 /**
+* codec_traits for Avro null.
+*/
+template <> struct codec_traits<avro::null> {
+	/**
+	* Encodes a given value.
+	*/
+	static void encode(Encoder& e, const avro::null&) {
+		e.encodeNull();
+	}
+
+	/**
+	* Decodes into a given value.
+	*/
+	static void decode(Decoder& d, avro::null&) {
+		d.decodeNull();
+	}
+};
+
+
+
+/**
  * Generic encoder function that makes use of the codec_traits.
  */
 template <typename T>
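
The avro::null alias (boost::blank) gives specific/generated code a concrete
C++ type for Avro "null" fields, and the codec_traits specialization above
routes it through encodeNull()/decodeNull(). A minimal round-trip sketch,
illustrative and not part of the patch:

    #include <memory>
    #include "Specific.hh"
    #include "Stream.hh"

    int main() {
        std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
        avro::EncoderPtr e = avro::binaryEncoder();
        e->init(*out);

        avro::null n;           // boost::blank: carries no data
        avro::encode(*e, n);    // codec_traits<avro::null>::encode -> encodeNull()
        e->flush();

        std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
        avro::DecoderPtr d = avro::binaryDecoder();
        d->init(*in);
        avro::decode(*d, n);    // decodeNull(): consumes zero bytes
        return 0;
    }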

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/Compiler.cc
----------------------------------------------------------------------
diff --git a/lang/c++/impl/Compiler.cc b/lang/c++/impl/Compiler.cc
index 9670844..be5fe3f 100644
--- a/lang/c++/impl/Compiler.cc
+++ b/lang/c++/impl/Compiler.cc
@@ -79,7 +79,7 @@ static bool isFullName(const string& s)
 {
     return s.find('.') != string::npos;
 }
-    
+
 static Name getName(const string& name, const string& ns)
 {
     return (isFullName(name)) ? Name(name) : Name(name, ns);
@@ -143,7 +143,7 @@ const int64_t getLongField(const Entity& e, const Object& m,
     ensureType<int64_t>(it->second, fieldName);
     return it->second.longValue();
 }
-    
+
 struct Field {
     const string& name;
     const NodePtr schema;
@@ -283,32 +283,9 @@ static GenericDatum makeGenericDatum(NodePtr n, const Entity& e,
     case AVRO_UNION:
     {
         GenericUnion result(n);
-        string name;
-        Entity e2;
-        if (e.type() == json::etNull) {
-            name = "null";
-            e2 = e;
-        } else {
-            assertType(e, json::etObject);
-            const map<string, Entity>& v = e.objectValue();
-            if (v.size() != 1) {
-                throw Exception(boost::format("Default value for "
-                    "union has more than one field: %1%") % e.toString());
-            }
-            map<string, Entity>::const_iterator it = v.begin();
-            name = it->first;
-            e2 = it->second;
-        }
-        for (size_t i = 0; i < n->leaves(); ++i) {
-            const NodePtr& b = n->leafAt(i);
-            if (nameof(b) == name) {
-                result.selectBranch(i);
-                result.datum() = makeGenericDatum(b, e2, st);
-                return GenericDatum(n, result);
-            }
-        }
-        throw Exception(boost::format("Invalid default value %1%") %
-            e.toString());
+        result.selectBranch(0);
+        result.datum() = makeGenericDatum(n->leafAt(0), e, st);
+        return GenericDatum(n, result);
     }
     case AVRO_FIXED:
         assertType(e, json::etString);
@@ -334,12 +311,12 @@ static Field makeField(const Entity& e, SymbolTable& st, const string& ns)
 
 static NodePtr makeRecordNode(const Entity& e,
     const Name& name, const Object& m, SymbolTable& st, const string& ns)
-{        
+{
     const Array& v = getArrayField(e, m, "fields");
     concepts::MultiAttribute<string> fieldNames;
     concepts::MultiAttribute<NodePtr> fieldValues;
     vector<GenericDatum> defaultValues;
-    
+
     for (Array::const_iterator it = v.begin(); it != v.end(); ++it) {
         Field f = makeField(*it, st, ns);
         fieldNames.add(f.name);
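
With the branch-selection logic removed, a union field's default value is now
parsed against the first branch of the union, matching the Avro specification,
so the {"branch": value} wrapper the old code expected is no longer needed
(compare the bigrecord_r change below). A small sketch of a schema that now
compiles cleanly; the record and field names are illustrative:

    #include "Compiler.hh"
    #include "ValidSchema.hh"

    int main() {
        const char* json =
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":["
            "  {\"name\":\"u1\",\"type\":[\"string\",\"int\"],\"default\":\"sval\"},"
            "  {\"name\":\"u2\",\"type\":[\"null\",\"string\"],\"default\":null}"
            "]}";
        // Defaults match the first branch ("string" and "null" respectively),
        // exactly as in the updated bigrecord_r test schema.
        avro::ValidSchema vs = avro::compileJsonSchemaFromString(json);
        return 0;
    }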

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/DataFile.cc
----------------------------------------------------------------------
diff --git a/lang/c++/impl/DataFile.cc b/lang/c++/impl/DataFile.cc
index 035dd27..ee8f62c 100644
--- a/lang/c++/impl/DataFile.cc
+++ b/lang/c++/impl/DataFile.cc
@@ -26,6 +26,11 @@
 #include <boost/iostreams/device/file.hpp>
 #include <boost/iostreams/filter/gzip.hpp>
 #include <boost/iostreams/filter/zlib.hpp>
+#include <boost/crc.hpp>  // for boost::crc_32_type
+
+#ifdef SNAPPY_CODEC_AVAILABLE
+#include <snappy.h>
+#endif
 
 namespace avro {
 using std::auto_ptr;
@@ -43,6 +48,10 @@ const string AVRO_CODEC_KEY("avro.codec");
 const string AVRO_NULL_CODEC("null");
 const string AVRO_DEFLATE_CODEC("deflate");
 
+#ifdef SNAPPY_CODEC_AVAILABLE
+const string AVRO_SNAPPY_CODEC = "snappy";
+#endif
+
 const size_t minSyncInterval = 32;
 const size_t maxSyncInterval = 1u << 30;
 const size_t defaultSyncInterval = 64 * 1024;
@@ -83,8 +92,12 @@ DataFileWriterBase::DataFileWriterBase(const char* filename,
       setMetadata(AVRO_CODEC_KEY, AVRO_NULL_CODEC);
     } else if (codec_ == DEFLATE_CODEC) {
       setMetadata(AVRO_CODEC_KEY, AVRO_DEFLATE_CODEC);
+#ifdef SNAPPY_CODEC_AVAILABLE
+    } else if (codec_ == SNAPPY_CODEC) {
+      setMetadata(AVRO_CODEC_KEY, AVRO_SNAPPY_CODEC);
+#endif
     } else {
-      throw Exception("Unknown codec codec");
+      throw Exception(boost::format("Unknown codec: %1%") % codec);
     }
     setMetadata(AVRO_SCHEMA_KEY, toString(schema));
 
@@ -117,13 +130,11 @@ void DataFileWriterBase::sync()
         encoderPtr_->flush();
         std::auto_ptr<InputStream> in = memoryInputStream(*buffer_);
         copy(*in, *stream_);
-    } else {
+    } else if (codec_ == DEFLATE_CODEC) {
         std::vector<char> buf;
         {
             boost::iostreams::filtering_ostream os;
-            if (codec_ == DEFLATE_CODEC) {
-                os.push(boost::iostreams::zlib_compressor(get_zlib_params()));
-            }
+            os.push(boost::iostreams::zlib_compressor(get_zlib_params()));
             os.push(boost::iostreams::back_inserter(buf));
             const uint8_t* data;
             size_t len;
@@ -139,6 +150,49 @@ void DataFileWriterBase::sync()
         avro::encode(*encoderPtr_, byteCount);
         encoderPtr_->flush();
         copy(*in, *stream_);
+#ifdef SNAPPY_CODEC_AVAILABLE
+    } else if (codec_ == SNAPPY_CODEC) {
+        std::vector<char> temp;
+        std::string compressed;
+        boost::crc_32_type crc;
+        {
+            boost::iostreams::filtering_ostream os;
+            os.push(boost::iostreams::back_inserter(temp));
+            const uint8_t* data;
+            size_t len;
+
+            std::auto_ptr<InputStream> input = memoryInputStream(*buffer_);
+            while (input->next(&data, &len)) {
+                boost::iostreams::write(os, reinterpret_cast<const char*>(data),
+                        len);
+            }
+        } // make sure all is flushed
+
+        crc.process_bytes(reinterpret_cast<const char*>(&temp[0]), temp.size());
+        // For Snappy, add the CRC32 checksum
+        int32_t checksum = crc();
+
+        // Now compress
+        size_t compressed_size = snappy::Compress(
+                reinterpret_cast<const char*>(&temp[0]), temp.size(),
+                &compressed);
+        temp.clear();
+        {
+            boost::iostreams::filtering_ostream os;
+            os.push(boost::iostreams::back_inserter(temp));
+            boost::iostreams::write(os, compressed.c_str(), compressed_size);
+        }
+        temp.push_back((checksum >> 24) & 0xFF);
+        temp.push_back((checksum >> 16) & 0xFF);
+        temp.push_back((checksum >> 8) & 0xFF);
+        temp.push_back(checksum & 0xFF);
+        std::auto_ptr<InputStream> in = memoryInputStream(
+                reinterpret_cast<const uint8_t*>(&temp[0]), temp.size());
+        int64_t byteCount = temp.size();
+        avro::encode(*encoderPtr_, byteCount);
+        encoderPtr_->flush();
+        copy(*in, *stream_);
+#endif
     }
 
     encoderPtr_->init(*stream_);
@@ -320,7 +374,7 @@ bool DataFileReaderBase::readDataBlock()
     if (codec_ == NULL_CODEC) {
         dataDecoder_->init(*st);
         dataStream_ = st;
-    } else {
+    } else if (codec_ == DEFLATE_CODEC) {
         compressed_.clear();
         const uint8_t* data;
         size_t len;
@@ -329,17 +383,52 @@ bool DataFileReaderBase::readDataBlock()
         }
         // boost::iostreams::write(os, reinterpret_cast<const char*>(data), len);
         os_.reset(new boost::iostreams::filtering_istream());
-        if (codec_ == DEFLATE_CODEC) {
-            os_->push(boost::iostreams::zlib_decompressor(get_zlib_params()));
-        } else {
-            throw Exception("Bad codec");
-        }
+        os_->push(boost::iostreams::zlib_decompressor(get_zlib_params()));
         os_->push(boost::iostreams::basic_array_source<char>(
             &compressed_[0], compressed_.size()));
 
         std::auto_ptr<InputStream> in = istreamInputStream(*os_);
         dataDecoder_->init(*in);
         dataStream_ = in;
+#ifdef SNAPPY_CODEC_AVAILABLE
+    } else if (codec_ == SNAPPY_CODEC) {
+        boost::crc_32_type crc;
+        uint32_t checksum = 0;
+        compressed_.clear();
+        uncompressed.clear();
+        const uint8_t* data;
+        size_t len;
+        while (st->next(&data, &len)) {
+            compressed_.insert(compressed_.end(), data, data + len);
+        }
+        len = compressed_.size();
+        int b1 = compressed_[len - 4] & 0xFF;
+        int b2 = compressed_[len - 3] & 0xFF;
+        int b3 = compressed_[len - 2] & 0xFF;
+        int b4 = compressed_[len - 1] & 0xFF;
+
+        checksum = (b1 << 24) + (b2 << 16) + (b3 << 8) + (b4);
+        if (!snappy::Uncompress(reinterpret_cast<const char*>(&compressed_[0]),
+                len - 4, &uncompressed)) {
+            throw Exception(
+                    "Snappy Compression reported an error when decompressing");
+        }
+        crc.process_bytes(uncompressed.c_str(), uncompressed.size());
+        uint32_t c = crc();
+        if (checksum != c) {
+            throw Exception(boost::format("Checksum did not match for Snappy compression: Expected: %1%, computed: %2%") % checksum % c);
+        }
+        os_.reset(new boost::iostreams::filtering_istream());
+        os_->push(
+                boost::iostreams::basic_array_source<char>(uncompressed.c_str(),
+                        uncompressed.size()));
+        std::auto_ptr<InputStream> in = istreamInputStream(*os_);
+
+        dataDecoder_->init(*in);
+        dataStream_ = in;
+#endif
+    } else {
+        throw Exception("Bad codec");
     }
     return true;
 }
@@ -387,6 +476,11 @@ void DataFileReaderBase::readHeader()
     it = metadata_.find(AVRO_CODEC_KEY);
     if (it != metadata_.end() && toString(it->second) == AVRO_DEFLATE_CODEC) {
         codec_ = DEFLATE_CODEC;
+#ifdef SNAPPY_CODEC_AVAILABLE
+    } else if (it != metadata_.end()
+            && toString(it->second) == AVRO_SNAPPY_CODEC) {
+        codec_ = SNAPPY_CODEC;
+#endif
     } else {
         codec_ = NULL_CODEC;
         if (it != metadata_.end() && toString(it->second) != AVRO_NULL_CODEC) {

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/Generic.cc
----------------------------------------------------------------------
diff --git a/lang/c++/impl/Generic.cc b/lang/c++/impl/Generic.cc
index 884fadb..9eaa56f 100644
--- a/lang/c++/impl/Generic.cc
+++ b/lang/c++/impl/Generic.cc
@@ -58,8 +58,6 @@ void GenericReader::read(GenericDatum& datum, Decoder& d, bool isResolving)
 {
     if (datum.isUnion()) {
         datum.selectBranch(d.decodeUnionIndex());
-        read(datum.value<GenericUnion>().datum(), d, isResolving);
-        return;
     }
     switch (datum.type()) {
     case AVRO_NULL:
@@ -176,8 +174,6 @@ void GenericWriter::write(const GenericDatum& datum, Encoder& e)
 {
     if (datum.isUnion()) {
         e.encodeUnionIndex(datum.unionBranch());
-        write(datum.value<GenericUnion>().datum(), e);
-        return;
     }
     switch (datum.type()) {
     case AVRO_NULL:

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/avrogencpp.cc
----------------------------------------------------------------------
diff --git a/lang/c++/impl/avrogencpp.cc b/lang/c++/impl/avrogencpp.cc
index a6a858d..a44fe7d 100644
--- a/lang/c++/impl/avrogencpp.cc
+++ b/lang/c++/impl/avrogencpp.cc
@@ -173,6 +173,8 @@ string CodeGen::cppTypeOf(const NodePtr& n)
         return cppTypeOf(resolveSymbol(n));
     case avro::AVRO_UNION:
         return fullname(done[n]);
+    case avro::AVRO_NULL:
+        return "avro::null";
     default:
         return "$Undefined$";
     }
@@ -215,6 +217,7 @@ static string cppNameOf(const NodePtr& n)
 string CodeGen::generateRecordType(const NodePtr& n)
 {
     size_t c = n->leaves();
+    string decoratedName = decorate(n->name());
     vector<string> types;
     for (size_t i = 0; i < c; ++i) {
         types.push_back(generateType(n->leafAt(i)));
@@ -225,7 +228,6 @@ string CodeGen::generateRecordType(const NodePtr& n)
         return it->second;
     }
 
-    string decoratedName = decorate(n->name());
     os_ << "struct " << decoratedName << " {\n";
     if (! noUnion_) {
         for (size_t i = 0; i < c; ++i) {
@@ -264,7 +266,7 @@ string CodeGen::generateRecordType(const NodePtr& n)
     }
     os_ << "        { }\n";
     os_ << "};\n\n";
-    return decorate(n->name());
+    return decoratedName;
 }
 
 void makeCanonical(string& s, bool foldCase)
@@ -391,7 +393,7 @@ string CodeGen::generateUnionType(const NodePtr& n)
     pendingConstructors.push_back(PendingConstructor(result, types[0],
         n->leafAt(0)->type() != avro::AVRO_NULL));
     os_ << "};\n\n";
-    
+
     return result;
 }
 
@@ -425,9 +427,31 @@ string CodeGen::doGenerateType(const NodePtr& n)
     case avro::AVRO_FIXED:
         return cppTypeOf(n);
     case avro::AVRO_ARRAY:
-        return "std::vector<" + generateType(n->leafAt(0)) + " >";
+    {
+        const NodePtr& ln = n->leafAt(0);
+        string dn;
+        if (doing.find(n) == doing.end()) {
+            doing.insert(n);
+            dn = generateType(ln);
+            doing.erase(n);
+        } else {
+            dn = generateDeclaration(ln);
+        }
+        return "std::vector<" + dn + " >";
+    }
     case avro::AVRO_MAP:
-        return "std::map<std::string, " + generateType(n->leafAt(1)) + " >";
+    {
+        const NodePtr& ln = n->leafAt(1);
+        string dn;
+        if (doing.find(n) == doing.end()) {
+            doing.insert(n);
+            dn = generateType(ln);
+            doing.erase(n);
+        } else {
+            dn = generateDeclaration(ln);
+        }
+        return "std::map<std::string, " + dn + " >";
+    }
     case avro::AVRO_RECORD:
         return generateRecordType(n);
     case avro::AVRO_ENUM:
@@ -478,7 +502,7 @@ void CodeGen::generateEnumTraits(const NodePtr& n)
 	string dname = decorate(n->name());
 	string fn = fullname(dname);
 	size_t c = n->names();
-	string first; 
+	string first;
 	string last;
 	if (!ns_.empty())
 	{
@@ -495,7 +519,7 @@ void CodeGen::generateEnumTraits(const NodePtr& n)
 	}
 	os_ << "template<> struct codec_traits<" << fn << "> {\n"
 		<< "    static void encode(Encoder& e, " << fn << " v) {\n"
-		<< "		if (v < "  << first << " || v > " << last << ")\n" 
+		<< "		if (v < "  << first << " || v > " << last << ")\n"
 		<< "		{\n"
 		<< "			std::ostringstream error;\n"
 		<< "			error << \"enum value \" << v << \" is out of bound for " << fn << " and cannot be encoded\";\n"
@@ -505,7 +529,7 @@ void CodeGen::generateEnumTraits(const NodePtr& n)
 		<< "    }\n"
 		<< "    static void decode(Decoder& d, " << fn << "& v) {\n"
 		<< "		size_t index = d.decodeEnum();\n"
-		<< "		if (index < " << first << " || index > " << last << ")\n" 
+		<< "		if (index < " << first << " || index > " << last << ")\n"
 		<< "		{\n"
 		<< "			std::ostringstream error;\n"
 		<< "			error << \"enum value \" << index << \" is out of bound for " << fn << " and cannot be decoded\";\n"
@@ -650,7 +674,7 @@ void CodeGen::generateTraits(const NodePtr& n)
 
 void CodeGen::emitCopyright()
 {
-    os_ << 
+    os_ <<
         "/**\n"
         " * Licensed to the Apache Software Foundation (ASF) under one\n"
         " * or more contributor license agreements.  See the NOTICE file\n"

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/json/JsonIO.cc
----------------------------------------------------------------------
diff --git a/lang/c++/impl/json/JsonIO.cc b/lang/c++/impl/json/JsonIO.cc
index 2e7d82f..be5cc2f 100644
--- a/lang/c++/impl/json/JsonIO.cc
+++ b/lang/c++/impl/json/JsonIO.cc
@@ -76,7 +76,7 @@ JsonParser::Token JsonParser::doAdvance()
 {
     char ch = next();
     if (ch == ']') {
-        if (curState == stArray0 || stArrayN) {
+        if (curState == stArray0 || curState == stArrayN) {
             curState = stateStack.top();
             stateStack.pop();
             return tkArrayEnd;
@@ -84,7 +84,7 @@ JsonParser::Token JsonParser::doAdvance()
             throw unexpected(ch);
         }
     } else if (ch == '}') {
-        if (curState == stObject0 || stObjectN) {
+        if (curState == stObject0 || curState == stObjectN) {
             curState = stateStack.top();
             stateStack.pop();
             return tkObjectEnd;
@@ -171,6 +171,10 @@ JsonParser::Token JsonParser::tryNumber(char ch)
                     state = 3;
                     sv.push_back(ch);
                     continue;
+                } else if (ch == 'e' || ch == 'E') {
+                    sv.push_back(ch);
+                    state = 5;
+                    continue;
                 }
                 hasNext = true;
             }
@@ -185,6 +189,10 @@ JsonParser::Token JsonParser::tryNumber(char ch)
                     state = 3;
                     sv.push_back(ch);
                     continue;
+                } else if (ch == 'e' || ch == 'E') {
+                    sv.push_back(ch);
+                    state = 5;
+                    continue;
                 }
                 hasNext = true;
             }
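
These two new branches let an exponent follow an integer mantissa directly
(jumping to the parser's exponent state), which is the AVRO-1930 fix. A quick
sketch of the effect through the public JSON decoder; the schema and literal
are illustrative:

    #include <stdint.h>
    #include <memory>
    #include <iostream>
    #include "Compiler.hh"
    #include "Decoder.hh"
    #include "Stream.hh"

    int main() {
        const char json[] = "1e4";    // integer mantissa + exponent
        avro::ValidSchema schema = avro::compileJsonSchemaFromString("\"double\"");
        avro::DecoderPtr d = avro::jsonDecoder(schema);
        std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(
            reinterpret_cast<const uint8_t*>(json), sizeof(json) - 1);
        d->init(*in);
        std::cout << d->decodeDouble() << std::endl;   // prints 10000
        return 0;
    }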

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/json/JsonIO.hh
----------------------------------------------------------------------
diff --git a/lang/c++/impl/json/JsonIO.hh b/lang/c++/impl/json/JsonIO.hh
index a5ada2d..c87f73a 100644
--- a/lang/c++/impl/json/JsonIO.hh
+++ b/lang/c++/impl/json/JsonIO.hh
@@ -24,6 +24,7 @@
 #include <string>
 #include <sstream>
 #include <boost/math/special_functions/fpclassify.hpp>
+#include <boost/lexical_cast.hpp>
 #include <boost/utility.hpp>
 
 #include "Config.hh"
@@ -133,7 +134,8 @@ public:
     }
 };
 
-struct AVRO_DECL JsonNullFormatter {
+class AVRO_DECL JsonNullFormatter {
+public:
     JsonNullFormatter(StreamWriter&) { }
 
     void handleObjectStart() {}
@@ -304,7 +306,7 @@ public:
     void encodeNumber(T t) {
         sep();
         std::ostringstream oss;
-        oss << t;
+        oss << boost::lexical_cast<std::string>(t);
         const std::string& s = oss.str();
         out_.writeBytes(reinterpret_cast<const uint8_t*>(&s[0]), s.size());
         sep2();
@@ -314,7 +316,7 @@ public:
         sep();
         std::ostringstream oss;
         if (boost::math::isfinite(t)) {
-            oss << t;
+            oss << boost::lexical_cast<std::string>(t);
         } else if (boost::math::isnan(t)) {
             oss << "NaN";
         } else if (t == std::numeric_limits<double>::infinity()) {

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/impl/parsing/Symbol.hh
----------------------------------------------------------------------
diff --git a/lang/c++/impl/parsing/Symbol.hh b/lang/c++/impl/parsing/Symbol.hh
index a7c0997..2911752 100644
--- a/lang/c++/impl/parsing/Symbol.hh
+++ b/lang/c++/impl/parsing/Symbol.hh
@@ -737,6 +737,9 @@ public:
             if (s.isImplicitAction()) {
                 handler_.handle(s);
                 parsingStack.pop();
+            } else if (s.kind() == Symbol::sSkipStart) {
+                parsingStack.pop();
+                skip(*decoder_);
             } else {
                 break;
             }

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/bigrecord
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/bigrecord b/lang/c++/jsonschemas/bigrecord
index 02dbccb..ba430a0 100644
--- a/lang/c++/jsonschemas/bigrecord
+++ b/lang/c++/jsonschemas/bigrecord
@@ -102,6 +102,10 @@
         {
             "name": "bytes",
             "type": "bytes"
+        },
+		        {
+            "name": "null",
+            "type": "null"
         }
     ]
 }

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/bigrecord_r
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/bigrecord_r b/lang/c++/jsonschemas/bigrecord_r
index f079162..7c477cd 100644
--- a/lang/c++/jsonschemas/bigrecord_r
+++ b/lang/c++/jsonschemas/bigrecord_r
@@ -70,13 +70,11 @@
         {
             "name": "union1WithDefaultValue",
             "type": [ "string", "int" ],
-            "default": {
-                "string": "sval"
-            }
+            "default": "sval"
         },
         {
             "name": "union2WithDefaultValue",
-            "type": [ "string", "null" ],
+            "type": [ "null", "string" ],
             "default": null
         },
         {

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/crossref
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/crossref b/lang/c++/jsonschemas/crossref
new file mode 100644
index 0000000..fd1d38a
--- /dev/null
+++ b/lang/c++/jsonschemas/crossref
@@ -0,0 +1,28 @@
+{
+  "name": "A",
+  "type": "record",
+  "fields": [
+    {
+      "name": "edges",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "B",
+          "fields": [
+            { "name": "child",
+              "type": [{
+                "type": "record",
+                "name": "C",
+                "fields": [
+                  { "name": "x", "type": { "type": "map", "values": "A" } }
+                ]
+              }, "int"]
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
+

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/padded_record
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/padded_record b/lang/c++/jsonschemas/padded_record
new file mode 100644
index 0000000..cac0e97
--- /dev/null
+++ b/lang/c++/jsonschemas/padded_record
@@ -0,0 +1,14 @@
+{
+    "type" : "record",
+    "name" : "PaddedRecord",
+    "fields" : [
+        {
+            "type" : "int",
+            "name" : "index"
+        },
+        {
+            "type" : "bytes",
+            "name" : "padding"
+        }
+    ]
+}

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/primitivetypes
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/primitivetypes b/lang/c++/jsonschemas/primitivetypes
new file mode 100644
index 0000000..0512323
--- /dev/null
+++ b/lang/c++/jsonschemas/primitivetypes
@@ -0,0 +1,15 @@
+{
+    "name": "TestPrimitiveTypes",
+    "type": "record",
+    "fields": [
+        { "name": "Null", "type": "null" },
+        { "name": "Boolean", "type": "boolean" },
+        { "name": "Int", "type": "int" },
+        { "name": "Long", "type": "long" },
+        { "name": "Float", "type": "float" },
+        { "name": "Double", "type": "double" },
+        { "name": "Bytes", "type": "bytes" },
+        { "name": "String", "type": "string" },
+        { "name": "SecondNull", "type": "null" }
+    ]
+}

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/tree1
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/tree1 b/lang/c++/jsonschemas/tree1
new file mode 100644
index 0000000..3add01d
--- /dev/null
+++ b/lang/c++/jsonschemas/tree1
@@ -0,0 +1,25 @@
+{
+  "name": "Node",
+  "type": "record",
+  "fields": [
+    {
+      "name": "payload",
+      "type": "int",
+      "default": 0
+    },
+    {
+      "name": "edges",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "Edge",
+          "fields": [
+            { "name": "child", "type": "Node" },
+            { "name": "label", "type": "string" }
+          ]
+        }
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/jsonschemas/tree2
----------------------------------------------------------------------
diff --git a/lang/c++/jsonschemas/tree2 b/lang/c++/jsonschemas/tree2
new file mode 100644
index 0000000..1cb69b4
--- /dev/null
+++ b/lang/c++/jsonschemas/tree2
@@ -0,0 +1,18 @@
+{
+  "name": "Node",
+  "type": "record",
+  "fields": [
+    {
+      "name": "payload",
+      "type": "int",
+      "default": 0
+    },
+    {
+      "name": "edges",
+      "type": {
+        "type": "map",
+        "values": "Node"
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/test/AvrogencppTests.cc
----------------------------------------------------------------------
diff --git a/lang/c++/test/AvrogencppTests.cc b/lang/c++/test/AvrogencppTests.cc
index 26d0155..1b42943 100644
--- a/lang/c++/test/AvrogencppTests.cc
+++ b/lang/c++/test/AvrogencppTests.cc
@@ -27,6 +27,10 @@
 #include "recursive.hh"
 #include "circulardep.hh"
 #include "reuse.hh"
+#include "tree1.hh"
+#include "tree2.hh"
+#include "crossref.hh"
+#include "primitivetypes.hh"
 #include "Compiler.hh"
 
 #include <fstream>

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/test/CodecTests.cc
----------------------------------------------------------------------
diff --git a/lang/c++/test/CodecTests.cc b/lang/c++/test/CodecTests.cc
index c0ca1e0..f8bbe84 100644
--- a/lang/c++/test/CodecTests.cc
+++ b/lang/c++/test/CodecTests.cc
@@ -1274,6 +1274,21 @@ static const TestData4 data4[] = {
         "[Rc1sI]",
         { "100", NULL }, 1 },
 
+    // Record of array of record with deleted field as last field
+    { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+        "{\"name\": \"g1\","
+            "\"type\":{\"type\":\"array\",\"items\":{"
+                "\"name\":\"item\",\"type\":\"record\",\"fields\":["
+                "{\"name\":\"f1\", \"type\":\"int\"},"
+                "{\"name\":\"f2\", \"type\": \"long\", \"default\": 0}]}}}]}", "[c1sIL]",
+        { "10", "11", NULL },
+        "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+        "{\"name\": \"g1\","
+            "\"type\":{\"type\":\"array\",\"items\":{"
+                "\"name\":\"item\",\"type\":\"record\",\"fields\":["
+                "{\"name\":\"f1\", \"type\":\"int\"}]}}}]}", "R[c1sI]",
+        { "10", NULL }, 2 },
+
     // Enum resolution
     { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"x\",\"y\",\"z\"]}",
         "e2",
@@ -1302,20 +1317,12 @@ static const TestData4 data4[] = {
         "[c2sU1IsU1I]", { "100", "100", NULL } ,
         "{\"type\":\"array\", \"items\": \"int\"}",
             "[c2sIsI]", { "100", "100", NULL }, 2 },
-    { "{\"type\":\"array\", \"items\":[ \"long\", \"int\"]}",
-        "[c1sU1Ic1sU1I]", { "100", "100", NULL } ,
-        "{\"type\":\"array\", \"items\": \"int\"}",
-            "[c1sIc1sI]", { "100", "100", NULL }, 2 },
 
     // Map of unions
     { "{\"type\":\"map\", \"values\":[ \"long\", \"int\"]}",
         "{c2sS10U1IsS10U1I}", { "k1", "100", "k2", "100", NULL } ,
         "{\"type\":\"map\", \"values\": \"int\"}",
             "{c2sS10IsS10I}", { "k1", "100", "k2", "100", NULL }, 2 },
-    { "{\"type\":\"map\", \"values\":[ \"long\", \"int\"]}",
-        "{c1sS10U1Ic1sS10U1I}", { "k1", "100", "k2", "100", NULL } ,
-        "{\"type\":\"map\", \"values\": \"int\"}",
-            "{c1sS10Ic1sS10I}", { "k1", "100", "k2", "100", NULL }, 2 },
 
     // Union + promotion
     { "\"int\"", "I", { "100", NULL },
@@ -1339,6 +1346,20 @@ static const TestData4 data4[] = {
         { "1", "100", "10.75", NULL }, 1 },
 };
 
+static const TestData4 data4BinaryOnly[] = {
+    // Array of unions
+    { "{\"type\":\"array\", \"items\":[ \"long\", \"int\"]}",
+        "[c1sU1Ic1sU1I]", { "100", "100", NULL } ,
+        "{\"type\":\"array\", \"items\": \"int\"}",
+            "[c1sIc1sI]", { "100", "100", NULL }, 2 },
+
+    // Map of unions
+    { "{\"type\":\"map\", \"values\":[ \"long\", \"int\"]}",
+        "{c1sS10U1Ic1sS10U1I}", { "k1", "100", "k2", "100", NULL } ,
+        "{\"type\":\"map\", \"values\": \"int\"}",
+            "{c1sS10Ic1sS10I}", { "k1", "100", "k2", "100", NULL }, 2 },
+};
+
 #define COUNTOF(x)  sizeof(x) / sizeof(x[0])
 #define ENDOF(x)    (x) + COUNTOF(x)
 
@@ -1405,6 +1426,21 @@ struct BinaryEncoderResolvingDecoderFactory : public BinaryEncoderFactory {
     }
 };
 
+struct JsonEncoderResolvingDecoderFactory {
+    static EncoderPtr newEncoder(const ValidSchema& schema) {
+        return jsonEncoder(schema);
+    }
+
+    static DecoderPtr newDecoder(const ValidSchema& schema) {
+        return resolvingDecoder(schema, schema, jsonDecoder(schema));
+    }
+
+    static DecoderPtr newDecoder(const ValidSchema& writer,
+        const ValidSchema& reader) {
+        return resolvingDecoder(writer, reader, jsonDecoder(writer));
+    }
+};
+
 struct ValidatingEncoderResolvingDecoderFactory :
     public ValidatingEncoderFactory {
     static DecoderPtr newDecoder(const ValidSchema& schema) {
@@ -1426,14 +1462,21 @@ void add_tests(boost::unit_test::test_suite& ts)
     ADD_TESTS(ts, JsonCodec, testCodec, data);
     ADD_TESTS(ts, JsonPrettyCodec, testCodec, data);
     ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory, testCodec, data);
+    ADD_TESTS(ts, JsonEncoderResolvingDecoderFactory, testCodec, data);
     ADD_TESTS(ts, ValidatingCodecFactory, testReaderFail, data2);
     ADD_TESTS(ts, ValidatingCodecFactory, testWriterFail, data2);
     ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory,
         testCodecResolving, data3);
+    ADD_TESTS(ts, JsonEncoderResolvingDecoderFactory,
+        testCodecResolving, data3);
     ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory,
         testCodecResolving2, data4);
+    ADD_TESTS(ts, JsonEncoderResolvingDecoderFactory,
+        testCodecResolving2, data4);
     ADD_TESTS(ts, ValidatingEncoderResolvingDecoderFactory,
         testCodecResolving2, data4);
+    ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory,
+        testCodecResolving2, data4BinaryOnly);
 
     ADD_TESTS(ts, ValidatingCodecFactory, testGeneric, data);
     ADD_TESTS(ts, ValidatingCodecFactory, testGenericResolving, data3);
@@ -1470,9 +1513,13 @@ static void testLimits(const EncoderPtr& e, const DecoderPtr& d)
         e->encodeDouble(std::numeric_limits<double>::infinity());
         e->encodeDouble(-std::numeric_limits<double>::infinity());
         e->encodeDouble(std::numeric_limits<double>::quiet_NaN());
+        e->encodeDouble(std::numeric_limits<double>::max());
+        e->encodeDouble(std::numeric_limits<double>::min());
         e->encodeFloat(std::numeric_limits<float>::infinity());
         e->encodeFloat(-std::numeric_limits<float>::infinity());
         e->encodeFloat(std::numeric_limits<float>::quiet_NaN());
+        e->encodeFloat(std::numeric_limits<float>::max());
+        e->encodeFloat(std::numeric_limits<float>::min());
         e->flush();
     }
 
@@ -1484,13 +1531,16 @@ static void testLimits(const EncoderPtr& e, const DecoderPtr& d)
         BOOST_CHECK_EQUAL(d->decodeDouble(),
             -std::numeric_limits<double>::infinity());
         BOOST_CHECK(boost::math::isnan(d->decodeDouble()));
+        BOOST_CHECK(d->decodeDouble() == std::numeric_limits<double>::max());
+        BOOST_CHECK(d->decodeDouble() == std::numeric_limits<double>::min());
         BOOST_CHECK_EQUAL(d->decodeFloat(),
             std::numeric_limits<float>::infinity());
         BOOST_CHECK_EQUAL(d->decodeFloat(),
             -std::numeric_limits<float>::infinity());
         BOOST_CHECK(boost::math::isnan(d->decodeFloat()));
+        BOOST_CHECK_CLOSE(d->decodeFloat(), std::numeric_limits<float>::max(), 0.00011);
+        BOOST_CHECK_CLOSE(d->decodeFloat(), std::numeric_limits<float>::min(), 0.00011);
     }
-
 }
 
 static void testLimitsBinaryCodec()
@@ -1504,9 +1554,13 @@ static void testLimitsJsonCodec()
         "{ \"name\": \"d1\", \"type\": \"double\" },"
         "{ \"name\": \"d2\", \"type\": \"double\" },"
         "{ \"name\": \"d3\", \"type\": \"double\" },"
+        "{ \"name\": \"d4\", \"type\": \"double\" },"
+        "{ \"name\": \"d5\", \"type\": \"double\" },"
         "{ \"name\": \"f1\", \"type\": \"float\" },"
         "{ \"name\": \"f2\", \"type\": \"float\" },"
-        "{ \"name\": \"f3\", \"type\": \"float\" }"
+        "{ \"name\": \"f3\", \"type\": \"float\" },"
+        "{ \"name\": \"f4\", \"type\": \"float\" },"
+        "{ \"name\": \"f5\", \"type\": \"float\" }"
     "]}";
     ValidSchema schema = parsing::makeValidSchema(s);
     testLimits(jsonEncoder(schema), jsonDecoder(schema));

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/test/DataFileTests.cc
----------------------------------------------------------------------
diff --git a/lang/c++/test/DataFileTests.cc b/lang/c++/test/DataFileTests.cc
index 95e80b1..27a7ce9 100644
--- a/lang/c++/test/DataFileTests.cc
+++ b/lang/c++/test/DataFileTests.cc
@@ -434,11 +434,44 @@ public:
         }
     }
 
+#ifdef SNAPPY_CODEC_AVAILABLE
+    void testSnappy() {
+        // Add enough objects to span multiple blocks
+        const size_t number_of_objects = 1000000;
+        // first create a large file
+        ValidSchema dschema = avro::compileJsonSchemaFromString(sch);
+        {
+            avro::DataFileWriter<ComplexInteger> writer(
+              filename, dschema, 16 * 1024, avro::SNAPPY_CODEC);
+
+            for (size_t i = 0; i < number_of_objects; ++i) {
+                ComplexInteger d;
+                d.re = i;
+                d.im = 2 * i;
+                writer.write(d);
+            }
+        }
+        {
+            avro::DataFileReader<ComplexInteger> reader(filename, dschema);
+            sleep(1);
+            std::vector<int64_t> found;
+            ComplexInteger record;
+            while (reader.read(record)) {
+                found.push_back(record.re);
+            }
+            BOOST_CHECK_EQUAL(found.size(), number_of_objects);
+            for (unsigned int i = 0; i < found.size(); ++i) {
+                BOOST_CHECK_EQUAL(found[i], i);
+            }
+        }
+    }
+#endif
+
     void testSchemaReadWrite() {
     uint32_t a=42;
     {
             avro::DataFileWriter<uint32_t> df(filename, writerSchema);
-        df.write(a);    
+        df.write(a);
         }
 
         {
@@ -492,7 +525,10 @@ init_unit_test_suite( int argc, char* argv[] )
 
     shared_ptr<DataFileTest> t6(new DataFileTest("test6.df", dsch, dblsch));
     ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testZip, t6));
-
+    shared_ptr<DataFileTest> t8(new DataFileTest("test8.df", dsch, dblsch));
+#ifdef SNAPPY_CODEC_AVAILABLE
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testSnappy, t8));
+#endif
     shared_ptr<DataFileTest> t7(new DataFileTest("test7.df",fsch,fsch));
     ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testSchemaReadWrite,t7));
     ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testCleanup,t7));

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c++/test/JsonTests.cc
----------------------------------------------------------------------
diff --git a/lang/c++/test/JsonTests.cc b/lang/c++/test/JsonTests.cc
index 823f15f..79e6c27 100644
--- a/lang/c++/test/JsonTests.cc
+++ b/lang/c++/test/JsonTests.cc
@@ -56,6 +56,9 @@ TestData<double> doubleData[] = {
     { "1.0", etDouble, 1.0 },
     { "4.7e3", etDouble, 4700.0 },
     { "-7.2e-4", etDouble, -0.00072 },
+    { "1e4", etDouble, 10000 },
+    { "-1e-4", etDouble, -0.0001 },
+    { "-0e0", etDouble, 0.0 },
 };
 
 TestData<const char*> stringData[] = {

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/c/src/codec.c
----------------------------------------------------------------------
diff --git a/lang/c/src/codec.c b/lang/c/src/codec.c
index 4a2502b..e0d35be 100644
--- a/lang/c/src/codec.c
+++ b/lang/c/src/codec.c
@@ -21,6 +21,9 @@
 #  if defined(__APPLE__)
 #    include <libkern/OSByteOrder.h>
 #    define __bswap_32 OSSwapInt32
+#  elif defined(__FreeBSD__)
+#    include <sys/endian.h>
+#    define __bswap_32 bswap32
 #  else
 #    include <byteswap.h>
 #  endif

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/Schema.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Schema.java b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
index 2019c1f..53e5e2e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/Schema.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
@@ -464,6 +464,8 @@ public abstract class Schema extends JsonProperties {
     private boolean defaultValueEquals(JsonNode thatDefaultValue) {
       if (defaultValue == null)
         return thatDefaultValue == null;
+      if (thatDefaultValue == null)
+        return false;
       if (Double.isNaN(defaultValue.getDoubleValue()))
         return Double.isNaN(thatDefaultValue.getDoubleValue());
       return defaultValue.equals(thatDefaultValue);
@@ -587,6 +589,7 @@ public abstract class Schema extends JsonProperties {
     private Object s1; private Object s2;
     private SeenPair(Object s1, Object s2) { this.s1 = s1; this.s2 = s2; }
     public boolean equals(Object o) {
+      if (!(o instanceof SeenPair)) return false;
       return this.s1 == ((SeenPair)o).s1 && this.s2 == ((SeenPair)o).s2;
     }
     public int hashCode() {

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
index f1a1faa..ce038d4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
@@ -2587,6 +2587,10 @@ public class SchemaBuilder {
         s = new String(data, "ISO-8859-1");
         char[] quoted = JsonStringEncoder.getInstance().quoteAsString(s);
         s = "\"" + new String(quoted) + "\"";
+      } else if (o instanceof byte[]) {
+        s = new String((byte[]) o, "ISO-8859-1");
+        char[] quoted = JsonStringEncoder.getInstance().quoteAsString(s);
+        s = '\"' + new String(quoted) + '\"';
       } else {
         s = GenericData.get().toString(o);
       }

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java b/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
index 8dccfc3..ca90d6e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
@@ -87,7 +87,7 @@ public class BZip2Codec extends Codec {
   public boolean equals(Object obj) {
     if (this == obj)
       return true;
-    if (getClass() != obj.getClass())
+    if (obj == null || obj.getClass() != getClass())
       return false;
     return true;
   }

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
index f8f6ac4..bfe9e6d 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
@@ -132,7 +132,7 @@ class DeflateCodec extends Codec {
   public boolean equals(Object obj) {
     if (this == obj)
       return true;
-    if (getClass() != obj.getClass())
+    if (obj == null || obj.getClass() != getClass())
       return false;
     DeflateCodec other = (DeflateCodec)obj;
     return (this.nowrap == other.nowrap);

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
index e95f699..bc07f14 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
@@ -54,7 +54,7 @@ final class NullCodec extends Codec {
   public boolean equals(Object other) {
     if (this == other)
       return true;
-    return (this.getClass() == other.getClass());
+    return (other != null && other.getClass() == getClass());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
index 1a5d252..6206c8d 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
@@ -75,7 +75,7 @@ class SnappyCodec extends Codec {
   public boolean equals(Object obj) {
     if (this == obj)
       return true;
-    if (getClass() != obj.getClass())
+    if (obj == null || obj.getClass() != getClass())
       return false;
     return true;
   }

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java
index 7677b3f..23aa830 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java
@@ -109,7 +109,7 @@ public class XZCodec extends Codec {
   public boolean equals(Object obj) {
     if (this == obj)
       return true;
-    if (getClass() != obj.getClass())
+    if (obj == null || obj.getClass() != getClass())
       return false;
     XZCodec other = (XZCodec)obj;
     return (this.compressionLevel == other.compressionLevel);
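
The same null guard is applied to BZip2Codec, DeflateCodec, NullCodec, SnappyCodec and XZCodec
above, so codec.equals(null) now returns false instead of throwing NullPointerException, as the
Object.equals() contract requires. The shape of the pattern, sketched outside any particular
codec:

    // Generic form of the patched equals() methods (illustration only).
    class NullSafeEqualsExample {
      private final int level = 6;

      @Override
      public boolean equals(Object obj) {
        if (this == obj)
          return true;
        if (obj == null || obj.getClass() != getClass())
          return false;                                  // handles null and foreign types
        NullSafeEqualsExample other = (NullSafeEqualsExample) obj;
        return this.level == other.level;                // stateless codecs just return true
      }

      @Override
      public int hashCode() {
        return level;                                    // stays consistent with equals()
      }
    }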

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
index baedeb8..20d3dc3 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
@@ -50,7 +50,7 @@ public abstract class SpecificRecordBase
     return get(getSchema().getField(fieldName).pos());
   }
 
-  public Conversion<?> getConverion(String fieldName) {
+  public Conversion<?> getConversion(String fieldName) {
     return getConversion(getSchema().getField(fieldName).pos());
   }
 

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
index a22708a..6958798 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
@@ -96,6 +96,9 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
   }
 
   public boolean equals(Object o) {
+    if (!(o instanceof WeakIdentityHashMap)) {
+      return false;
+    }
     return backingStore.equals(((WeakIdentityHashMap)o).backingStore);
   }
 
@@ -159,6 +162,9 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
       if (this == o) {
         return true;
       }
+      if (!(o instanceof WeakIdentityHashMap.IdentityWeakReference)) {
+        return false;
+      }
       IdentityWeakReference ref = (IdentityWeakReference)o;
       if (this.get() == ref.get()) {
         return true;
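
Both equals() methods in WeakIdentityHashMap get the same instanceof guard, so comparing the map
or one of its weak key references against null or an unrelated type now returns false instead of
throwing. Note the map compares keys by reference identity, not by equals(); a hedged usage
sketch (assumes the class's public no-arg constructor and Map-style put/get, as its declaration
above suggests):

    import org.apache.avro.util.WeakIdentityHashMap;

    public class IdentityKeyDemo {
      public static void main(String[] args) {
        WeakIdentityHashMap<String, Integer> map = new WeakIdentityHashMap<String, Integer>();
        String k1 = new String("key");
        String k2 = new String("key");   // equal to k1, but a distinct instance
        map.put(k1, 1);
        System.out.println(map.get(k1)); // 1
        System.out.println(map.get(k2)); // null -- lookup is by reference identity
      }
    }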

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java b/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java
index ca98e4c..49b939e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java
@@ -120,7 +120,8 @@ public class JacksonUtils {
       if (schema == null || schema.getType().equals(Schema.Type.STRING) ||
           schema.getType().equals(Schema.Type.ENUM)) {
         return jsonNode.asText();
-      } else if (schema.getType().equals(Schema.Type.BYTES)) {
+      } else if (schema.getType().equals(Schema.Type.BYTES)
+              || schema.getType().equals(Schema.Type.FIXED)) {
         try {
           return jsonNode.getTextValue().getBytes(BYTES_CHARSET);
         } catch (UnsupportedEncodingException e) {

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/avro/src/test/java/org/apache/avro/TestFixed.java
----------------------------------------------------------------------
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestFixed.java b/lang/java/avro/src/test/java/org/apache/avro/TestFixed.java
index 14ff5ce..5b69d1e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestFixed.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestFixed.java
@@ -1,21 +1,5 @@
-/*
- * Copyright 2017 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.avro;
 
-import java.nio.ByteBuffer;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -26,9 +10,10 @@ public class TestFixed {
   public void testFixedDefaultValueDrop() {
     Schema md5 = SchemaBuilder.builder().fixed("MD5").size(16);
     Schema frec = SchemaBuilder.builder().record("test")
-            .fields().name("hash").type(md5).withDefault(ByteBuffer.wrap(new byte[16])).endRecord();
+            .fields().name("hash").type(md5).withDefault(new byte[16]).endRecord();
     Schema.Field field = frec.getField("hash");
     Assert.assertNotNull(field.defaultVal());
+    Assert.assertArrayEquals(new byte[16], (byte[]) field.defaultVal());
   }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/compiler/src/test/idl/input/cycle.avdl
----------------------------------------------------------------------
diff --git a/lang/java/compiler/src/test/idl/input/cycle.avdl b/lang/java/compiler/src/test/idl/input/cycle.avdl
index f434431..07c9675 100644
--- a/lang/java/compiler/src/test/idl/input/cycle.avdl
+++ b/lang/java/compiler/src/test/idl/input/cycle.avdl
@@ -7,7 +7,7 @@ protocol Cycle {
     }
 
     record Method {
-      string declaringClass;
+      string @testAttribute("testValue") declaringClass;
       string methodName;
     }
 

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/compiler/src/test/idl/output/cycle.avpr
----------------------------------------------------------------------
diff --git a/lang/java/compiler/src/test/idl/output/cycle.avpr b/lang/java/compiler/src/test/idl/output/cycle.avpr
index 53658af..190c36b 100644
--- a/lang/java/compiler/src/test/idl/output/cycle.avpr
+++ b/lang/java/compiler/src/test/idl/output/cycle.avpr
@@ -22,7 +22,8 @@
               "name" : "Method",
               "fields" : [ {
                 "name" : "declaringClass",
-                "type" : "string"
+                "type" : "string",
+                "testAttribute":"testValue"
               }, {
                 "name" : "methodName",
                 "type" : "string"

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
----------------------------------------------------------------------
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
index 3a9e158..6ed898a 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
@@ -25,6 +25,7 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
@@ -370,8 +371,8 @@ public class TestNettyServerWithCallbacks {
     }
   }
 
-  @Test
-  public void cancelPendingRequestsAfterChannelCloseByServerShutdown() throws Exception {
+  @Test(timeout = 20000)
+  public void cancelPendingRequestsAfterChannelCloseByServerShutdown() throws Throwable {
     // The purpose of this test is to verify that a client doesn't stay
     // blocked when a server is unexpectedly killed (or when for some
     // other reason the channel is suddenly closed) while the server
@@ -381,7 +382,7 @@ public class TestNettyServerWithCallbacks {
     // Start up a second server so that closing the server doesn't
     // interfere with the other unit tests:
     BlockingSimpleImpl blockingSimpleImpl = new BlockingSimpleImpl();
-    Server server2 = new NettyServer(new SpecificResponder(Simple.class,
+    final Server server2 = new NettyServer(new SpecificResponder(Simple.class,
         blockingSimpleImpl), new InetSocketAddress(0));
     server2.start();
 
@@ -404,7 +405,8 @@ public class TestNettyServerWithCallbacks {
       // Acquire the run permit, to avoid that the server method returns immediately
       blockingSimpleImpl.acquireRunPermit();
 
-      Thread t = new Thread(new Runnable() {
+      // Start client call
+      Future<?> clientFuture = Executors.newSingleThreadExecutor().submit(new Runnable() {
         @Override
         public void run() {
           try {
@@ -416,23 +418,30 @@ public class TestNettyServerWithCallbacks {
         }
       });
 
-      // Start client call
-      t.start();
-
       // Wait until method is entered on the server side
       blockingSimpleImpl.acquireEnterPermit();
 
       // The server side method is now blocked waiting on the run permit
       // (= is busy handling the request)
 
-      // Stop the server
-      server2.close();
+      // Stop the server in a separate thread as it blocks the actual thread until the server side
+      // method is running
+      new Thread(new Runnable() {
+        @Override
+        public void run() {
+          server2.close();
+        }
+      }).start();
 
       // With the server gone, we expect the client to get some exception and exit
-      // Wait for client thread to exit
-      t.join(10000);
-
-      Assert.assertFalse("Client request should not be blocked on server shutdown", t.isAlive());
+      // Wait for the client call to exit
+      try {
+        clientFuture.get(10, TimeUnit.SECONDS);
+      } catch (ExecutionException e) {
+        throw e.getCause();
+      } catch (TimeoutException e) {
+        Assert.fail("Client request should not be blocked on server shutdown");
+      }
 
     } finally {
       blockingSimpleImpl.releaseRunPermit();
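
The rewritten test replaces Thread.join()/isAlive() with an ExecutorService Future: a hang now
fails the test as a timeout, and any exception thrown by the client call is rethrown with its
original cause instead of being lost. The pattern in isolation (doBlockingCall is a hypothetical
stand-in for the test's RPC invocation):

    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    class BoundedCallCheck {
      static void assertCompletesWithinTenSeconds(Runnable doBlockingCall) throws Throwable {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        Future<?> future = executor.submit(doBlockingCall);
        try {
          future.get(10, TimeUnit.SECONDS);      // bounded wait instead of join()
        } catch (ExecutionException e) {
          throw e.getCause();                    // surface the real failure
        } catch (TimeoutException e) {
          throw new AssertionError("call is still blocked after 10 seconds");
        } finally {
          executor.shutdownNow();
        }
      }
    }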

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index 8afcefd..70c8693 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -43,51 +43,49 @@
       -Dhadoop.version=1 or leave unspecified to build against Hadoop 2
     -->
     <hadoop1.version>1.2.1</hadoop1.version>
-    <hadoop2.version>2.5.1</hadoop2.version>
+    <hadoop2.version>2.7.3</hadoop2.version>
     <jackson.version>1.9.13</jackson.version>
     <jetty.version>6.1.26</jetty.version>
     <jetty-servlet-api.version>2.5-20081211</jetty-servlet-api.version>
-    <jopt-simple.version>4.7</jopt-simple.version>
-    <junit.version>4.11</junit.version>
-    <netty.version>3.5.13.Final</netty.version>
-    <paranamer.version>2.7</paranamer.version>
-    <protobuf.version>2.5.0</protobuf.version>
-    <thrift.version>0.9.1</thrift.version>
-    <slf4j.version>1.7.7</slf4j.version>
-    <snappy.version>1.1.1.3</snappy.version>
+    <jopt-simple.version>5.0.3</jopt-simple.version>
+    <junit.version>4.12</junit.version>
+    <netty.version>3.10.6.Final</netty.version>
+    <paranamer.version>2.8</paranamer.version>
+    <protobuf.version>2.6.1</protobuf.version>
+    <thrift.version>0.9.3</thrift.version>
+    <slf4j.version>1.7.22</slf4j.version>
+    <snappy.version>1.1.2.6</snappy.version>
     <velocity.version>1.7</velocity.version>
-    <maven.version>2.0.10</maven.version>
-    <ant.version>1.9.0</ant.version>
-    <commons-cli.version>1.2</commons-cli.version>
-    <commons-codec.version>1.9</commons-codec.version>
-    <commons-compress.version>1.8.1</commons-compress.version>
-    <commons-httpclient.version>3.1</commons-httpclient.version>
+    <maven.version>2.0.11</maven.version>
+    <ant.version>1.10.0</ant.version>
+    <commons-cli.version>1.3.1</commons-cli.version>
+    <commons-codec.version>1.10</commons-codec.version>
+    <commons-compress.version>1.13</commons-compress.version>
     <commons-lang.version>2.6</commons-lang.version>
-    <commons-logging.version>1.1.1</commons-logging.version>
-    <tukaani.version>1.5</tukaani.version>
-    <easymock.version>3.2</easymock.version>
+    <commons-logging.version>1.2</commons-logging.version>
+    <tukaani.version>1.6</tukaani.version>
+    <easymock.version>3.4</easymock.version>
     <hamcrest.version>1.3</hamcrest.version>
-    <commons-httpclient.version>3.1</commons-httpclient.version>
-    <joda.version>2.7</joda.version>
+    <joda.version>2.9.7</joda.version>
     <!-- This Guava version should match Hadoop's Guava version. See AVRO-1781. -->
     <guava.version>11.0.2</guava.version>
     <findbugs-annotations.version>1.3.9-1</findbugs-annotations.version>
 
     <!-- version properties for plugins -->
-    <bundle-plugin-version>2.5.3</bundle-plugin-version>
-    <compiler-plugin.version>3.1</compiler-plugin.version>
-    <exec-plugin.version>1.3.2</exec-plugin.version>
-    <jar-plugin.version>2.5</jar-plugin.version>
+    <bundle-plugin-version>3.2.0</bundle-plugin-version>
+    <compiler-plugin.version>3.6.0</compiler-plugin.version>
+    <exec-plugin.version>1.5.0</exec-plugin.version>
+    <jar-plugin.version>2.6</jar-plugin.version>
     <javacc-plugin.version>2.6</javacc-plugin.version>
-    <javadoc-plugin.version>2.9.1</javadoc-plugin.version>
-    <plugin-tools-javadoc.version>3.2</plugin-tools-javadoc.version>
-    <maven-site-plugin.version>3.3</maven-site-plugin.version>
-    <plugin-plugin.version>3.3</plugin-plugin.version>
-    <source-plugin.version>2.3</source-plugin.version>
-    <surefire-plugin.version>2.17</surefire-plugin.version>
+    <javadoc-plugin.version>2.10.4</javadoc-plugin.version>
+    <plugin-tools-javadoc.version>3.5</plugin-tools-javadoc.version>
+    <maven-site-plugin.version>3.6</maven-site-plugin.version>
+    <plugin-plugin.version>3.5</plugin-plugin.version>
+    <source-plugin.version>2.4</source-plugin.version>
+    <surefire-plugin.version>2.19.1</surefire-plugin.version>
     <file-management.version>1.2.1</file-management.version>
     <shade-plugin.version>1.7.1</shade-plugin.version>
-    <archetype-plugin.version>2.2</archetype-plugin.version>
+    <archetype-plugin.version>2.4</archetype-plugin.version>
   </properties>
 
   <modules>

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/tools/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/tools/pom.xml b/lang/java/tools/pom.xml
index 18e0155..11bac66 100644
--- a/lang/java/tools/pom.xml
+++ b/lang/java/tools/pom.xml
@@ -221,11 +221,6 @@
       <version>${commons-logging.version}</version>
     </dependency>
     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>${commons-httpclient.version}</version>
-    </dependency>
-    <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>trevni-core</artifactId>
       <version>${project.version}</version>

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
----------------------------------------------------------------------
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
index 6026796..e782321 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
@@ -17,9 +17,11 @@
  */
 package org.apache.avro.tool;
 
+import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.PrintStream;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -33,6 +35,7 @@ import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
+import org.apache.hadoop.fs.Path;
 
 /**
  * Tool to concatenate avro files with the same schema and non-reserved
@@ -65,7 +68,7 @@ public class ConcatTool implements Tool {
     Map<String, byte[]> metadata = new TreeMap<String, byte[]>();
     String inputCodec = null;
 
-    for (String inFile : args) {
+    for (String inFile : expandsInputFiles(args)) {
       InputStream input = Util.fileOrStdin(inFile, in);
       DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
         input, new GenericDatumReader<GenericRecord>());
@@ -124,6 +127,24 @@ public class ConcatTool implements Tool {
     return 0;
   }
 
+  /** Processes a list of input files to expand directories if needed. */
+  private static List<String> expandsInputFiles(List<String> args) throws IOException {
+    List<String> files = new ArrayList<String>();
+
+    for (String arg : args) {
+      if (arg.equals("-")) {
+        files.add(arg);
+      } else {
+        List<Path> paths = Util.getFiles(arg);
+        for (Path path : paths) {
+          files.add(path.toString());
+        }
+      }
+    }
+
+    return files;
+  }
+
   private void printHelp(PrintStream out) {
     out.println("concat [input-file...] output-file");
     out.println();
@@ -136,8 +157,9 @@ public class ConcatTool implements Tool {
     out.println("  3 if the codecs don't match");
     out.println("If no input files are given stdin will be used. The tool");
     out.println("0 on success. A dash ('-') can be given as an input file");
-    out.println("to use stdin, and as an output file to use stdout.");
-
+    out.println("to use stdin, and as an output file to use stdout. If a directory");
+    out.println("is given as an input-file all the files within this directory");
+    out.println("are used.");
   }
 
 @Override

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
----------------------------------------------------------------------
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
index 708bb41..9f1cae1 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
@@ -22,6 +22,7 @@ import static org.apache.avro.file.DataFileConstants.DEFLATE_CODEC;
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -168,42 +169,52 @@ class Util {
     }
   }
 
-  /**If pathname is a file, this method returns a list with a single absolute Path to that file,
-   * if pathname is a directory, this method returns a list of Pathes to all the files within
-   * this directory.
-   * Only files inside that directory are included, no subdirectories or files in subdirectories
-   * will be added.
+  /**
+   * If pathname is a file, this method returns a list with a single absolute Path to that file.
+   * If pathname is a directory, this method returns a list of Pathes to all the files within
+   * this directory. Only files inside that directory are included, no subdirectories or files
+   * in subdirectories will be added.
+   * If pathname is a glob pattern, all files matching the pattern are included.
+   *
    * The List is sorted alphabetically.
-   * @param fileOrDirName filename or directoryname
+   * @param fileOrDirName filename, directoryname or a glob pattern
    * @return A Path List
    * @throws IOException
    */
-  static List<Path> getFiles(String fileOrDirName)
-    throws IOException {
+  static List<Path> getFiles(String fileOrDirName) throws IOException {
     List<Path> pathList = new ArrayList<Path>();
     Path path = new Path(fileOrDirName);
     FileSystem fs = path.getFileSystem(new Configuration());
 
     if (fs.isFile(path)) {
       pathList.add(path);
-    }
-    else if (fs.getFileStatus(path).isDir()) {
+    } else if (fs.isDirectory(path)) {
       for (FileStatus status : fs.listStatus(path)) {
         if(!status.isDir()) {
           pathList.add(status.getPath());
         }
       }
+    } else {
+      FileStatus[] fileStatuses = fs.globStatus(path);
+      if (fileStatuses != null) {
+        for (FileStatus status : fileStatuses) {
+          pathList.add(status.getPath());
+        }
+      } else {
+        throw new FileNotFoundException(fileOrDirName);
+      }
     }
     Collections.sort(pathList);
     return pathList;
   }
 
   /**
-   * This method returns a list which contains a path to every given file
-   * in the input and a path to every file inside a given directory.
+   * Concatenate the result of {@link #getFiles(String)} applied to all file or directory names.
    * The list is sorted alphabetically and contains no subdirectories or files within those.
-   * @param fileOrDirNames A list of filenames and directorynames
-   * @return A list of Pathes, one for each file
+   *
+   * The list is sorted alphabetically.
+   * @param fileOrDirNames A list of filenames, directorynames or glob patterns
+   * @return A list of Paths, one for each file
    * @throws IOException
    */
   static List<Path> getFiles(List<String> fileOrDirNames)
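
With this change Util.getFiles() handles three cases: a plain file, a directory (immediate
children only), and otherwise a glob pattern resolved through FileSystem.globStatus(), failing
with FileNotFoundException when nothing matches. A condensed sketch of the glob branch
(standalone illustration, not the actual Util class):

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class GlobExpansionSketch {
      static List<Path> expand(String pattern) throws IOException {
        Path path = new Path(pattern);
        FileSystem fs = path.getFileSystem(new Configuration());
        FileStatus[] matches = fs.globStatus(path);   // null when nothing matches the pattern
        if (matches == null) {
          throw new FileNotFoundException(pattern);
        }
        List<Path> result = new ArrayList<Path>();
        for (FileStatus status : matches) {
          result.add(status.getPath());
        }
        return result;
      }
    }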

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/tools/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/lang/java/tools/src/main/resources/log4j.properties b/lang/java/tools/src/main/resources/log4j.properties
new file mode 100644
index 0000000..41894f6
--- /dev/null
+++ b/lang/java/tools/src/main/resources/log4j.properties
@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+log4j.rootLogger=INFO, console
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
----------------------------------------------------------------------
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
index 312bd76..39e45de 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
@@ -180,6 +180,20 @@ public class TestCatTool {
       args);
     assertEquals(0, returnCode);
     assertEquals(LIMIT_WITHIN_INPUT_BOUNDS, numRowsInFile(output));
+
+//    glob input
+    args = asList(
+      new File(input1.getParentFile(), "/*").getAbsolutePath(),
+      output.getAbsolutePath(),
+      "--offset" , String.valueOf(OFFSET),
+      "--limit" , String.valueOf(LIMIT_WITHIN_INPUT_BOUNDS));
+    returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    assertEquals(LIMIT_WITHIN_INPUT_BOUNDS, numRowsInFile(output));
   }
 
 

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java
----------------------------------------------------------------------
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java
index af31ccb..6fdbddf 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java
@@ -25,7 +25,9 @@ import static org.junit.Assert.assertTrue;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.PrintStream;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -108,6 +110,83 @@ public class TestConcatTool {
   }
 
   @Test
+  public void testDirConcat() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+
+    File dir = AvroTestUtil.tempDirectory(getClass(), "input");
+
+    for (int i = 0; i < 3; i++) {
+      String filename = "input" + i + ".avro";
+      File input = generateData(filename, Type.STRING, metadata, DEFLATE);
+      boolean ok = input.renameTo(new File(dir, input.getName()));
+      assertTrue(ok);
+    }
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      dir.getAbsolutePath(),
+      output.getAbsolutePath());
+    int returnCode = new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+
+    assertEquals(ROWS_IN_INPUT_FILES * 3, numRowsInFile(output));
+  }
+
+  @Test
+  public void testGlobPatternConcat() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+
+    File dir = AvroTestUtil.tempDirectory(getClass(), "input");
+
+    for (int i = 0; i < 3; i++) {
+      String filename = "input" + i + ".avro";
+      File input = generateData(filename, Type.STRING, metadata, DEFLATE);
+      boolean ok = input.renameTo(new File(dir, input.getName()));
+      assertTrue(ok);
+    }
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      new File(dir, "/*").getAbsolutePath(),
+      output.getAbsolutePath());
+    int returnCode = new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+
+    assertEquals(ROWS_IN_INPUT_FILES * 3, numRowsInFile(output));
+  }
+
+  @Test(expected = FileNotFoundException.class)
+  public void testFileDoesNotExist() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+
+    File dir = AvroTestUtil.tempDirectory(getClass(), "input");
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      new File(dir, "/doNotExist").getAbsolutePath(),
+      output.getAbsolutePath());
+    new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+  }
+
+  @Test
   public void testConcat() throws Exception {
     Map<String, String> metadata = new HashMap<String, String>();
     metadata.put("myMetaKey", "myMetaValue");

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/ruby/Manifest
----------------------------------------------------------------------
diff --git a/lang/ruby/Manifest b/lang/ruby/Manifest
index 23220bb..3edd7cf 100644
--- a/lang/ruby/Manifest
+++ b/lang/ruby/Manifest
@@ -12,6 +12,7 @@ lib/avro/ipc.rb
 lib/avro/protocol.rb
 lib/avro/schema.rb
 lib/avro/schema_normalization.rb
+lib/avro/schema_validator.rb
 test/case_finder.rb
 test/random_data.rb
 test/sample_ipc_client.rb
@@ -25,5 +26,6 @@ test/test_io.rb
 test/test_protocol.rb
 test/test_schema.rb
 test/test_schema_normalization.rb
+test/test_schema_validator.rb
 test/test_socket_transport.rb
 test/tool.rb

http://git-wip-us.apache.org/repos/asf/avro/blob/884fbabd/lang/ruby/lib/avro.rb
----------------------------------------------------------------------
diff --git a/lang/ruby/lib/avro.rb b/lang/ruby/lib/avro.rb
index c419ab1..1293f0f 100644
--- a/lang/ruby/lib/avro.rb
+++ b/lang/ruby/lib/avro.rb
@@ -40,3 +40,4 @@ require 'avro/data_file'
 require 'avro/protocol'
 require 'avro/ipc'
 require 'avro/schema_normalization'
+require 'avro/schema_validator'

