avro-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dk...@apache.org
Subject [avro] branch master updated: Fix deprecated API uses and multiple warnings reported by static analysis tools
Date Tue, 12 Mar 2019 16:45:07 GMT
This is an automated email from the ASF dual-hosted git repository.

dkulp pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/avro.git


The following commit(s) were added to refs/heads/master by this push:
     new 3d51ba5  Fix deprecated API uses and multiple warnings reported by static analysis tools
3d51ba5 is described below

commit 3d51ba5e96556136c2084838591aa325e81bfca9
Author: Ismaël Mejía <iemejia@gmail.com>
AuthorDate: Thu Nov 29 16:33:30 2018 +0100

    Fix deprecated API uses and multiple warnings reported by static analysis tools
---
 .../java/transport/SimpleOrderServiceClient.java   |  10 +-
 .../org/apache/avro/AvroMissingFieldException.java |   4 +-
 .../main/java/org/apache/avro/JsonProperties.java  |   3 +
 .../src/main/java/org/apache/avro/Protocol.java    |   5 +
 .../avro/src/main/java/org/apache/avro/Schema.java | 217 +++++++++++----------
 .../main/java/org/apache/avro/SchemaBuilder.java   |   2 +-
 .../java/org/apache/avro/SchemaCompatibility.java  |   2 +-
 .../src/main/java/org/apache/avro/data/Json.java   |   6 +-
 .../org/apache/avro/data/RecordBuilderBase.java    |   4 +-
 .../java/org/apache/avro/file/DataFileReader.java  |   1 -
 .../org/apache/avro/file/DataFileReader12.java     |   2 +-
 .../java/org/apache/avro/file/DataFileStream.java  |   2 +-
 .../java/org/apache/avro/file/DataFileWriter.java  |   9 +-
 .../main/java/org/apache/avro/file/FileReader.java |   1 +
 .../apache/avro/file/SeekableByteArrayInput.java   |  29 +--
 .../org/apache/avro/file/SeekableFileInput.java    |   7 +-
 .../java/org/apache/avro/generic/GenericData.java  |   4 +-
 .../apache/avro/generic/GenericDatumReader.java    |   2 +-
 .../org/apache/avro/generic/GenericEnumSymbol.java |   1 +
 .../java/org/apache/avro/io/BinaryDecoder.java     |   9 +-
 .../java/org/apache/avro/io/DecoderFactory.java    |   2 +-
 .../main/java/org/apache/avro/io/JsonDecoder.java  |   4 +-
 .../main/java/org/apache/avro/io/JsonEncoder.java  |   4 +-
 .../java/org/apache/avro/io/ResolvingDecoder.java  |   2 +-
 .../avro/io/parsing/JsonGrammarGenerator.java      |   2 +
 .../avro/io/parsing/ResolvingGrammarGenerator.java |  23 ++-
 .../java/org/apache/avro/io/parsing/Symbol.java    |   6 +-
 .../io/parsing/ValidatingGrammarGenerator.java     |   4 +-
 .../apache/avro/reflect/DateAsLongEncoding.java    |   2 +-
 .../apache/avro/reflect/FieldAccessReflect.java    |   8 +-
 .../org/apache/avro/reflect/FieldAccessUnsafe.java |   3 +-
 .../java/org/apache/avro/reflect/ReflectData.java  |  19 +-
 .../apache/avro/reflect/ReflectDatumReader.java    |   2 +-
 .../org/apache/avro/reflect/ReflectionUtil.java    |   2 +-
 .../org/apache/avro/specific/SpecificData.java     |  16 +-
 .../avro/specific/SpecificExceptionBase.java       |   8 +-
 .../apache/avro/specific/SpecificRecordBase.java   |   6 +-
 .../apache/avro/util/ByteBufferOutputStream.java   |   4 +-
 .../main/java/org/apache/avro/util/RandomData.java |   7 +-
 .../src/main/java/org/apache/avro/util/Utf8.java   |   2 +
 .../org/apache/avro/util/WeakIdentityHashMap.java  |  25 +++
 .../java/org/apache/avro/FooBarSpecificRecord.java |   3 +
 .../java/org/apache/avro/GenerateBlockingData.java |   2 +-
 .../test/java/org/apache/avro/TestDataFile.java    |  29 +--
 .../org/apache/avro/TestDataFileCustomSync.java    |   3 +-
 .../java/org/apache/avro/TestDataFileMeta.java     |   3 +-
 .../java/org/apache/avro/TestNestedRecords.java    |   3 +-
 .../org/apache/avro/TestSchemaCompatibility.java   |   5 +-
 .../TestSchemaCompatibilityFixedSizeMismatch.java  |   8 +-
 .../TestSchemaCompatibilityMissingEnumSymbols.java |   8 +-
 .../TestSchemaCompatibilityMissingUnionBranch.java |   7 +-
 .../avro/TestSchemaCompatibilityNameMismatch.java  |   7 +-
 ...ompatibilityReaderFieldMissingDefaultValue.java |   7 +-
 .../avro/TestSchemaCompatibilityTypeMismatch.java  |   7 +-
 .../org/apache/avro/TestSchemaNormalization.java   |  13 +-
 .../org/apache/avro/TestUnionSelfReference.java    |   6 +-
 .../java/org/apache/avro/file/TestCustomCodec.java |   7 +-
 .../org/apache/avro/file/codec/CustomCodec.java    |   6 +-
 .../org/apache/avro/generic/TestGenericData.java   |  51 ++---
 .../avro/generic/TestGenericDatumWriter.java       |  10 +-
 .../avro/generic/TestGenericRecordBuilder.java     |   2 +-
 .../java/org/apache/avro/io/TestBinaryDecoder.java |   2 +-
 .../java/org/apache/avro/io/TestBlockingIO.java    |   9 +-
 .../test/java/org/apache/avro/io/TestEncoders.java |   6 +-
 .../java/org/apache/avro/io/TestJsonDecoder.java   |   4 +-
 .../java/org/apache/avro/io/TestValidatingIO.java  |   6 +-
 .../org/apache/avro/io/parsing/SymbolTest.java     |   5 +-
 .../io/parsing/TestResolvingGrammarGenerator.java  |   2 +-
 .../avro/message/TestBinaryMessageEncoding.java    |   2 +-
 .../org/apache/avro/reflect/TestByteBuffer.java    |   3 +-
 .../apache/avro/reflect/TestNonStringMapKeys.java  |  22 +--
 .../java/org/apache/avro/reflect/TestReflect.java  |  21 +-
 .../apache/avro/reflect/TestReflectionUtil.java    |   2 +-
 .../specific/TestRecordWithJsr310LogicalTypes.java |   3 +
 .../avro/specific/TestRecordWithLogicalTypes.java  |   3 +
 .../java/org/apache/avro/util/TestCaseFinder.java  |   1 +
 .../test/java/org/apache/avro/util/TestUtf8.java   |   4 +-
 lang/java/compiler/pom.xml                         |   2 +-
 .../avro/compiler/specific/SpecificCompiler.java   |  10 +-
 .../javacc/org/apache/avro/compiler/idl/idl.jj     |   2 +-
 lang/java/grpc/pom.xml                             |  22 +++
 .../java/org/apache/avro/grpc/AvroGrpcClient.java  |   1 +
 .../java/org/apache/avro/grpc/AvroGrpcServer.java  |   1 +
 .../java/org/apache/avro/grpc/AvroGrpcUtils.java   |   4 +-
 .../java/org/apache/avro/grpc/AvroInputStream.java |   2 +-
 .../apache/avro/grpc/AvroRequestMarshaller.java    |   2 +-
 .../apache/avro/grpc/AvroResponseMarshaller.java   |   2 +-
 .../org/apache/avro/grpc/TestAvroProtocolGrpc.java |   4 +-
 .../CustomDecimalConversion.java                   |  44 ++---
 .../org/apache/avro/ipc/jetty/StaticServlet.java   |   1 +
 .../apache/avro/ipc/jetty/TestProtocolHttp.java    |   3 +-
 .../apache/avro/ipc/netty/NettyTransceiver.java    |   2 +-
 .../java/org/apache/avro/ipc/DatagramServer.java   |   3 +
 .../org/apache/avro/ipc/DatagramTransceiver.java   |   3 +
 .../java/org/apache/avro/ipc/HttpTransceiver.java  |   3 +
 .../main/java/org/apache/avro/ipc/RPCContext.java  |  16 +-
 .../main/java/org/apache/avro/ipc/Requestor.java   |  16 +-
 .../main/java/org/apache/avro/ipc/Responder.java   |  26 ++-
 .../java/org/apache/avro/ipc/SaslSocketServer.java |  18 +-
 .../org/apache/avro/ipc/SaslSocketTransceiver.java |  13 +-
 .../java/org/apache/avro/ipc/SocketServer.java     |   6 +
 .../org/apache/avro/ipc/SocketTransceiver.java     |   3 +
 .../main/java/org/apache/avro/ipc/Transceiver.java |   2 +-
 .../java/org/apache/avro/ipc/stats/Histogram.java  |   3 +-
 .../java/org/apache/avro/RPCMetaTestPlugin.java    |   2 +-
 .../src/test/java/org/apache/avro/TestCompare.java |  14 +-
 .../java/org/apache/avro/TestDataFileSpecific.java |   2 +-
 .../java/org/apache/avro/TestProtocolGeneric.java  |   5 +-
 .../java/org/apache/avro/TestProtocolSpecific.java |   1 +
 .../src/test/java/org/apache/avro/TestSchema.java  | 160 +++++++--------
 .../compiler/specific/TestSpecificCompiler.java    |  14 +-
 .../apache/avro/message/TestCustomSchemaStore.java |   2 +-
 .../org/apache/avro/specific/TestSpecificData.java |   2 +-
 .../avro/specific/TestSpecificRecordBuilder.java   |  10 +-
 lang/java/mapred/pom.xml                           |  22 +++
 .../avro/hadoop/file/SortedKeyValueFile.java       |   1 +
 .../apache/avro/hadoop/io/AvroSequenceFile.java    |   4 +-
 .../apache/avro/hadoop/io/AvroSerialization.java   |   8 +-
 .../org/apache/avro/hadoop/io/AvroSerializer.java  |   4 +-
 .../apache/avro/mapred/AvroAsTextRecordReader.java |   8 +-
 .../main/java/org/apache/avro/mapred/AvroJob.java  |   6 +-
 .../org/apache/avro/mapred/AvroKeyComparator.java  |   2 +
 .../org/apache/avro/mapred/AvroMultipleInputs.java |  33 ++--
 .../apache/avro/mapred/AvroMultipleOutputs.java    |  10 +-
 .../org/apache/avro/mapred/AvroOutputFormat.java   |   2 +
 .../org/apache/avro/mapred/AvroRecordReader.java   |   6 +
 .../org/apache/avro/mapred/AvroSerialization.java  |   9 +
 .../apache/avro/mapred/AvroTextOutputFormat.java   |   4 +-
 .../apache/avro/mapred/AvroUtf8InputFormat.java    |   8 +-
 .../java/org/apache/avro/mapred/AvroWrapper.java   |   2 +
 .../apache/avro/mapred/DelegatingInputFormat.java  |   8 +-
 .../org/apache/avro/mapred/DelegatingMapper.java   |   1 +
 .../org/apache/avro/mapred/HadoopCombiner.java     |   1 +
 .../java/org/apache/avro/mapred/HadoopReducer.java |   1 +
 .../org/apache/avro/mapred/HadoopReducerBase.java  |   8 +-
 .../java/org/apache/avro/mapred/MapCollector.java  |   1 +
 .../org/apache/avro/mapred/SequenceFileReader.java |  20 +-
 .../org/apache/avro/mapred/TaggedInputSplit.java   |   6 +
 .../org/apache/avro/mapred/tether/TetherJob.java   |   2 +-
 .../avro/mapred/tether/TetherOutputFormat.java     |   3 +
 .../avro/mapred/tether/TetherOutputService.java    |   8 +-
 .../avro/mapred/tether/TetherPartitioner.java      |   2 +
 .../avro/mapred/tether/TetherRecordReader.java     |   7 +-
 .../apache/avro/mapred/tether/TetherReducer.java   |   3 +
 .../java/org/apache/avro/mapreduce/AvroJob.java    |   8 +-
 .../apache/avro/mapreduce/AvroMultipleOutputs.java |   6 +-
 .../hadoop/io/TestAvroDatumConverterFactory.java   |   2 +-
 .../avro/hadoop/io/TestAvroSerialization.java      |  10 +-
 .../apache/avro/mapred/TestAvroInputFormat.java    |  16 +-
 .../apache/avro/mapred/TestAvroMultipleInputs.java |   2 +-
 .../avro/mapred/TestAvroMultipleOutputs.java       |   4 +-
 .../org/apache/avro/mapred/TestAvroTextSort.java   |   2 +-
 .../apache/avro/mapred/TestSequenceFileReader.java |   9 +-
 .../java/org/apache/avro/mapred/TestWordCount.java |   9 +-
 .../avro/mapred/tether/TestWordCountTether.java    |   2 +-
 .../org/apache/avro/mapred/tether/TetherTask.java  |   4 +-
 .../avro/mapreduce/TestAvroKeyInputFormat.java     |   2 +-
 .../avro/mapreduce/TestAvroKeyOutputFormat.java    |   2 +-
 .../mapreduce/TestAvroKeyValueRecordWriter.java    |  22 +--
 .../avro/mapreduce/TestAvroMultipleOutputs.java    |  32 +--
 .../mapreduce/TestAvroMultipleOutputsSyncable.java |  32 +--
 .../apache/avro/mapreduce/TestKeyValueInput.java   |   4 +-
 .../avro/mapreduce/TestKeyValueWordCount.java      |   2 +-
 .../org/apache/avro/mapreduce/TestWordCount.java   |  32 +--
 lang/java/pom.xml                                  |  10 +
 .../org/apache/avro/protobuf/ProtobufData.java     |   2 +-
 .../test/java/org/apache/avro/protobuf/Test.java   |  29 +++
 .../org/apache/avro/protobuf/TestProtobuf.java     |   2 +-
 .../java/org/apache/avro/thrift/TestThrift.java    |   2 +-
 .../test/java/org/apache/avro/thrift/test/E.java   |   1 +
 .../java/org/apache/avro/thrift/test/Error.java    |   6 +
 .../test/java/org/apache/avro/thrift/test/Foo.java |  40 +++-
 .../java/org/apache/avro/thrift/test/FooOrBar.java |   7 +-
 .../java/org/apache/avro/thrift/test/Nested.java   |  12 ++
 .../java/org/apache/avro/thrift/test/Test.java     |  13 ++
 lang/java/tools/pom.xml                            |  22 +++
 .../java/org/apache/avro/tool/FromTextTool.java    |   2 +-
 .../main/java/org/apache/avro/tool/TetherTool.java |   2 +-
 .../org/apache/avro/tool/TrevniMetadataTool.java   |   2 +-
 .../org/apache/avro/tool/TrevniToJsonTool.java     |   2 +-
 .../src/main/java/org/apache/avro/tool/Util.java   |   2 +-
 .../org/apache/avro/tool/TestDataFileTools.java    |   9 +-
 .../java/org/apache/avro/tool/TestTetherTool.java  |   2 +-
 lang/java/trevni/avro/pom.xml                      |  27 +++
 .../org/apache/trevni/avro/AvroColumnReader.java   |   2 +-
 .../apache/trevni/avro/AvroTrevniInputFormat.java  |   6 +
 .../apache/trevni/avro/AvroTrevniOutputFormat.java |   3 +-
 .../avro/mapreduce/AvroTrevniRecordWriterBase.java |   2 +-
 .../java/org/apache/trevni/avro/TestCases.java     |   4 +-
 .../org/apache/trevni/avro/TestEvolvedSchema.java  |   6 +-
 .../java/org/apache/trevni/avro/TestShredder.java  |  32 +--
 .../java/org/apache/trevni/avro/TestWordCount.java |   2 +-
 .../avro/mapreduce/TestKeyValueWordCount.java      |   4 +-
 .../trevni/avro/mapreduce/TestKeyWordCount.java    |   6 +-
 .../main/java/org/apache/trevni/OutputBuffer.java  |  28 +--
 .../src/main/java/org/apache/trevni/ValueType.java |   2 +-
 .../test/java/org/apache/trevni/TestIOBuffers.java |   4 +-
 pom.xml                                            |   1 -
 198 files changed, 1132 insertions(+), 745 deletions(-)

diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java
index 6fbbb5a..b76bf40 100644
--- a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java
@@ -42,7 +42,7 @@ import ${package}.service.OrderProcessingService;
  */
 public class SimpleOrderServiceClient implements OrderProcessingService {
 
-  private static final Logger log = LoggerFactory.getLogger(SimpleOrderServiceEndpoint.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SimpleOrderServiceEndpoint.class);
 
   private InetSocketAddress endpointAddress;
 
@@ -55,16 +55,16 @@ public class SimpleOrderServiceClient implements OrderProcessingService {
   }
 
   public synchronized void start() throws IOException {
-    if (log.isInfoEnabled()) {
-      log.info("Starting Simple Ordering Netty client on '{}'", endpointAddress);
+    if (LOG.isInfoEnabled()) {
+      LOG.info("Starting Simple Ordering Netty client on '{}'", endpointAddress);
     }
     transceiver = new NettyTransceiver(endpointAddress);
     service = SpecificRequestor.getClient(OrderProcessingService.class, transceiver);
   }
 
   public void stop() throws IOException {
-    if (log.isInfoEnabled()) {
-      log.info("Stopping Simple Ordering Netty client on '{}'", endpointAddress);
+    if (LOG.isInfoEnabled()) {
+      LOG.info("Stopping Simple Ordering Netty client on '{}'", endpointAddress);
     }
     if (transceiver != null && transceiver.isConnected()) {
       transceiver.close();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/AvroMissingFieldException.java b/lang/java/avro/src/main/java/org/apache/avro/AvroMissingFieldException.java
index 2ef12b2..c8fca26 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/AvroMissingFieldException.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/AvroMissingFieldException.java
@@ -37,9 +37,9 @@ public class AvroMissingFieldException extends AvroRuntimeException {
 
   @Override
   public String toString() {
-    String result = "";
+    StringBuilder result = new StringBuilder();
     for (Field field: chainOfFields) {
-      result = " --> " + field.name() + result;
+      result.insert(0, " --> " + field.name());
     }
     return "Path in schema:" + result;
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java b/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
index fc83bd5..e18e05a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
@@ -146,6 +146,7 @@ public abstract class JsonProperties {
   private ConcurrentMap<String,JsonNode> props = new ConcurrentHashMap<String,JsonNode>() {
     private static final long serialVersionUID = 1L;
     private Queue<MapEntry<String, JsonNode>> propOrder = new ConcurrentLinkedQueue<MapEntry<String, JsonNode>>();
+    @Override
     public JsonNode putIfAbsent(String key,  JsonNode value) {
       JsonNode r = super.putIfAbsent(key, value);
       if (r == null) {
@@ -153,9 +154,11 @@ public abstract class JsonProperties {
       }
       return r;
     }
+    @Override
     public JsonNode put(String key,  JsonNode value) {
       return putIfAbsent(key, value);
     }
+    @Override
     public Set<Map.Entry<String, JsonNode>> entrySet() {
       return new AbstractSet<Map.Entry<String, JsonNode>>() {
         @Override
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
index b54a51c..fadaf80 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
@@ -116,6 +116,7 @@ public class Protocol extends JsonProperties {
     /** Returns true if this is a one-way message, with no response or errors.*/
     public boolean isOneWay() { return true; }
 
+    @Override
     public String toString() {
       try {
         StringWriter writer = new StringWriter();
@@ -143,6 +144,7 @@ public class Protocol extends JsonProperties {
       gen.writeBooleanField("one-way", true);
     }
 
+    @Override
     public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof Message)) return false;
@@ -152,6 +154,7 @@ public class Protocol extends JsonProperties {
         && propsEqual(that);
     }
 
+    @Override
     public int hashCode() {
       return name.hashCode() + request.hashCode() + propsHashCode();
     }
@@ -328,6 +331,7 @@ public class Protocol extends JsonProperties {
     return new TwoWayMessage(name, doc, propMap, request, response, errors);
   }
 
+  @Override
   public boolean equals(Object o) {
     if (o == this) return true;
     if (!(o instanceof Protocol)) return false;
@@ -339,6 +343,7 @@ public class Protocol extends JsonProperties {
       && this.propsEqual(that);
   }
 
+  @Override
   public int hashCode() {
     return name.hashCode() + namespace.hashCode()
       + types.hashCode() + messages.hashCode() + propsHashCode();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Schema.java b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
index f0d1f86..a55b265 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/Schema.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
@@ -96,7 +96,7 @@ public abstract class Schema extends JsonProperties {
   public enum Type {
     RECORD, ENUM, ARRAY, MAP, UNION, FIXED, STRING, BYTES,
       INT, LONG, FLOAT, DOUBLE, BOOLEAN, NULL;
-    private String name;
+    private final String name;
     private Type() { this.name = this.name().toLowerCase(Locale.ENGLISH); }
     public String getName() { return name; }
   };
@@ -339,7 +339,7 @@ public abstract class Schema extends JsonProperties {
   public String toString(boolean pretty) {
     try {
       StringWriter writer = new StringWriter();
-      JsonGenerator gen = FACTORY.createJsonGenerator(writer);
+      JsonGenerator gen = FACTORY.createGenerator(writer);
       if (pretty) gen.useDefaultPrettyPrinter();
       toJson(new Names(), gen);
       gen.flush();
@@ -364,6 +364,7 @@ public abstract class Schema extends JsonProperties {
     throw new AvroRuntimeException("Not a record: "+this);
   }
 
+  @Override
   public boolean equals(Object o) {
     if (o == this) return true;
     if (!(o instanceof Schema)) return false;
@@ -371,6 +372,8 @@ public abstract class Schema extends JsonProperties {
     if (!(this.type == that.type)) return false;
     return equalCachedHash(that) && propsEqual(that);
   }
+
+  @Override
   public final int hashCode() {
     if (hashCode == NO_HASHCODE)
       hashCode = computeHash();
@@ -436,7 +439,7 @@ public abstract class Schema extends JsonProperties {
     /** How values of this field should be ordered when sorting records. */
     public enum Order {
       ASCENDING, DESCENDING, IGNORE;
-      private String name;
+      private final String name;
       private Order() { this.name = this.name().toLowerCase(Locale.ENGLISH); }
     };
 
@@ -521,6 +524,7 @@ public abstract class Schema extends JsonProperties {
         return Collections.emptySet();
       return Collections.unmodifiableSet(aliases);
     }
+    @Override
     public boolean equals(Object other) {
       if (other == this) return true;
       if (!(other instanceof Field)) return false;
@@ -531,6 +535,7 @@ public abstract class Schema extends JsonProperties {
         (order == that.order) &&
         propsEqual(that);
     }
+    @Override
     public int hashCode() { return name.hashCode() + schema.computeHash(); }
 
     private boolean defaultValueEquals(JsonNode thatDefaultValue) {
@@ -570,15 +575,16 @@ public abstract class Schema extends JsonProperties {
       this.space = space;
       this.full = (this.space == null) ? this.name : this.space+"."+this.name;
     }
-    public boolean equals(Object o) {
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof Name)) return false;
       Name that = (Name)o;
       return full==null ? that.full==null : full.equals(that.full);
     }
-    public int hashCode() {
+    @Override public int hashCode() {
       return full==null ? 0 : full.hashCode();
     }
+    @Override
     public String toString() { return full; }
     public void writeName(Names names, JsonGenerator gen) throws IOException {
       if (name != null) gen.writeStringField("name", name);
@@ -606,21 +612,21 @@ public abstract class Schema extends JsonProperties {
         throw new AvroTypeException("Schemas may not be named after primitives: " + name.full);
       }
     }
-    public String getName() { return name.name; }
-    public String getDoc() { return doc; }
-    public String getNamespace() { return name.space; }
-    public String getFullName() { return name.full; }
-    public void addAlias(String alias) {
+    @Override public String getName() { return name.name; }
+    @Override public String getDoc() { return doc; }
+    @Override public String getNamespace() { return name.space; }
+    @Override public String getFullName() { return name.full; }
+    @Override public void addAlias(String alias) {
       addAlias(alias, null);
     }
-    public void addAlias(String name, String space) {
+    @Override public void addAlias(String name, String space) {
       if (aliases == null)
         this.aliases = new LinkedHashSet<>();
       if (space == null)
         space = this.name.space;
       aliases.add(new Name(name, space));
     }
-    public Set<String> getAliases() {
+    @Override public Set<String> getAliases() {
       Set<String> result = new LinkedHashSet<>();
       if (aliases != null)
         for (Name alias : aliases)
@@ -660,20 +666,20 @@ public abstract class Schema extends JsonProperties {
   private static class SeenPair {
     private Object s1; private Object s2;
     private SeenPair(Object s1, Object s2) { this.s1 = s1; this.s2 = s2; }
-    public boolean equals(Object o) {
+    @Override public boolean equals(Object o) {
       if (!(o instanceof SeenPair)) return false;
       return this.s1 == ((SeenPair)o).s1 && this.s2 == ((SeenPair)o).s2;
     }
-    public int hashCode() {
+    @Override public int hashCode() {
       return System.identityHashCode(s1) + System.identityHashCode(s2);
     }
   }
 
   private static final ThreadLocal<Set> SEEN_EQUALS = new ThreadLocal<Set>() {
-    protected Set initialValue() { return new HashSet(); }
+    @Override protected Set initialValue() { return new HashSet(); }
   };
   private static final ThreadLocal<Map> SEEN_HASHCODE = new ThreadLocal<Map>() {
-    protected Map initialValue() { return new IdentityHashMap(); }
+    @Override protected Map initialValue() { return new IdentityHashMap(); }
   };
 
   @SuppressWarnings(value="unchecked")
@@ -693,7 +699,7 @@ public abstract class Schema extends JsonProperties {
       setFields(fields);
     }
 
-    public boolean isError() { return isError; }
+    @Override public boolean isError() { return isError; }
 
     @Override
     public Field getField(String fieldname) {
@@ -732,7 +738,7 @@ public abstract class Schema extends JsonProperties {
       this.fields = ff.lock();
       this.hashCode = NO_HASHCODE;
     }
-    public boolean equals(Object o) {
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof RecordSchema)) return false;
       RecordSchema that = (RecordSchema)o;
@@ -761,7 +767,7 @@ public abstract class Schema extends JsonProperties {
         if (first) seen.clear();
       }
     }
-    void toJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void toJson(Names names, JsonGenerator gen) throws IOException {
       if (writeNameRef(names, gen)) return;
       String savedSpace = names.space;            // save namespace
       gen.writeStartObject();
@@ -782,7 +788,7 @@ public abstract class Schema extends JsonProperties {
       names.space = savedSpace;                   // restore namespace
     }
 
-    void fieldsToJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void fieldsToJson(Names names, JsonGenerator gen) throws IOException {
       gen.writeStartArray();
       for (Field f : fields) {
         gen.writeStartObject();
@@ -828,11 +834,11 @@ public abstract class Schema extends JsonProperties {
       if (enumDefault != null && !symbols.contains(enumDefault))
         throw new SchemaParseException("The Enum Default: " + enumDefault + " is not in the enum symbol set: " + symbols);
     }
-    public List<String> getEnumSymbols() { return symbols; }
-    public boolean hasEnumSymbol(String symbol) {
+    @Override public List<String> getEnumSymbols() { return symbols; }
+    @Override public boolean hasEnumSymbol(String symbol) {
       return ordinals.containsKey(symbol); }
-    public int getEnumOrdinal(String symbol) { return ordinals.get(symbol); }
-    public boolean equals(Object o) {
+    @Override public int getEnumOrdinal(String symbol) { return ordinals.get(symbol); }
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof EnumSchema)) return false;
       EnumSchema that = (EnumSchema)o;
@@ -844,7 +850,7 @@ public abstract class Schema extends JsonProperties {
     @Override
     public String getEnumDefault() { return enumDefault; }
     @Override int computeHash() { return super.computeHash() + symbols.hashCode(); }
-    void toJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void toJson(Names names, JsonGenerator gen) throws IOException {
       if (writeNameRef(names, gen)) return;
       gen.writeStartObject();
       gen.writeStringField("type", "enum");
@@ -869,8 +875,8 @@ public abstract class Schema extends JsonProperties {
       super(Type.ARRAY);
       this.elementType = elementType;
     }
-    public Schema getElementType() { return elementType; }
-    public boolean equals(Object o) {
+    @Override public Schema getElementType() { return elementType; }
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof ArraySchema)) return false;
       ArraySchema that = (ArraySchema)o;
@@ -881,7 +887,7 @@ public abstract class Schema extends JsonProperties {
     @Override int computeHash() {
       return super.computeHash() + elementType.computeHash();
     }
-    void toJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void toJson(Names names, JsonGenerator gen) throws IOException {
       gen.writeStartObject();
       gen.writeStringField("type", "array");
       gen.writeFieldName("items");
@@ -897,8 +903,8 @@ public abstract class Schema extends JsonProperties {
       super(Type.MAP);
       this.valueType = valueType;
     }
-    public Schema getValueType() { return valueType; }
-    public boolean equals(Object o) {
+    @Override public Schema getValueType() { return valueType; }
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof MapSchema)) return false;
       MapSchema that = (MapSchema)o;
@@ -909,7 +915,7 @@ public abstract class Schema extends JsonProperties {
     @Override int computeHash() {
       return super.computeHash() + valueType.computeHash();
     }
-    void toJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void toJson(Names names, JsonGenerator gen) throws IOException {
       gen.writeStartObject();
       gen.writeStringField("type", "map");
       gen.writeFieldName("values");
@@ -937,9 +943,9 @@ public abstract class Schema extends JsonProperties {
           throw new AvroRuntimeException("Duplicate in union:" + name);
       }
     }
-    public List<Schema> getTypes() { return types; }
-    public Integer getIndexNamed(String name) { return indexByName.get(name); }
-    public boolean equals(Object o) {
+    @Override public List<Schema> getTypes() { return types; }
+    @Override public Integer getIndexNamed(String name) { return indexByName.get(name); }
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof UnionSchema)) return false;
       UnionSchema that = (UnionSchema)o;
@@ -959,7 +965,7 @@ public abstract class Schema extends JsonProperties {
       throw new AvroRuntimeException("Can't set properties on a union: "+this);
     }
 
-    void toJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void toJson(Names names, JsonGenerator gen) throws IOException {
       gen.writeStartArray();
       for (Schema type : types)
         type.toJson(names, gen);
@@ -975,8 +981,8 @@ public abstract class Schema extends JsonProperties {
         throw new IllegalArgumentException("Invalid fixed size: "+size);
       this.size = size;
     }
-    public int getFixedSize() { return size; }
-    public boolean equals(Object o) {
+    @Override public int getFixedSize() { return size; }
+    @Override public boolean equals(Object o) {
       if (o == this) return true;
       if (!(o instanceof FixedSchema)) return false;
       FixedSchema that = (FixedSchema)o;
@@ -986,7 +992,7 @@ public abstract class Schema extends JsonProperties {
         && propsEqual(that);
     }
     @Override int computeHash() { return super.computeHash() + size; }
-    void toJson(Names names, JsonGenerator gen) throws IOException {
+    @Override void toJson(Names names, JsonGenerator gen) throws IOException {
       if (writeNameRef(names, gen)) return;
       gen.writeStartObject();
       gen.writeStringField("type", "fixed");
@@ -1077,14 +1083,14 @@ public abstract class Schema extends JsonProperties {
     /** Parse a schema from the provided file.
      * If named, the schema is added to the names known to this parser. */
     public Schema parse(File file) throws IOException {
-      return parse(FACTORY.createJsonParser(file));
+      return parse(FACTORY.createParser(file));
     }
 
     /** Parse a schema from the provided stream.
      * If named, the schema is added to the names known to this parser.
      * The input stream stays open after the parsing. */
     public Schema parse(InputStream in) throws IOException {
-      return parse(FACTORY.createJsonParser(in).disable(
+      return parse(FACTORY.createParser(in).disable(
               JsonParser.Feature.AUTO_CLOSE_SOURCE));
     }
 
@@ -1100,7 +1106,7 @@ public abstract class Schema extends JsonProperties {
      * If named, the schema is added to the names known to this parser. */
     public Schema parse(String s) {
       try {
-        return parse(FACTORY.createJsonParser(new StringReader(s)));
+        return parse(FACTORY.createParser(new StringReader(s)));
       } catch (IOException e) {
         throw new SchemaParseException(e);
       }
@@ -1131,6 +1137,7 @@ public abstract class Schema extends JsonProperties {
    * @throws IOException if there was trouble reading the contents or they are invalid
    * @deprecated use {@link Schema.Parser} instead.
    */
+  @Deprecated
   public static Schema parse(File file) throws IOException {
     return new Parser().parse(file);
   }
@@ -1143,6 +1150,7 @@ public abstract class Schema extends JsonProperties {
    * @throws IOException if there was trouble reading the contents or they are invalid
    * @deprecated use {@link Schema.Parser} instead.
    */
+  @Deprecated
   public static Schema parse(InputStream in) throws IOException {
     return new Parser().parse(in);
   }
@@ -1150,6 +1158,7 @@ public abstract class Schema extends JsonProperties {
   /** Construct a schema from <a href="http://json.org/">JSON</a> text.
    * @deprecated use {@link Schema.Parser} instead.
    */
+  @Deprecated
   public static Schema parse(String jsonSchema) {
     return new Parser().parse(jsonSchema);
   }
@@ -1158,6 +1167,7 @@ public abstract class Schema extends JsonProperties {
    * @param validate true if names should be validated, false if not.
    * @deprecated use {@link Schema.Parser} instead.
    */
+  @Deprecated
   public static Schema parse(String jsonSchema, boolean validate) {
     return new Parser().setValidate(validate).parse(jsonSchema);
   }
@@ -1184,20 +1194,19 @@ public abstract class Schema extends JsonProperties {
     public String space() { return space; }
     public void space(String space) { this.space = space; }
 
-    @Override
-    public Schema get(Object o) {
-      Name name;
-      if (o instanceof String) {
-        Type primitive = PRIMITIVES.get((String)o);
-        if (primitive != null) return Schema.create(primitive);
-        name = new Name((String)o, space);
-        if (!containsKey(name))                   // if not in default
-          name = new Name((String)o, "");         // try anonymous
-      } else {
-        name = (Name)o;
+    public Schema get(String o) {
+      Type primitive = PRIMITIVES.get(o);
+      if (primitive != null) {
+        return Schema.create(primitive);
+      }
+      Name name = new Name((String) o, space);
+      if (!containsKey(name)) {
+        // if not in default try anonymous
+        name = new Name((String) o, "");
       }
       return super.get(name);
     }
+
     public boolean contains(Schema schema) {
       return get(((NamedSchema)schema).name) != null;
     }
@@ -1482,7 +1491,7 @@ public abstract class Schema extends JsonProperties {
 
   static JsonNode parseJson(String s) {
     try {
-      return MAPPER.readTree(FACTORY.createJsonParser(new StringReader(s)));
+      return MAPPER.readTree(FACTORY.createParser(new StringReader(s)));
     } catch (JsonParseException e) {
       throw new RuntimeException(e);
     } catch (IOException e) {
@@ -1503,7 +1512,7 @@ public abstract class Schema extends JsonProperties {
    * contains the same data elements in the same order, but with possibly
    * different names. */
   public static Schema applyAliases(Schema writer, Schema reader) {
-    if (writer == reader) return writer;          // same schema
+    if (writer.equals(reader)) return writer;          // same schema
 
     // create indexes of names
     Map<Schema,Schema> seen = new IdentityHashMap<>(1);
@@ -1526,50 +1535,52 @@ public abstract class Schema extends JsonProperties {
     Name name = s instanceof NamedSchema ? ((NamedSchema)s).name : null;
     Schema result = s;
     switch (s.getType()) {
-    case RECORD:
-      if (seen.containsKey(s)) return seen.get(s); // break loops
-      if (aliases.containsKey(name))
-        name = aliases.get(name);
-      result = Schema.createRecord(name.full, s.getDoc(), null, s.isError());
-      seen.put(s, result);
-      List<Field> newFields = new ArrayList<>();
-      for (Field f : s.getFields()) {
-        Schema fSchema = applyAliases(f.schema, seen, aliases, fieldAliases);
-        String fName = getFieldAlias(name, f.name, fieldAliases);
-        Field newF = new Field(fName, fSchema, f.doc, f.defaultValue, f.order);
-        newF.putAll(f);               // copy props
-        newFields.add(newF);
-      }
-      result.setFields(newFields);
-      break;
-    case ENUM:
-      if (aliases.containsKey(name))
-        result = Schema.createEnum(aliases.get(name).full, s.getDoc(), null,
-                                   s.getEnumSymbols(), s.getEnumDefault());
-      break;
-    case ARRAY:
-      Schema e = applyAliases(s.getElementType(), seen, aliases, fieldAliases);
-      if (e != s.getElementType())
-        result = Schema.createArray(e);
-      break;
-    case MAP:
-      Schema v = applyAliases(s.getValueType(), seen, aliases, fieldAliases);
-      if (v != s.getValueType())
-        result = Schema.createMap(v);
-      break;
-    case UNION:
-      List<Schema> types = new ArrayList<>();
-      for (Schema branch : s.getTypes())
-        types.add(applyAliases(branch, seen, aliases, fieldAliases));
-      result = Schema.createUnion(types);
-      break;
-    case FIXED:
-      if (aliases.containsKey(name))
-        result = Schema.createFixed(aliases.get(name).full, s.getDoc(), null,
-                                    s.getFixedSize());
-      break;
-    }
-    if (result != s)
+      case RECORD:
+        if (seen.containsKey(s)) return seen.get(s); // break loops
+        if (aliases.containsKey(name))
+          name = aliases.get(name);
+        result = Schema.createRecord(name.full, s.getDoc(), null, s.isError());
+        seen.put(s, result);
+        List<Field> newFields = new ArrayList<>();
+        for (Field f : s.getFields()) {
+          Schema fSchema = applyAliases(f.schema, seen, aliases, fieldAliases);
+          String fName = getFieldAlias(name, f.name, fieldAliases);
+          Field newF = new Field(fName, fSchema, f.doc, f.defaultValue, f.order);
+          newF.putAll(f);               // copy props
+          newFields.add(newF);
+        }
+        result.setFields(newFields);
+        break;
+      case ENUM:
+        if (aliases.containsKey(name))
+          result = Schema.createEnum(aliases.get(name).full, s.getDoc(), null,
+                                     s.getEnumSymbols(), s.getEnumDefault());
+        break;
+      case ARRAY:
+        Schema e = applyAliases(s.getElementType(), seen, aliases, fieldAliases);
+        if (!e.equals(s.getElementType()))
+          result = Schema.createArray(e);
+        break;
+      case MAP:
+        Schema v = applyAliases(s.getValueType(), seen, aliases, fieldAliases);
+        if (!v.equals(s.getValueType()))
+          result = Schema.createMap(v);
+        break;
+      case UNION:
+        List<Schema> types = new ArrayList<>();
+        for (Schema branch : s.getTypes())
+          types.add(applyAliases(branch, seen, aliases, fieldAliases));
+        result = Schema.createUnion(types);
+        break;
+      case FIXED:
+        if (aliases.containsKey(name))
+          result = Schema.createFixed(aliases.get(name).full, s.getDoc(), null,
+                                      s.getFixedSize());
+        break;
+      default:
+        // NO-OP
+    }
+    if (!result.equals(s))
       result.putAll(s);        // copy props
     return result;
   }
@@ -1677,46 +1688,52 @@ public abstract class Schema extends JsonProperties {
       }
     }
 
+    @Override
     public boolean add(E e) {
       ensureUnlocked();
       return super.add(e);
     }
 
+    @Override
     public boolean remove(Object o) {
       ensureUnlocked();
       return super.remove(o);
     }
 
+    @Override
     public E remove(int index) {
       ensureUnlocked();
       return super.remove(index);
     }
 
+    @Override
     public boolean addAll(Collection<? extends E> c) {
       ensureUnlocked();
       return super.addAll(c);
     }
 
+    @Override
     public boolean addAll(int index, Collection<? extends E> c) {
       ensureUnlocked();
       return super.addAll(index, c);
     }
 
+    @Override
     public boolean removeAll(Collection<?> c) {
       ensureUnlocked();
       return super.removeAll(c);
     }
 
+    @Override
     public boolean retainAll(Collection<?> c) {
       ensureUnlocked();
       return super.retainAll(c);
     }
 
+    @Override
     public void clear() {
       ensureUnlocked();
       super.clear();
     }
-
   }
-
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
index 471de76..cc8f5b9 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
@@ -27,12 +27,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import com.fasterxml.jackson.core.util.BufferRecyclers;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.util.internal.JacksonUtils;
 
-import com.fasterxml.jackson.core.util.BufferRecyclers;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.TextNode;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
index b1a499e..4b9c813 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
@@ -885,7 +885,7 @@ public class SchemaCompatibility {
     /** {@inheritDoc} */
     @Override
     public boolean equals(Object other) {
-      if ((null != other) && (other instanceof SchemaPairCompatibility)) {
+      if ((other instanceof SchemaPairCompatibility)) {
         final SchemaPairCompatibility result = (SchemaPairCompatibility) other;
         return objectsEqual(result.mResult, mResult)
             && objectsEqual(result.mReader, mReader)
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/Json.java b/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
index 09c2363..0270e03 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
@@ -58,7 +58,7 @@ public class Json {
     try {
       InputStream in = Json.class.getResourceAsStream("/org/apache/avro/data/Json.avsc");
       try {
-        SCHEMA = Schema.parse(in);
+        SCHEMA = new Schema.Parser().parse(in);
       } finally {
         in.close();
       }
@@ -113,7 +113,7 @@ public class Json {
    */
   public static Object parseJson(String s) {
     try {
-      return JacksonUtils.toObject(MAPPER.readTree(FACTORY.createJsonParser(
+      return JacksonUtils.toObject(MAPPER.readTree(FACTORY.createParser(
           new StringReader(s))));
     } catch (JsonParseException e) {
       throw new RuntimeException(e);
@@ -216,7 +216,7 @@ public class Json {
       ObjectNode object = JsonNodeFactory.instance.objectNode();
       for (long l = in.readMapStart(); l > 0; l = in.mapNext())
         for (long i = 0; i < l; i++)
-          object.put(in.readString(), read(in));
+          object.set(in.readString(), read(in));
       return object;
     default:
       throw new AvroRuntimeException("Unexpected Json node type");
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
index 6d2f4c1..626ab19 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
@@ -48,7 +48,7 @@ public abstract class RecordBuilderBase<T extends IndexedRecord>
   protected RecordBuilderBase(Schema schema, GenericData data) {
     this.schema = schema;
     this.data = data;
-    fields = (Field[]) schema.getFields().toArray(EMPTY_FIELDS);
+    fields = schema.getFields().toArray(EMPTY_FIELDS);
     fieldSetFlags = new boolean[fields.length];
   }
 
@@ -60,7 +60,7 @@ public abstract class RecordBuilderBase<T extends IndexedRecord>
   protected RecordBuilderBase(RecordBuilderBase<T> other, GenericData data) {
     this.schema = other.schema;
     this.data = data;
-    fields = (Field[]) schema.getFields().toArray(EMPTY_FIELDS);
+    fields = schema.getFields().toArray(EMPTY_FIELDS);
     fieldSetFlags = new boolean[other.fieldSetFlags.length];
     System.arraycopy(
         other.fieldSetFlags, 0, fieldSetFlags, 0, fieldSetFlags.length);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
index 5fccf9e..fb80903 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
@@ -174,7 +174,6 @@ public class DataFileReader<D>
     // if no match or EOF set start to the end position
       blockStart = sin.tell();
     //System.out.println("block start location after EOF: " + blockStart );
-      return;
   }
 
   @Override
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
index 1e5193e..efee14a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
@@ -95,7 +95,7 @@ public class DataFileReader12<D> implements FileReader<D>, Closeable {
     if (codec != null && ! codec.equals(NULL_CODEC)) {
       throw new UnknownAvroCodecException("Unknown codec: " + codec);
     }
-    this.schema = Schema.parse(getMetaString(SCHEMA));
+    this.schema = new Schema.Parser().parse(getMetaString(SCHEMA));
     this.reader = reader;
 
     reader.setSchema(schema);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
index 377d2a6..2e07108 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
@@ -122,7 +122,7 @@ public class DataFileStream<D> implements Iterator<D>, Iterable<D>, Closeable {
 
     // finalize the header
     header.metaKeyList = Collections.unmodifiableList(header.metaKeyList);
-    header.schema = Schema.parse(getMetaString(DataFileConstants.SCHEMA),false);
+    header.schema = new Schema.Parser().setValidate(false).parse(getMetaString(DataFileConstants.SCHEMA));
     this.codec = resolveCodec();
     reader.setSchema(header.schema);
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
index 19273d1..0dc9b06 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
@@ -17,6 +17,8 @@
  */
 package org.apache.avro.file;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.io.BufferedOutputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.Closeable;
@@ -26,7 +28,6 @@ import java.io.Flushable;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.HashMap;
@@ -254,7 +255,7 @@ public class DataFileWriter<D> implements Closeable, Flushable {
     try {
       MessageDigest digester = MessageDigest.getInstance("MD5");
       long time = System.currentTimeMillis();
-      digester.update((UUID.randomUUID()+"@"+time).getBytes());
+      digester.update((UUID.randomUUID()+"@"+time).getBytes(UTF_8));
       return digester.digest();
     } catch (NoSuchAlgorithmException e) {
       throw new RuntimeException(e);
@@ -268,7 +269,7 @@ public class DataFileWriter<D> implements Closeable, Flushable {
   }
 
   private DataFileWriter<D> setMetaInternal(String key, String value) {
-    return setMetaInternal(key, value.getBytes(StandardCharsets.UTF_8));
+    return setMetaInternal(key, value.getBytes(UTF_8));
   }
 
   /** Set a metadata property. */
@@ -285,7 +286,7 @@ public class DataFileWriter<D> implements Closeable, Flushable {
 
   /** Set a metadata property. */
   public DataFileWriter<D> setMeta(String key, String value) {
-    return setMeta(key, value.getBytes(StandardCharsets.UTF_8));
+    return setMeta(key, value.getBytes(UTF_8));
   }
   /** Set a metadata property. */
   public DataFileWriter<D> setMeta(String key, long value) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java b/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
index 1c58f1e..5a14148 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
@@ -20,6 +20,7 @@ package org.apache.avro.file;
 import java.io.IOException;
 import java.io.Closeable;
 import java.util.Iterator;
+import java.util.NoSuchElementException;
 
 import org.apache.avro.Schema;
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java
index 278b775..7530836 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java
@@ -23,20 +23,23 @@ import java.io.IOException;
 /** A {@link SeekableInput} backed with data in a byte array. */
 public class SeekableByteArrayInput extends ByteArrayInputStream implements SeekableInput {
 
-    public SeekableByteArrayInput(byte[] data) {
-        super(data);
-    }
+  public SeekableByteArrayInput(byte[] data) {
+      super(data);
+  }
 
-    public long length() throws IOException {
-        return this.count;
-    }
+  @Override
+  public long length() throws IOException {
+    return this.count;
+  }
 
-    public void seek(long p) throws IOException {
-        this.reset();
-        this.skip(p);
-    }
+  @Override
+  public void seek(long p) throws IOException {
+    this.reset();
+    this.skip(p);
+  }
 
-    public long tell() throws IOException {
-        return this.pos;
-    }
+  @Override
+  public long tell() throws IOException {
+    return this.pos;
+  }
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java
index 9bf68a1..50d460c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java
@@ -29,9 +29,8 @@ public class SeekableFileInput
   public SeekableFileInput(File file) throws IOException { super(file); }
   public SeekableFileInput(FileDescriptor fd) throws IOException { super(fd); }
 
-  public void seek(long p) throws IOException { getChannel().position(p); }
-  public long tell() throws IOException { return getChannel().position(); }
-  public long length() throws IOException { return getChannel().size(); }
-
+  @Override public void seek(long p) throws IOException { getChannel().position(p); }
+  @Override public long tell() throws IOException { return getChannel().position(); }
+  @Override public long length() throws IOException { return getChannel().size(); }
 }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
index e18acfb..fd117b3 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
@@ -982,7 +982,7 @@ public class GenericData {
       return e1.hasNext() ? 1 : (e2.hasNext() ? -1 : 0);
     case MAP:
       if (equals)
-        return ((Map)o1).equals(o2) ? 0 : 1;
+        return o1.equals(o2) ? 0 : 1;
       throw new AvroRuntimeException("Can't compare maps!");
     case UNION:
       int i1 = resolveUnion(s, o1);
@@ -1115,7 +1115,7 @@ public class GenericData {
         Map<CharSequence, Object> mapCopy =
           new HashMap<>(mapValue.size());
         for (Map.Entry<CharSequence, Object> entry : mapValue.entrySet()) {
-          mapCopy.put((CharSequence)(deepCopy(STRINGS, entry.getKey())),
+          mapCopy.put(deepCopy(STRINGS, entry.getKey()),
               deepCopy(schema.getValueType(), entry.getValue()));
         }
         return mapCopy;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
index b5ef33b..452522a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
@@ -548,7 +548,7 @@ public class GenericDatumReader<D> implements DatumReader<D> {
       }
       break;
     case UNION:
-      skip(schema.getTypes().get((int)in.readIndex()), in);
+      skip(schema.getTypes().get(in.readIndex()), in);
       break;
     case FIXED:
       in.skipFixed(schema.getFixedSize());
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java
index 3661f10..4c461f1 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java
@@ -21,5 +21,6 @@ package org.apache.avro.generic;
 public interface GenericEnumSymbol<E extends GenericEnumSymbol<E>>
     extends GenericContainer, Comparable<E> {
   /** Return the symbol. */
+  @Override
   String toString();
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
index 61bd958..74b6fb0 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
@@ -332,7 +332,7 @@ public class BinaryDecoder extends Decoder {
   protected void doSkipBytes(long length) throws IOException {
     int remaining = limit - pos;
     if (length <= remaining) {
-      pos += length;
+      pos = (int) (pos + length);
     } else {
       limit = pos = 0;
       length -= remaining;
@@ -645,7 +645,6 @@ public class BinaryDecoder extends Decoder {
       decoder.minPos = 0;
       decoder.limit = 0;
       this.ba = new BufferAccessor(decoder);
-      return;
     }
 
     protected void detach() {
@@ -758,7 +757,7 @@ public class BinaryDecoder extends Decoder {
       int pos = ba.getPos();
       int remaining = lim - pos;
       if (remaining > n) {
-        pos += n;
+        pos = (int) (pos + n);
         ba.setPos(pos);
         return n;
       } else {
@@ -956,9 +955,9 @@ public class BinaryDecoder extends Decoder {
       // the buffer is shared, so this should return 0
       max = ba.getLim();
       position = ba.getPos();
-      long remaining = max - position;
+      long remaining = (long) max - position;
       if (remaining >= length) {
-        position += length;
+        position = (int) (position + length);
         ba.setPos(position);
         return length;
       } else {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java b/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
index ddb3697..0f1e80a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
@@ -132,7 +132,7 @@ public class DecoderFactory {
     if (null == reuse || !reuse.getClass().equals(BinaryDecoder.class)) {
       return new BinaryDecoder(in, binaryDecoderBufferSize);
     } else {
-      return ((BinaryDecoder)reuse).configure(in, binaryDecoderBufferSize);
+      return reuse.configure(in, binaryDecoderBufferSize);
     }
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
index 26a7b1e..4804fe0 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
@@ -108,7 +108,7 @@ public class JsonDecoder extends ParsingDecoder
     parser.reset();
     reorderBuffers.clear();
     currentReorderBuffer = null;
-    this.in = jsonFactory.createJsonParser(in);
+    this.in = jsonFactory.createParser(in);
     this.in.nextToken();
     return this;
   }
@@ -132,7 +132,7 @@ public class JsonDecoder extends ParsingDecoder
     parser.reset();
     reorderBuffers.clear();
     currentReorderBuffer = null;
-    this.in = new JsonFactory().createJsonParser(in);
+    this.in = new JsonFactory().createParser(in);
     this.in.nextToken();
     return this;
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
index 44657f0..319a8c7 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
@@ -81,10 +81,10 @@ public class JsonEncoder extends ParsingEncoder implements Parser.ActionHandler
     if (null == out)
       throw new NullPointerException("OutputStream cannot be null");
     JsonGenerator g
-      = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
+      = new JsonFactory().createGenerator(out, JsonEncoding.UTF8);
     if (pretty) {
       DefaultPrettyPrinter pp = new DefaultPrettyPrinter() {
-        //@Override
+        @Override
         public void writeRootValueSeparator(JsonGenerator jg)
             throws IOException
         {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
index 45ff922..62fc55e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
@@ -183,7 +183,7 @@ public class ResolvingDecoder extends ValidatingDecoder {
       return (float) in.readLong();
     } else {
       assert actual == Symbol.FLOAT;
-      return (float) in.readFloat();
+      return in.readFloat();
     }
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java
index 44fc19b..48e8252 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java
@@ -33,6 +33,7 @@ public class JsonGrammarGenerator extends ValidatingGrammarGenerator {
    * Returns the non-terminal that is the start symbol
    * for the grammar for the grammar for the given schema <tt>sc</tt>.
    */
+  @Override
   public Symbol generate(Schema schema) {
     return Symbol.root(generate(schema, new HashMap<>()));
   }
@@ -47,6 +48,7 @@ public class JsonGrammarGenerator extends ValidatingGrammarGenerator {
    * @param seen  A map of schema to symbol mapping done so far.
    * @return      The start symbol for the schema
    */
+  @Override
   public Symbol generate(Schema sc, Map<LitS, Symbol> seen) {
     switch (sc.getType()) {
     case NULL:
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
index 61073dc..41d8cc4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
@@ -612,18 +612,23 @@ public class ResolvingGrammarGenerator extends ValidatingGrammarGenerator {
    * from those put in by {@link ValidatingGrammarGenerator#resolving resolving()}.
    */
    static class LitS2 extends ValidatingGrammarGenerator.LitS {
-     public Schema expected;
-     public LitS2(Schema actual, Schema expected) {
+      public Schema expected;
+
+      public LitS2(Schema actual, Schema expected) {
        super(actual);
        this.expected = expected;
-     }
-     public boolean equals(Object o) {
+      }
+
+      @Override
+      public boolean equals(Object o) {
        if (! (o instanceof LitS2)) return false;
        LitS2 other = (LitS2) o;
        return actual == other.actual && expected == other.expected;
-     }
-     public int hashCode() {
-       return super.hashCode() + expected.hashCode();
-     }
-   }
+      }
+
+      @Override
+      public int hashCode() {
+         return super.hashCode() + expected.hashCode();
+      }
+  }
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
index df4eade..151cf1b 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
@@ -264,7 +264,7 @@ public abstract class Symbol {
       super(Kind.TERMINAL);
       this.printName = printName;
     }
-    public String toString() { return printName; }
+    @Override public String toString() { return printName; }
   }
 
   public static class ImplicitAction extends Symbol {
@@ -313,14 +313,17 @@ public abstract class Symbol {
       return production.length;
     }
 
+    @Override
     public Iterator<Symbol> iterator() {
       return new Iterator<Symbol>() {
         private int pos = production.length;
 
+        @Override
         public boolean hasNext() {
           return 0 < pos;
         }
 
+        @Override
         public Symbol next() {
           if (0 < pos) {
             return production[--pos];
@@ -329,6 +332,7 @@ public abstract class Symbol {
           }
         }
 
+        @Override
         public void remove() {
           throw new UnsupportedOperationException();
         }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
index 6e1a55e..d797a4a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
@@ -124,11 +124,13 @@ public class ValidatingGrammarGenerator {
      * Two LitS are equal if and only if their underlying schema is
      * the same (not merely equal).
      */
+    @Override
     public boolean equals(Object o) {
       if (! (o instanceof LitS)) return false;
-      return actual == ((LitS)o).actual;
+      return actual.equals(((LitS)o).actual);
     }
 
+    @Override
     public int hashCode() {
       return actual.hashCode();
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java
index 2968538..420f1e1 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java
@@ -43,7 +43,7 @@ public class DateAsLongEncoding extends CustomEncoding<Date> {
 
   @Override
   protected final Date read(Object reuse, Decoder in) throws IOException {
-    if (reuse != null && reuse instanceof Date) {
+    if (reuse instanceof Date) {
       ((Date)reuse).setTime(in.readLong());
       return (Date)reuse;
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
index bf20f44..f8cfcfa 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
@@ -31,14 +31,14 @@ class FieldAccessReflect extends FieldAccess {
     AvroEncode enc = field.getAnnotation(AvroEncode.class);
     if (enc != null)
       try {
-        return new ReflectionBasesAccessorCustomEncoded(field, enc.using().newInstance());
+        return new ReflectionBasesAccessorCustomEncoded(field, enc.using().getDeclaredConstructor().newInstance());
       } catch (Exception e) {
         throw new AvroRuntimeException("Could not instantiate custom Encoding");
       }
     return new ReflectionBasedAccessor(field);
   }
 
-  private class ReflectionBasedAccessor extends FieldAccessor {
+  private static class ReflectionBasedAccessor extends FieldAccessor {
     protected final Field field;
     private boolean isStringable;
     private boolean isCustomEncoded;
@@ -82,7 +82,7 @@ class FieldAccessReflect extends FieldAccess {
     }
   }
 
-  private final class ReflectionBasesAccessorCustomEncoded extends ReflectionBasedAccessor {
+  private static final class ReflectionBasesAccessorCustomEncoded extends ReflectionBasedAccessor {
 
     private CustomEncoding<?> encoding;
 
@@ -109,7 +109,7 @@ class FieldAccessReflect extends FieldAccess {
       }
     }
 
-    protected boolean isCustomEncoded() {
+    @Override protected boolean isCustomEncoded() {
       return true;
     }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
index 0a7d09a..874d11d 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
@@ -47,7 +47,7 @@ class FieldAccessUnsafe extends FieldAccess {
     AvroEncode enc = field.getAnnotation(AvroEncode.class);
     if (enc != null)
       try {
-        return new UnsafeCustomEncodedField(field, enc.using().newInstance() );
+        return new UnsafeCustomEncodedField(field, enc.using().getDeclaredConstructor().newInstance() );
       } catch (Exception e) {
         throw new AvroRuntimeException("Could not instantiate custom Encoding");
       }
@@ -358,6 +358,7 @@ class FieldAccessUnsafe extends FieldAccess {
       encoding.write(UNSAFE.getObject(object, offset), out);
     }
 
+    @Override
     protected boolean isCustomEncoded() {
       return true;
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
index 864b84e..5c7728c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
@@ -345,7 +345,7 @@ public class ReflectData extends SpecificData {
     return c;
   }
 
-  private static final Class BYTES_CLASS = new byte[0].getClass();
+  private static final Class BYTES_CLASS = byte[].class;
   private static final IdentityHashMap<Class, Class> ARRAY_CLASSES;
   static {
     ARRAY_CLASSES = new IdentityHashMap<>();
@@ -363,6 +363,7 @@ public class ReflectData extends SpecificData {
    * It returns false for non-string-maps because Avro writes out such maps
    * as an array of records. Even their JSON representation is an array.
    */
+  @Override
   protected boolean isMap(Object datum) {
     return (datum instanceof Map) && !isNonStringMap(datum);
   }
@@ -555,7 +556,7 @@ public class ReflectData extends SpecificData {
       }
       AvroSchema explicit = c.getAnnotation(AvroSchema.class);
       if (explicit != null)                                  // explicit schema
-        return Schema.parse(explicit.value());
+        return new Schema.Parser().parse(explicit.value());
       if (CharSequence.class.isAssignableFrom(c))            // String
         return Schema.create(Schema.Type.STRING);
       if (ByteBuffer.class.isAssignableFrom(c))              // bytes
@@ -736,14 +737,14 @@ public class ReflectData extends SpecificData {
     AvroEncode enc = field.getAnnotation(AvroEncode.class);
     if (enc != null)
       try {
-          return enc.using().newInstance().getSchema();
+          return enc.using().getDeclaredConstructor().newInstance().getSchema();
       } catch (Exception e) {
           throw new AvroRuntimeException("Could not create schema from custom serializer for " + field.getName());
       }
 
     AvroSchema explicit = field.getAnnotation(AvroSchema.class);
     if (explicit != null)                                   // explicit schema
-      return Schema.parse(explicit.value());
+      return new Schema.Parser().parse(explicit.value());
 
     Union union = field.getAnnotation(Union.class);
     if (union != null)
@@ -779,8 +780,7 @@ public class ReflectData extends SpecificData {
       }
 
     // reverse types, since they were defined in reference order
-    List<Schema> types = new ArrayList<>();
-    types.addAll(names.values());
+    List<Schema> types = new ArrayList<>(names.values());
     Collections.reverse(types);
     protocol.setTypes(types);
 
@@ -807,7 +807,7 @@ public class ReflectData extends SpecificData {
       for (int j = 0; j < annotations[i].length; j++) {
         Annotation annotation = annotations[i][j];
         if (annotation instanceof AvroSchema)     // explicit schema
-          paramSchema = Schema.parse(((AvroSchema)annotation).value());
+          paramSchema = new Schema.Parser().parse(((AvroSchema)annotation).value());
         else if (annotation instanceof Union)     // union
           paramSchema = getAnnotatedUnion(((Union)annotation), names);
         else if (annotation instanceof Nullable)  // nullable
@@ -830,7 +830,7 @@ public class ReflectData extends SpecificData {
 
     AvroSchema explicit = method.getAnnotation(AvroSchema.class);
     if (explicit != null)                         // explicit schema
-      response = Schema.parse(explicit.value());
+      response = new Schema.Parser().parse(explicit.value());
 
     List<Schema> errs = new ArrayList<>();
     errs.add(Protocol.SYSTEM_ERROR);              // every method can throw
@@ -838,7 +838,8 @@ public class ReflectData extends SpecificData {
       if (err != AvroRemoteException.class)
         errs.add(getSchema(err, names));
     Schema errors = Schema.createUnion(errs);
-    return protocol.createMessage(method.getName(), null /* doc */, request, response, errors);
+    return protocol.createMessage(method.getName(), null /* doc */,
+      new LinkedHashMap<String,String>() /* propMap */, request, response, errors);
   }
 
   private Schema getSchema(Type type, Map<String,Schema> names) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
index 1bfc957..345e0d1 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
@@ -256,7 +256,7 @@ public class ReflectDatumReader<T> extends SpecificDatumReader<T> {
     else if (Short.class.getName().equals(intClass))
       value = ((Integer)value).shortValue();
     else if (Character.class.getName().equals(intClass))
-        value = ((Character)(char)(int)(Integer)value);
+        value = (char)(int)(Integer)value;
     return value;
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
index f540bfd..bb4550c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
@@ -68,7 +68,7 @@ class ReflectionUtil {
 
   private static <T> T load(String name, Class<T> type) throws Exception {
     return ReflectionUtil.class.getClassLoader().loadClass(name)
-        .asSubclass(type).newInstance();
+        .asSubclass(type).getDeclaredConstructor().newInstance();
   }
 
   public static FieldAccess getFieldAccess() {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
index ff01177..3b83fe4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
@@ -373,7 +373,7 @@ public class SpecificData extends GenericData {
 
           if (!fullName.equals(getClassName(schema)))
             // HACK: schema mismatches class. maven shade plugin? try replacing.
-            schema = Schema.parse
+            schema = new Schema.Parser().parse
               (schema.toString().replace(schema.getNamespace(),
                                          c.getPackage().getName()));
         } catch (NoSuchFieldException e) {
@@ -426,11 +426,11 @@ public class SpecificData extends GenericData {
   @Override
   protected int compare(Object o1, Object o2, Schema s, boolean eq) {
     switch (s.getType()) {
-    case ENUM:
-      if (o1 instanceof Enum)
-        return ((Enum)o1).ordinal() - ((Enum)o2).ordinal();
-    default:
-      return super.compare(o1, o2, s, eq);
+      case ENUM:
+        if (o1 instanceof Enum)
+          return ((Enum)o1).ordinal() - ((Enum)o2).ordinal();
+      default:
+        return super.compare(o1, o2, s, eq);
     }
   }
 
@@ -442,8 +442,8 @@ public class SpecificData extends GenericData {
     boolean useSchema = SchemaConstructable.class.isAssignableFrom(c);
     Object result;
     try {
-      Constructor meth = (Constructor)CTOR_CACHE.get(c);
-      result = meth.newInstance(useSchema ? new Object[]{s} : (Object[])null);
+      Constructor meth = CTOR_CACHE.get(c);
+      result = meth.newInstance(useSchema ? new Object[]{s} : null);
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
index d5976a3..8c6da01 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
@@ -46,10 +46,11 @@ public abstract class SpecificExceptionBase extends AvroRemoteException
     super(value, cause);
   }
 
-  public abstract Schema getSchema();
-  public abstract Object get(int field);
-  public abstract void put(int field, Object value);
+  @Override public abstract Schema getSchema();
+  @Override public abstract Object get(int field);
+  @Override public abstract void put(int field, Object value);
 
+  @Override
   public boolean equals(Object that) {
     if (that == this) return true;                        // identical object
     if (!(that instanceof SpecificExceptionBase)) return false; // not a record
@@ -57,6 +58,7 @@ public abstract class SpecificExceptionBase extends AvroRemoteException
     return SpecificData.get().compare(this, that, this.getSchema()) == 0;
   }
 
+  @Override
   public int hashCode() {
     return SpecificData.get().hashCode(this, this.getSchema());
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
index ac003ba..595c518 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
@@ -33,9 +33,9 @@ public abstract class SpecificRecordBase
   implements SpecificRecord, Comparable<SpecificRecord>, GenericRecord,
              Externalizable {
 
-  public abstract Schema getSchema();
-  public abstract Object get(int field);
-  public abstract void put(int field, Object value);
+  @Override public abstract Schema getSchema();
+  @Override public abstract Object get(int field);
+  @Override public abstract void put(int field, Object value);
 
   public SpecificData getSpecificData() {
     // Default implementation for backwards compatibility, overridden in generated code
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
index 53b9ea8..1fb96e0 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
-import java.util.LinkedList;
+import java.util.ArrayList;
 import java.util.List;
 
 /** Utility to collect data written to an {@link OutputStream} in {@link
@@ -61,7 +61,7 @@ public class ByteBufferOutputStream extends OutputStream {
   }
 
   public void reset() {
-    buffers = new LinkedList<>();
+    buffers = new ArrayList<>();
     buffers.add(ByteBuffer.allocate(BUFFER_SIZE));
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/RandomData.java b/lang/java/avro/src/main/java/org/apache/avro/util/RandomData.java
index b8c7c04..3f8d196 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/RandomData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/RandomData.java
@@ -62,16 +62,17 @@ public class RandomData implements Iterable<Object> {
     this.utf8ForString = utf8ForString;
   }
 
+  @Override
   public Iterator<Object> iterator() {
     return new Iterator<Object>() {
       private int n;
       private Random random = new Random(seed);
-      public boolean hasNext() { return n < count; }
-      public Object next() {
+      @Override public boolean hasNext() { return n < count; }
+      @Override public Object next() {
         n++;
         return generate(root, random, 0);
       }
-      public void remove() { throw new UnsupportedOperationException(); }
+      @Override public void remove() { throw new UnsupportedOperationException(); }
     };
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java b/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
index 8b1f9cb..5f899a4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
@@ -75,6 +75,7 @@ public class Utf8 implements Comparable<Utf8>, CharSequence {
 
   /** Return length in bytes.
    * @deprecated call {@link #getByteLength()} instead. */
+  @Deprecated
   public int getLength() { return length; }
 
   /** Return length in bytes. */
@@ -83,6 +84,7 @@ public class Utf8 implements Comparable<Utf8>, CharSequence {
   /** Set length in bytes.  Should called whenever byte content changes, even
    * if the length does not change, as this also clears the cached String.
    * @deprecated call {@link #setByteLength(int)} instead. */
+  @Deprecated
   public Utf8 setLength(int newLength) {
     return setByteLength(newLength);
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
index ce1f837..8e121e4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
@@ -49,21 +49,25 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
 
   public WeakIdentityHashMap() {}
 
+  @Override
   public void clear() {
     backingStore.clear();
     reap();
   }
 
+  @Override
   public boolean containsKey(Object key) {
     reap();
     return backingStore.containsKey(new IdentityWeakReference(key));
   }
 
+  @Override
   public boolean containsValue(Object value)  {
     reap();
     return backingStore.containsValue(value);
   }
 
+  @Override
   public Set<Map.Entry<K, V>> entrySet() {
     reap();
     Set<Map.Entry<K, V>> ret = new HashSet<>();
@@ -71,12 +75,15 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
       final K key = ref.getKey().get();
       final V value = ref.getValue();
       Map.Entry<K, V> entry = new Map.Entry<K, V>() {
+        @Override
         public K getKey() {
           return key;
         }
+        @Override
         public V getValue() {
           return value;
         }
+        @Override
         public V setValue(V value) {
           throw new UnsupportedOperationException();
         }
@@ -86,6 +93,7 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
     return Collections.unmodifiableSet(ret);
   }
 
+  @Override
   public Set<K> keySet() {
     reap();
     Set<K> ret = new HashSet<>();
@@ -95,6 +103,7 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
     return Collections.unmodifiableSet(ret);
   }
 
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof WeakIdentityHashMap)) {
       return false;
@@ -102,34 +111,48 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
     return backingStore.equals(((WeakIdentityHashMap)o).backingStore);
   }
 
+  @Override
   public V get(Object key) {
     reap();
     return backingStore.get(new IdentityWeakReference(key));
   }
+
+  @Override
   public V put(K key, V value) {
     reap();
     return backingStore.put(new IdentityWeakReference(key), value);
   }
 
+  @Override
   public int hashCode() {
     reap();
     return backingStore.hashCode();
   }
+
+  @Override
   public boolean isEmpty() {
     reap();
     return backingStore.isEmpty();
   }
+
+  @Override
   public void putAll(Map t) {
     throw new UnsupportedOperationException();
   }
+
+  @Override
   public V remove(Object key) {
     reap();
     return backingStore.remove(new IdentityWeakReference(key));
   }
+
+  @Override
   public int size() {
     reap();
     return backingStore.size();
   }
+
+  @Override
   public Collection<V> values() {
     reap();
     return backingStore.values();
@@ -154,10 +177,12 @@ public class WeakIdentityHashMap<K, V> implements Map<K, V> {
       hash = System.identityHashCode(obj);
     }
 
+    @Override
     public int hashCode() {
       return hash;
     }
 
+    @Override
     public boolean equals(Object o) {
       if (this == o) {
         return true;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java b/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
index 6e48702..83280c2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
@@ -106,8 +106,10 @@ public class FooBarSpecificRecord extends org.apache.avro.specific.SpecificRecor
     this.typeEnum = typeEnum;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter.  Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return id;
@@ -121,6 +123,7 @@ public class FooBarSpecificRecord extends org.apache.avro.specific.SpecificRecor
 
   // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
+  @Override
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
     case 0: id = (java.lang.Integer)value$; break;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java b/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
index 5a9a572..25545f4 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
@@ -62,7 +62,7 @@ public class GenerateBlockingData {
       System.exit(-1);
     }
 
-    Schema sch = Schema.parse(new File(args[0]));
+    Schema sch = new Schema.Parser().parse(new File(args[0]));
     File outputFile = new File(args[1]);
     int numObjects = Integer.parseInt(args[2]);
 
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
index d42548a..de74ce9 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
@@ -19,6 +19,8 @@ package org.apache.avro;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -27,7 +29,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import junit.framework.Assert;
 import org.apache.avro.file.*;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumReader;
@@ -101,7 +102,7 @@ public class TestDataFile {
     testFSync(true);
   }
 
-  public void testGenericWrite() throws IOException {
+  private void testGenericWrite() throws IOException {
     DataFileWriter<Object> writer =
       new DataFileWriter<>(new GenericDatumWriter<>())
       .setSyncInterval(100);
@@ -127,7 +128,7 @@ public class TestDataFile {
           } catch (DataFileWriter.AppendWriteException e) {
             System.out.println("Ignoring: "+e);
           }
-          Assert.assertTrue("failed to throw when expected", threwProperly);
+          assertTrue("failed to throw when expected", threwProperly);
         }
       }
     } finally {
@@ -143,10 +144,10 @@ public class TestDataFile {
       doubleCloseEx = e;
     }
 
-    Assert.assertNull("Double close() threw an unexpected exception", doubleCloseEx);
+    assertNull("Double close() threw an unexpected exception", doubleCloseEx);
   }
 
-  public void testGenericRead() throws IOException {
+  private void testGenericRead() throws IOException {
     DataFileReader<Object> reader =
       new DataFileReader<>(makeFile(), new GenericDatumReader<>());
     try {
@@ -166,7 +167,7 @@ public class TestDataFile {
     }
   }
 
-  public void testSplits() throws IOException {
+  private void testSplits() throws IOException {
     File file = makeFile();
     DataFileReader<Object> reader =
       new DataFileReader<>(file, new GenericDatumReader<>());
@@ -193,7 +194,7 @@ public class TestDataFile {
     }
   }
 
-  public void testSyncDiscovery() throws IOException {
+  private void testSyncDiscovery() throws IOException {
     File file = makeFile();
     DataFileReader<Object> reader =
       new DataFileReader<>(file, new GenericDatumReader<>());
@@ -210,7 +211,7 @@ public class TestDataFile {
       }
       // confirm that the first point is the one reached by sync(0)
       reader.sync(0);
-      assertEquals((long)reader.previousSync(), (long)syncs.get(0));
+      assertEquals(reader.previousSync(), (long)syncs.get(0));
       // and confirm that all points are reachable
       for (Long sync : syncs) {
         reader.seek(sync);
@@ -221,7 +222,7 @@ public class TestDataFile {
     }
   }
 
-  public void testGenericAppend() throws IOException {
+  private void testGenericAppend() throws IOException {
     File file = makeFile();
     long start = file.length();
     DataFileWriter<Object> writer =
@@ -254,7 +255,7 @@ public class TestDataFile {
     }
   }
 
-  public void testReadWithHeader() throws IOException {
+  private void testReadWithHeader() throws IOException {
     File file = makeFile();
     DataFileReader<Object> reader =
       new DataFileReader<>(file, new GenericDatumReader<>());
@@ -317,7 +318,7 @@ public class TestDataFile {
     // accurately do is that each sync did not lead to a flush and that the
     // file was flushed at least as many times as we called flush. Generally
     // noticed that out.flushCount is almost always 24 though.
-    Assert.assertTrue(out.flushCount < currentCount &&
+    assertTrue(out.flushCount < currentCount &&
       out.flushCount >= flushCounter);
   }
 
@@ -349,7 +350,7 @@ public class TestDataFile {
         }
       }
       System.out.println("Total number of syncs: " + out.syncCount);
-      Assert.assertEquals(syncCounter, out.syncCount);
+      assertEquals(syncCounter, out.syncCount);
     } finally {
       writer.close();
     }
@@ -368,7 +369,7 @@ public class TestDataFile {
     File input = new File(args[0]);
     Schema projection = null;
     if (args.length > 1)
-      projection = Schema.parse(new File(args[1]));
+      projection = new Schema.Parser().parse(new File(args[1]));
     TestDataFile.readFile(input, new GenericDatumReader<>(null, projection));
     long start = System.currentTimeMillis();
     for (int i = 0; i < 4; i++)
@@ -376,7 +377,7 @@ public class TestDataFile {
     System.out.println("Time: "+(System.currentTimeMillis()-start));
   }
 
-  private class TestingByteArrayOutputStream extends ByteArrayOutputStream
+  private static class TestingByteArrayOutputStream extends ByteArrayOutputStream
     implements Syncable {
     private int flushCount = 0;
     private int syncCount = 0;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
index 81d6d86..7c34512 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
@@ -17,6 +17,7 @@
  */
 package org.apache.avro;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
@@ -55,7 +56,7 @@ public class TestDataFileCustomSync {
     try {
       MessageDigest digester = MessageDigest.getInstance("MD5");
       long time = System.currentTimeMillis();
-      digester.update((UUID.randomUUID()+"@"+time).getBytes());
+      digester.update((UUID.randomUUID()+"@"+time).getBytes(UTF_8));
       return digester.digest();
     } catch (NoSuchAlgorithmException e) {
       throw new RuntimeException(e);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
index bebf850..4977130 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
@@ -25,7 +25,6 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 
-import junit.framework.Assert;
 import org.apache.avro.Schema.Type;
 import org.apache.avro.file.DataFileStream;
 import org.apache.avro.file.DataFileWriter;
@@ -79,6 +78,6 @@ public class TestDataFileMeta {
         exceptions++;
       }
     }
-    Assert.assertEquals(33, exceptions);
+    assertEquals(33, exceptions);
   }
 }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java b/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java
index 9800512..5b1afdf 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java
@@ -27,6 +27,7 @@ import org.junit.Test;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.junit.Assert.assertThat;
 
@@ -63,7 +64,7 @@ public class TestNestedRecords {
             "}";
 
 
-    final ByteArrayInputStream inputStream = new ByteArrayInputStream(inputAsExpected.getBytes());
+    final ByteArrayInputStream inputStream = new ByteArrayInputStream(inputAsExpected.getBytes(UTF_8));
 
     final JsonDecoder decoder = DecoderFactory.get().jsonDecoder(parent, inputStream);
     final DatumReader<Object> reader = new GenericDatumReader<Object>(parent);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java
index 4953a10..0f7e909 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertEquals;
 import java.io.ByteArrayOutputStream;
 import java.util.*;
 
-import org.apache.avro.SchemaCompatibility.*;
-import org.apache.avro.TestSchemas.ReaderWriter;
 import org.apache.avro.generic.GenericData.EnumSymbol;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericDatumWriter;
@@ -508,10 +506,9 @@ public class TestSchemaCompatibility {
   }
 
   Deque<String> asDeqeue(String... args) {
-    Deque<String> dq = new ArrayDeque<>();
     List<String> x = Arrays.asList(args);
     Collections.reverse(x);
-    dq.addAll(x);
+    Deque<String> dq = new ArrayDeque<>(x);
     return dq;
   }
 }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityFixedSizeMismatch.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityFixedSizeMismatch.java
index 3e22086..e233ad2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityFixedSizeMismatch.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityFixedSizeMismatch.java
@@ -21,6 +21,8 @@ import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchema
 import static org.apache.avro.TestSchemas.*;
 
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
@@ -40,11 +42,7 @@ public class TestSchemaCompatibilityFixedSizeMismatch {
         { FIXED_8_BYTES, FIXED_4_BYTES, "expected: 4, found: 8", "/size" },
         { A_DINT_B_DFIXED_8_BYTES_RECORD1, A_DINT_B_DFIXED_4_BYTES_RECORD1, "expected: 4, found: 8", "/fields/1/type/size" },
         { A_DINT_B_DFIXED_4_BYTES_RECORD1, A_DINT_B_DFIXED_8_BYTES_RECORD1, "expected: 8, found: 4", "/fields/1/type/size" }, };
-    List<Object[]> list = new ArrayList<>(fields.length);
-    for (Object[] schemas : fields) {
-      list.add(schemas);
-    }
-    return list;
+    return Arrays.asList(fields);
   }
 
   @Parameter(0)
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingEnumSymbols.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingEnumSymbols.java
index 02ddb18..e1eaeb9 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingEnumSymbols.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingEnumSymbols.java
@@ -21,6 +21,8 @@ import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchema
 import static org.apache.avro.TestSchemas.*;
 
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
@@ -45,11 +47,7 @@ public class TestSchemaCompatibilityMissingEnumSymbols {
     Object[][] fields = { //
         { ENUM1_AB_SCHEMA, ENUM1_ABC_SCHEMA, "[C]", "/symbols" }, { ENUM1_BC_SCHEMA, ENUM1_ABC_SCHEMA, "[A]", "/symbols" },
         { RECORD1_WITH_ENUM_AB, RECORD1_WITH_ENUM_ABC, "[C]", "/fields/0/type/symbols" } };
-    List<Object[]> list = new ArrayList<>(fields.length);
-    for (Object[] schemas : fields) {
-      list.add(schemas);
-    }
-    return list;
+    return Arrays.asList(fields);
   }
 
   @Parameter(0)
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingUnionBranch.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingUnionBranch.java
index 87e7cdf..1607bd3 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingUnionBranch.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingUnionBranch.java
@@ -22,6 +22,7 @@ import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchema
 import static org.apache.avro.TestSchemas.*;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
@@ -81,11 +82,7 @@ public class TestSchemaCompatibilityMissingUnionBranch {
               asList("/1", "/2", "/3") },
         { A_DINT_B_DINT_UNION_RECORD1, A_DINT_B_DINT_STRING_UNION_RECORD1,
               asList("reader union lacking writer type: STRING"), asList("/fields/1/type/1") } };
-    List<Object[]> list = new ArrayList<>(fields.length);
-    for (Object[] schemas : fields) {
-      list.add(schemas);
-    }
-    return list;
+    return Arrays.asList(fields);
   }
 
   @Parameter(0)
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityNameMismatch.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityNameMismatch.java
index 961192e..66f486b 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityNameMismatch.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityNameMismatch.java
@@ -21,6 +21,7 @@ import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchema
 import static org.apache.avro.TestSchemas.*;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
@@ -48,11 +49,7 @@ public class TestSchemaCompatibilityNameMismatch {
         { FIXED_4_BYTES, FIXED_4_ANOTHER_NAME, "expected: AnotherName", "/name" }, { FIXED_4_NAMESPACE_V1,
             FIXED_4_NAMESPACE_V2, "expected: org.apache.avro.tests.v_2_0.Fixed", "/name" },
         { A_DINT_B_DENUM_1_RECORD1, A_DINT_B_DENUM_2_RECORD1, "expected: Enum2", "/fields/1/type/name" } };
-    List<Object[]> list = new ArrayList<>(fields.length);
-    for (Object[] schemas : fields) {
-      list.add(schemas);
-    }
-    return list;
+    return Arrays.asList(fields);
   }
 
   @Parameter(0)
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityReaderFieldMissingDefaultValue.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityReaderFieldMissingDefaultValue.java
index 3c61ee1..3256a88 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityReaderFieldMissingDefaultValue.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityReaderFieldMissingDefaultValue.java
@@ -21,6 +21,7 @@ import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchema
 import static org.apache.avro.TestSchemas.*;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
@@ -36,11 +37,7 @@ public class TestSchemaCompatibilityReaderFieldMissingDefaultValue {
   public static Iterable<Object[]> data() {
     Object[][] fields = { //
         { A_INT_RECORD1, EMPTY_RECORD1, "a", "/fields/0" }, { A_INT_B_DINT_RECORD1, EMPTY_RECORD1, "a", "/fields/0" } };
-    List<Object[]> list = new ArrayList<>(fields.length);
-    for (Object[] schemas : fields) {
-      list.add(schemas);
-    }
-    return list;
+    return Arrays.asList(fields);
   }
 
   @Parameter(0)
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityTypeMismatch.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityTypeMismatch.java
index 0cc4cb2..187f8c2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityTypeMismatch.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityTypeMismatch.java
@@ -21,6 +21,7 @@ import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchema
 import static org.apache.avro.TestSchemas.*;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
@@ -91,11 +92,7 @@ public class TestSchemaCompatibilityTypeMismatch {
 
         { NULL_SCHEMA, INT_SCHEMA, "reader type: NULL not compatible with writer type: INT", "/" }
     };
-    List<Object[]> list = new ArrayList<>(fields.length);
-    for (Object[] schemas : fields) {
-      list.add(schemas);
-    }
-    return list;
+    return Arrays.asList(fields);
   }
 
   @Parameter(0)
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
index 8c7f6c0..06ba722 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
@@ -17,6 +17,7 @@
  */
 package org.apache.avro;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
@@ -24,6 +25,8 @@ import java.io.BufferedReader;
 import java.io.FileReader;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Formatter;
 import java.util.List;
@@ -49,7 +52,7 @@ public class TestSchemaNormalization {
     { return CaseFinder.find(data(), "canonical", new ArrayList<>()); }
 
     @Test public void testCanonicalization() throws Exception {
-      assertEquals(SchemaNormalization.toParsingForm(Schema.parse(input)),
+      assertEquals(SchemaNormalization.toParsingForm(new Schema.Parser().parse(input)),
                    expectedOutput);
     }
   }
@@ -63,7 +66,7 @@ public class TestSchemaNormalization {
     { return CaseFinder.find(data(),"fingerprint", new ArrayList<>()); }
 
     @Test public void testCanonicalization() throws Exception {
-      Schema s = Schema.parse(input);
+      Schema s = new Schema.Parser().parse(input);
       long carefulFP = altFingerprint(SchemaNormalization.toParsingForm(s));
       assertEquals(carefulFP, Long.parseLong(expectedOutput));
       assertEqHex(carefulFP, SchemaNormalization.parsingFingerprint64(s));
@@ -82,7 +85,7 @@ public class TestSchemaNormalization {
     @Test public void testCanonicalization() throws Exception {
       Locale originalDefaultLocale = Locale.getDefault();
       Locale.setDefault(Locale.forLanguageTag("tr"));
-      Schema s = Schema.parse(input);
+      Schema s = new Schema.Parser().parse(input);
       long carefulFP = altFingerprint(SchemaNormalization.toParsingForm(s));
       assertEquals(carefulFP, Long.parseLong(expectedOutput));
       assertEqHex(carefulFP, SchemaNormalization.parsingFingerprint64(s));
@@ -95,7 +98,7 @@ public class TestSchemaNormalization {
      + "/test/data/schema-tests.txt");
 
   private static BufferedReader data() throws IOException
-  { return new BufferedReader(new FileReader(DATA_FILE)); }
+  { return Files.newBufferedReader(Paths.get(DATA_FILE), UTF_8); }
 
   /** Compute the fingerprint of <i>bytes[s,s+l)</i> using a slow
       algorithm that's an alternative to that implemented in {@link
@@ -109,7 +112,7 @@ public class TestSchemaNormalization {
     // randomness for small inputs
 
     long tmp = altExtend(SchemaNormalization.EMPTY64, 64, ONE,
-        s.getBytes(StandardCharsets.UTF_8));
+        s.getBytes(UTF_8));
     return altExtend(SchemaNormalization.EMPTY64, 64, tmp, POSTFIX);
   }
 
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestUnionSelfReference.java b/lang/java/avro/src/test/java/org/apache/avro/TestUnionSelfReference.java
index f88ac4a..f850b00 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestUnionSelfReference.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestUnionSelfReference.java
@@ -29,7 +29,7 @@ import org.slf4j.Logger;
 public class TestUnionSelfReference {
   /** The logger for TestUnionSelfReference */
   @SuppressWarnings("unused")
-  private static final Logger LOGGER = LoggerFactory.getLogger(TestUnionSelfReference.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestUnionSelfReference.class);
   
   private static final String SIMPLE_BINARY_TREE =
 	  "{"
@@ -91,7 +91,7 @@ public class TestUnionSelfReference {
 	  +"    ]"
 	  +"  }";
   @Test
-  public void testSelfReferenceInUnion(){ 
+  public void testSelfReferenceInUnion() {
      Schema schema = new Schema.Parser().parse(SIMPLE_BINARY_TREE);
      Field leftField = schema.getField("left");
      assertEquals(JsonProperties.NULL_VALUE,leftField.defaultVal());
@@ -109,7 +109,7 @@ public class TestUnionSelfReference {
   }
   
   @Test
-  public void testSelfReferenceInThreeUnion(){ 
+  public void testSelfReferenceInThreeUnion() {
      Schema schema = new Schema.Parser().parse(THREE_TYPE_UNION);
      Field leftField = schema.getField("left");
      assertEquals(JsonProperties.NULL_VALUE,leftField.defaultVal());
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java b/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java
index 77c01ad..db73311 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java
@@ -19,6 +19,7 @@
 package org.apache.avro.file;
 
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
@@ -37,8 +38,8 @@ public class TestCustomCodec {
     assertFalse(customCodec.equals(snappyCodec));
 
     String testString = "Testing 123";
-    ByteBuffer original = ByteBuffer.allocate(testString.getBytes().length);
-    original.put(testString.getBytes());
+    ByteBuffer original = ByteBuffer.allocate(testString.getBytes(UTF_8).length);
+    original.put(testString.getBytes(UTF_8));
     original.rewind();
     ByteBuffer decompressed = null;
     try {
@@ -49,7 +50,7 @@ public class TestCustomCodec {
       e.printStackTrace();
     }
 
-    assertEquals(testString, new String(decompressed.array()));
+    assertEquals(testString, new String(decompressed.array(), UTF_8));
 
   }
 
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java b/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java
index 2a0fbf1..dd52e4e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java
@@ -18,6 +18,8 @@
 
 package org.apache.avro.file.codec;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
@@ -57,8 +59,8 @@ public class CustomCodec extends Codec {
     if (this == other)
       return true;
     if (other instanceof Codec) {
-      ByteBuffer original = ByteBuffer.allocate(getName().getBytes().length);
-      original.put(getName().getBytes());
+      ByteBuffer original = ByteBuffer.allocate(getName().getBytes(UTF_8).length);
+      original.put(getName().getBytes(UTF_8));
       original.rewind();
       try {
         return compareDecompress((Codec) other, original);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
index dd4c1f6..c90e1da 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
@@ -118,7 +118,7 @@ public class TestGenericData {
     GenericArray<CharSequence> l2 =
       new GenericData.Array<>(1, s.getFields().get(0).schema());
     String foo = "foo";
-    l0.add(new StringBuffer(foo));
+    l0.add(new StringBuilder(foo));
     l1.add(foo);
     l2.add(new Utf8(foo));
     r0.put(0, l0);
@@ -190,7 +190,7 @@ public class TestGenericData {
     Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
     GenericArray<Integer> array = new GenericData.Array<>(1, schema);
     array.add(99);
-    assertEquals(new Integer(99), array.get(0));
+    assertEquals(Integer.valueOf(99), array.get(0));
     List<Integer> list = new ArrayList<>();
     list.add(99);
     assertEquals(array, list);
@@ -218,19 +218,19 @@ public class TestGenericData {
       array.add(i);
     assertEquals(5, array.size());
     array.add(0, 6);
-    assertEquals(new Integer(6), array.get(0));
+    assertEquals(Integer.valueOf(6), array.get(0));
     assertEquals(6, array.size());
-    assertEquals(new Integer(0), array.get(1));
-    assertEquals(new Integer(4), array.get(5));
+    assertEquals(Integer.valueOf(0), array.get(1));
+    assertEquals(Integer.valueOf(4), array.get(5));
     array.add(6, 7);
-    assertEquals(new Integer(7), array.get(6));
+    assertEquals(Integer.valueOf(7), array.get(6));
     assertEquals(7, array.size());
-    assertEquals(new Integer(6), array.get(0));
-    assertEquals(new Integer(4), array.get(5));
+    assertEquals(Integer.valueOf(6), array.get(0));
+    assertEquals(Integer.valueOf(4), array.get(5));
     array.add(1, 8);
-    assertEquals(new Integer(8), array.get(1));
-    assertEquals(new Integer(0), array.get(2));
-    assertEquals(new Integer(6), array.get(0));
+    assertEquals(Integer.valueOf(8), array.get(1));
+    assertEquals(Integer.valueOf(0), array.get(2));
+    assertEquals(Integer.valueOf(6), array.get(0));
     assertEquals(8, array.size());
     try {
       array.get(9);
@@ -246,14 +246,14 @@ public class TestGenericData {
     for(int i=0; i<10; ++i)
       array.add(i);
     assertEquals(10, array.size());
-    assertEquals(new Integer(0), array.get(0));
-    assertEquals(new Integer(9), array.get(9));
+    assertEquals(Integer.valueOf(0), array.get(0));
+    assertEquals(Integer.valueOf(9), array.get(9));
 
     array.remove(0);
     assertEquals(9, array.size());
-    assertEquals(new Integer(1), array.get(0));
-    assertEquals(new Integer(2), array.get(1));
-    assertEquals(new Integer(9), array.get(8));
+    assertEquals(Integer.valueOf(1), array.get(0));
+    assertEquals(Integer.valueOf(2), array.get(1));
+    assertEquals(Integer.valueOf(9), array.get(8));
 
     // Test boundary errors.
     try {
@@ -270,13 +270,13 @@ public class TestGenericData {
     } catch (IndexOutOfBoundsException e){}
 
     // Test that we can still remove for properly sized arrays, and the rval
-    assertEquals(new Integer(9), array.remove(8));
+    assertEquals(Integer.valueOf(9), array.remove(8));
     assertEquals(8, array.size());
 
 
     // Test insertion after remove
     array.add(88);
-    assertEquals(new Integer(88), array.get(8));
+    assertEquals(Integer.valueOf(88), array.get(8));
   }
   @Test
   public void testArraySet()
@@ -287,12 +287,12 @@ public class TestGenericData {
     for(int i=0; i<10; ++i)
       array.add(i);
     assertEquals(10, array.size());
-    assertEquals(new Integer(0), array.get(0));
-    assertEquals(new Integer(5), array.get(5));
+    assertEquals(Integer.valueOf(0), array.get(0));
+    assertEquals(Integer.valueOf(5), array.get(5));
 
-    assertEquals(new Integer(5), array.set(5, 55));
+    assertEquals(Integer.valueOf(5), array.set(5, 55));
     assertEquals(10, array.size());
-    assertEquals(new Integer(55), array.get(5));
+    assertEquals(Integer.valueOf(55), array.get(5));
   }
 
   @Test
@@ -309,7 +309,7 @@ public class TestGenericData {
 
     String json = r.toString();
     JsonFactory factory = new JsonFactory();
-    JsonParser parser = factory.createJsonParser(json);
+    JsonParser parser = factory.createParser(json);
     ObjectMapper mapper = new ObjectMapper();
 
     // will throw exception if string is not parsable json
@@ -354,7 +354,7 @@ public class TestGenericData {
 
     String json = r.toString();
     JsonFactory factory = new JsonFactory();
-    JsonParser parser = factory.createJsonParser(json);
+    JsonParser parser = factory.createParser(json);
     ObjectMapper mapper = new ObjectMapper();
 
     // will throw exception if string is not parsable json
@@ -443,8 +443,9 @@ public class TestGenericData {
   public void testEnumCompare() {
     Schema s = Schema.createEnum("Kind",null,null,Arrays.asList("Z","Y","X"));
     GenericEnumSymbol z = new GenericData.EnumSymbol(s, "Z");
+    GenericEnumSymbol z2 = new GenericData.EnumSymbol(s, "Z");
+    assertEquals(0, z.compareTo(z2));
     GenericEnumSymbol y = new GenericData.EnumSymbol(s, "Y");
-    assertEquals(0, z.compareTo(z));
     assertTrue(y.compareTo(z) > 0);
     assertTrue(z.compareTo(y) < 0);
   }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
index 832871a..e5d6001 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
@@ -43,7 +43,7 @@ public class TestGenericDatumWriter {
     String json = "{\"type\": \"record\", \"name\": \"r\", \"fields\": ["
       + "{ \"name\": \"f1\", \"type\": \"long\" }"
       + "]}";
-    Schema s = Schema.parse(json);
+    Schema s = new Schema.Parser().parse(json);
     GenericRecord r = new GenericData.Record(s);
     r.put("f1", 100L);
     ByteArrayOutputStream bao = new ByteArrayOutputStream();
@@ -61,7 +61,7 @@ public class TestGenericDatumWriter {
   @Test
   public void testArrayConcurrentModification() throws Exception {
     String json = "{\"type\": \"array\", \"items\": \"int\" }";
-    Schema s = Schema.parse(json);
+    Schema s = new Schema.Parser().parse(json);
     final GenericArray<Integer> a = new GenericData.Array<>(1, s);
     ByteArrayOutputStream bao = new ByteArrayOutputStream();
     final GenericDatumWriter<GenericArray<Integer>> w =
@@ -99,7 +99,7 @@ public class TestGenericDatumWriter {
   @Test
   public void testMapConcurrentModification() throws Exception {
     String json = "{\"type\": \"map\", \"values\": \"int\" }";
-    Schema s = Schema.parse(json);
+    Schema s = new Schema.Parser().parse(json);
     final Map<String, Integer> m = new HashMap<>();
     ByteArrayOutputStream bao = new ByteArrayOutputStream();
     final GenericDatumWriter<Map<String, Integer>> w =
@@ -214,7 +214,7 @@ public class TestGenericDatumWriter {
           "}" +
         "}" +
       "]}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     GenericRecord record = new GenericData.Record(schema);
     record.put("field", "ONE");
 
@@ -237,7 +237,7 @@ public class TestGenericDatumWriter {
           "}" +
         "}" +
       "]}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     GenericRecord record = new GenericData.Record(schema);
     record.put("field", AnEnum.ONE);
 
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
index 27b38a9..2e1cec4 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
@@ -57,7 +57,7 @@ public class TestGenericRecordBuilder {
 
     // Build the record, and verify that fields are set:
     Record record = builder.build();
-    Assert.assertEquals(new Integer(1), record.get("intField"));
+    Assert.assertEquals(Integer.valueOf(1), record.get("intField"));
     Assert.assertEquals(anArray, record.get("anArray"));
     Assert.assertNotNull(record.get("id"));
     Assert.assertEquals("0", record.get("id").toString());
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
index dd1c55a..a104fac 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
@@ -171,7 +171,7 @@ public class TestBinaryDecoder {
       +"{\"name\":\"arrayField\", \"type\": " +
           "{\"type\":\"array\", \"items\":\"boolean\"}},"
       +"{\"name\":\"longField\", \"type\":\"long\"}]}";
-    schema = Schema.parse(jsonSchema);
+    schema = new Schema.Parser().parse(jsonSchema);
     GenericDatumWriter<Object> writer = new GenericDatumWriter<>();
     writer.setSchema(schema);
     ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
index baaba95..821c36b 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
@@ -23,6 +23,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Stack;
@@ -57,7 +58,7 @@ public class TestBlockingIO {
       this.depth = depth;
       byte[] in = input.getBytes("UTF-8");
       JsonFactory f = new JsonFactory();
-      JsonParser p = f.createJsonParser(
+      JsonParser p = f.createParser(
           new ByteArrayInputStream(input.getBytes("UTF-8")));
 
       ByteArrayOutputStream os = new ByteArrayOutputStream();
@@ -70,11 +71,11 @@ public class TestBlockingIO {
       byte[] bb = os.toByteArray();
       // dump(bb);
       this.input = DecoderFactory.get().binaryDecoder(bb, null);
-      this.parser =  f.createJsonParser(new ByteArrayInputStream(in));
+      this.parser =  f.createParser(new ByteArrayInputStream(in));
     }
 
     public void scan() throws IOException {
-      Stack<S> countStack = new Stack<>();
+      ArrayDeque<S> countStack = new ArrayDeque<>();
       long count = 0;
       while (parser.nextToken() != null) {
         switch (parser.getCurrentToken()) {
@@ -126,7 +127,7 @@ public class TestBlockingIO {
     }
 
     public void skip(int skipLevel) throws IOException {
-      Stack<S> countStack = new Stack<>();
+      ArrayDeque<S> countStack = new ArrayDeque<>();
       long count = 0;
       while (parser.nextToken() != null) {
         switch (parser.getCurrentToken()) {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
index f8f4407..21c799e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
@@ -95,11 +95,11 @@ public class TestEncoders {
 
   @Test
   public void testJsonEncoderInit() throws IOException {
-    Schema s = Schema.parse("\"int\"");
+    Schema s = new Schema.Parser().parse("\"int\"");
     OutputStream out = new ByteArrayOutputStream();
     factory.jsonEncoder(s, out);
     JsonEncoder enc = factory.jsonEncoder(s,
-        new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8));
+        new JsonFactory().createGenerator(out, JsonEncoding.UTF8));
     enc.configure(out);
   }
 
@@ -128,7 +128,7 @@ public class TestEncoders {
 
   @Test
   public void testValidatingEncoderInit() throws IOException {
-    Schema s = Schema.parse("\"int\"");
+    Schema s = new Schema.Parser().parse("\"int\"");
     OutputStream out = new ByteArrayOutputStream();
     Encoder e = factory.directBinaryEncoder(out, null);
     factory.validatingEncoder(s, e).configure(e);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
index eab6f25..bb9905b 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
@@ -45,7 +45,7 @@ public class TestJsonDecoder {
     String def =
       "{\"type\":\"record\",\"name\":\"X\",\"fields\":"
       +"[{\"type\":\""+type+"\",\"name\":\"n\"}]}";
-    Schema schema = Schema.parse(def);
+    Schema schema = new Schema.Parser().parse(def);
     DatumReader<GenericRecord> reader =
       new GenericDatumReader<>(schema);
 
@@ -66,7 +66,7 @@ public class TestJsonDecoder {
       +"[{\"type\":\"long\",\"name\":\"l\"},"
       +"{\"type\":{\"type\":\"array\",\"items\":\"int\"},\"name\":\"a\"}"
       +"]}";
-    Schema ws = Schema.parse(w);
+    Schema ws = new Schema.Parser().parse(w);
     DecoderFactory df = DecoderFactory.get();
     String data = "{\"a\":[1,2],\"l\":100}{\"l\": 200, \"a\":[1,2]}";
     JsonDecoder in = df.jsonDecoder(ws, data);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
index 0d4112a..2012632 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
@@ -233,13 +233,13 @@ public class TestValidatingIO {
         result.add(r.nextInt());
         break;
       case 'L':
-        result.add(new Long(r.nextInt()));
+        result.add(Long.valueOf(r.nextInt()));
         break;
       case 'F':
-        result.add(new Float(r.nextInt()));
+        result.add(Float.valueOf(r.nextInt()));
         break;
       case 'D':
-        result.add(new Double(r.nextInt()));
+        result.add(Double.valueOf(r.nextInt()));
         break;
       case 'S':
       case 'K':
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/SymbolTest.java b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/SymbolTest.java
index f5b9ca0..e972ec2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/SymbolTest.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/SymbolTest.java
@@ -15,13 +15,12 @@
  */
 package org.apache.avro.io.parsing;
 
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Set;
 
-import junit.framework.Assert;
 import org.apache.avro.Schema;
 import org.junit.Test;
 
@@ -63,7 +62,7 @@ public class SymbolTest {
     if (symb.production != null) {
       for (Symbol s : symb.production) {
         if (s == null) {
-          Assert.fail("invalid parsing tree should not contain nulls");
+          fail("invalid parsing tree should not contain nulls");
         }
         if (s.kind != Symbol.Kind.ROOT) {
           validateNonNull(s, seen);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
index bd0df52..f2157a2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
@@ -50,7 +50,7 @@ public class TestResolvingGrammarGenerator {
 
   public TestResolvingGrammarGenerator(String jsonSchema, String jsonData)
     throws IOException {
-    this.schema = Schema.parse(jsonSchema);
+    this.schema = new Schema.Parser().parse(jsonSchema);
     JsonFactory factory = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper(factory);
 
diff --git a/lang/java/avro/src/test/java/org/apache/avro/message/TestBinaryMessageEncoding.java b/lang/java/avro/src/test/java/org/apache/avro/message/TestBinaryMessageEncoding.java
index 4cb6c38..2a74e3e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/message/TestBinaryMessageEncoding.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/message/TestBinaryMessageEncoding.java
@@ -88,7 +88,7 @@ public class TestBinaryMessageEncoding {
     MessageEncoder<Record> v2Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
 
     for (Record record : records) {
-      if (record.getSchema() == SCHEMA_V1) {
+      if (record.getSchema().equals(SCHEMA_V1)) {
         buffers.add(v1Encoder.encode(record));
       } else {
         buffers.add(v2Encoder.encode(record));
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
index a46e1dc..f592a5e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
@@ -18,6 +18,7 @@
 
 package org.apache.avro.reflect;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 import java.io.*;
@@ -54,7 +55,7 @@ public class TestByteBuffer {
     content = new File(DIR.getRoot().getPath(),"test-content");
     try(FileOutputStream out = new FileOutputStream(content)) {
       for (int i = 0; i < 100000; i++) {
-        out.write("hello world\n".getBytes());
+        out.write("hello world\n".getBytes(UTF_8));
       }
     }
   }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
index 2784fa2..589722c 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
@@ -17,6 +17,7 @@
  */
 package org.apache.avro.reflect;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 import java.io.ByteArrayOutputStream;
@@ -54,7 +55,7 @@ public class TestNonStringMapKeys {
     Company [] entityObjs = {entityObj1, entityObj2};
     byte[] bytes = testSerialization(testType, entityObj1, entityObj2);
     List<GenericRecord> records =
-      (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
+      testGenericDatumRead(testType, bytes, entityObjs);
 
     GenericRecord record = records.get(0);
     Object employees = record.get("employees");
@@ -74,8 +75,7 @@ public class TestNonStringMapKeys {
       (id.equals(2) && name.equals("Bar"))
     );
 
-    List<Company> records2 =
-      (List<Company>) testReflectDatumRead(testType, bytes, entityObjs);
+    List<Company> records2 = testReflectDatumRead(testType, bytes, entityObjs);
     Company co = records2.get(0);
     log ("Read: " + co);
     assertNotNull (co.getEmployees());
@@ -106,8 +106,7 @@ public class TestNonStringMapKeys {
     String testType = "NestedMapsTest";
     Company2 [] entityObjs = {entityObj1};
     byte[] bytes = testSerialization(testType, entityObj1);
-    List<GenericRecord> records =
-      (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
+    List<GenericRecord> records = testGenericDatumRead(testType, bytes, entityObjs);
 
     GenericRecord record = records.get(0);
     Object employees = record.get("employees");
@@ -137,8 +136,7 @@ public class TestNonStringMapKeys {
       value = ((Utf8)value).toString();
     assertEquals ("CompanyFoo", value);
 
-    List<Company2> records2 =
-      (List<Company2>) testReflectDatumRead(testType, bytes, entityObjs);
+    List<Company2> records2 = testReflectDatumRead(testType, bytes, entityObjs);
     Company2 co = records2.get(0);
     log ("Read: " + co);
     assertNotNull (co.getEmployees());
@@ -167,8 +165,7 @@ public class TestNonStringMapKeys {
     String testType = "RecordNameInvariance";
     SameMapSignature [] entityObjs = {entityObj1};
     byte[] bytes = testSerialization(testType, entityObj1);
-    List<GenericRecord> records =
-      (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
+    List<GenericRecord> records = testGenericDatumRead(testType, bytes, entityObjs);
 
     GenericRecord record = records.get(0);
     Object map1obj = record.get("map1");
@@ -185,8 +182,7 @@ public class TestNonStringMapKeys {
     Object map2obj = record.get("map2");
     assertEquals (map1obj, map2obj);
 
-    List<SameMapSignature> records2 =
-      (List<SameMapSignature>) testReflectDatumRead(testType, bytes, entityObjs);
+    List<SameMapSignature> records2 = testReflectDatumRead(testType, bytes, entityObjs);
     SameMapSignature entity = records2.get(0);
     log ("Read: " + entity);
     assertNotNull (entity.getMap1());
@@ -313,7 +309,7 @@ public class TestNonStringMapKeys {
     encoder.flush();
 
     byte[] bytes = os.toByteArray();
-    System.out.println ("JSON encoder output:\n" + new String(bytes));
+    System.out.println ("JSON encoder output:\n" + new String(bytes, UTF_8));
     return bytes;
   }
 
@@ -326,7 +322,7 @@ public class TestNonStringMapKeys {
     GenericDatumReader<GenericRecord> datumReader =
       new GenericDatumReader<>(schema);
 
-    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, new String(bytes));
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, new String(bytes, UTF_8));
     GenericRecord r = datumReader.read(null, decoder);
     return r;
   }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
index d667d55..c875d22 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
@@ -17,13 +17,13 @@
  */
 package org.apache.avro.reflect;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Array;
-import java.lang.reflect.Type;
 import java.nio.ByteBuffer;
 import java.util.*;
 
@@ -437,7 +437,7 @@ public class TestReflect {
   @Test public void testR12() throws Exception {
     Schema s = ReflectData.get().getSchema(R12.class);
     assertEquals(Schema.Type.INT, s.getField("x").schema().getType());
-    assertEquals(Schema.parse
+    assertEquals(new Schema.Parser().parse
                  ("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}"),
                  s.getField("strings").schema());
   }
@@ -511,7 +511,7 @@ public class TestReflect {
 
     // check reflective setField works for records
     if (s.getType().equals(Schema.Type.RECORD)) {
-      Object copy = object.getClass().newInstance();
+      Object copy = object.getClass().getDeclaredConstructor().newInstance();
       for (Field f : s.getFields()) {
         Object val = ReflectData.get().getField(object, f.name(), f.pos());
         ReflectData.get().setField(copy, f.name(), f.pos(), val);
@@ -563,16 +563,17 @@ public class TestReflect {
           +"{\"name\":\"b\",\"type\":\"int\"}]}");
   }
 
-  public static class RAvroStringableField { @Stringable int a; }
+  public static class RAvroStringableField {
+    @Stringable int a;
+  }
+
+  @Test
   public void testAnnotationAvroStringableFields() throws Exception {
-    check(RAvroStringableField.class, "{\"type\":\"record\",\"name\":\"RAvroNameCollide\",\"namespace\":"
+    check(RAvroStringableField.class, "{\"type\":\"record\",\"name\":\"RAvroStringableField\",\"namespace\":"
           +"\"org.apache.avro.reflect.TestReflect\",\"fields\":["
-          +"{\"name\":\"a\",\"type\":\"String\"}]}");
+          +"{\"name\":\"a\",\"type\":\"string\"}]}");
   }
 
-
-
-
   private void check(Object o, String schemaJson) {
     check(o.getClass(), schemaJson);
   }
@@ -1132,7 +1133,7 @@ public class TestReflect {
 
   @Test
   public void testNullableByteArrayNotNullValue() throws Exception {
-    checkReadWrite(new NullableBytesTest("foo".getBytes()));
+    checkReadWrite(new NullableBytesTest("foo".getBytes(UTF_8)));
   }
 
   @Test
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
index f0ce390..77c1e8e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
@@ -33,7 +33,7 @@ public class TestReflectionUtil {
   public void testUnsafeWhenNotExists() throws Exception {
     ClassLoader cl = new NoUnsafe();
     Class<?> testerClass = cl.loadClass(Tester.class.getName());
-    testerClass.getDeclaredMethod("checkUnsafe").invoke(testerClass.newInstance());
+    testerClass.getDeclaredMethod("checkUnsafe").invoke(testerClass.getDeclaredConstructor().newInstance());
   }
 
   public static final class Tester {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithJsr310LogicalTypes.java b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithJsr310LogicalTypes.java
index 352e5f0..1adf5aa 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithJsr310LogicalTypes.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithJsr310LogicalTypes.java
@@ -94,8 +94,10 @@ public class TestRecordWithJsr310LogicalTypes extends org.apache.avro.specific.S
     this.dec = dec;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter.  Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return b;
@@ -139,6 +141,7 @@ public class TestRecordWithJsr310LogicalTypes extends org.apache.avro.specific.S
 
   // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
+  @Override
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
     case 0: b = (java.lang.Boolean)value$; break;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java
index 3029e39..05f018a 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java
@@ -80,8 +80,10 @@ public class TestRecordWithLogicalTypes extends org.apache.avro.specific.Specifi
     this.dec = dec;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter.  Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return b;
@@ -99,6 +101,7 @@ public class TestRecordWithLogicalTypes extends org.apache.avro.specific.Specifi
   }
   // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
+  @Override
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
     case 0: b = (java.lang.Boolean)value$; break;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java b/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java
index cd958be..eed1f78 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java
@@ -97,6 +97,7 @@ public class TestCaseFinder {
       CaseFinder.find(mk("<<INPUT blah"), "", result);
     }
 
+    @Test (expected=java.lang.IllegalArgumentException.class)
     public void testBadDocLabel2() throws Exception {
       List<Object[]> result = new ArrayList<>();
       CaseFinder.find(mk("<<INPUT blah"), "kill-er", result);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java b/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
index 677b7bd..4ed8089 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
@@ -17,7 +17,7 @@
  */
 package org.apache.avro.util;
 
-import static junit.framework.Assert.assertSame;
+import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertEquals;
 
 import java.nio.charset.StandardCharsets;
@@ -28,7 +28,7 @@ public class TestUtf8 {
   @Test public void testByteConstructor() throws Exception {
     byte[] bs = "Foo".getBytes(StandardCharsets.UTF_8);
     Utf8 u = new Utf8(bs);
-    assertEquals(bs.length, u.getLength());
+    assertEquals(bs.length, u.getByteLength());
     for (int i=0; i<bs.length; i++) {
       assertEquals(bs[i], u.getBytes()[i]);
     }
diff --git a/lang/java/compiler/pom.xml b/lang/java/compiler/pom.xml
index 8510b5e..a3fa3d0 100644
--- a/lang/java/compiler/pom.xml
+++ b/lang/java/compiler/pom.xml
@@ -171,7 +171,7 @@
       </plugins>
     </pluginManagement>
   </build>
-  
+
 
   <dependencies>
     <dependency>
diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
index 800d50e..236dba3 100644
--- a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
@@ -21,9 +21,10 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -55,6 +56,7 @@ import org.apache.velocity.app.VelocityEngine;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.avro.specific.SpecificData.RESERVED_WORDS;
 
 /**
@@ -295,9 +297,9 @@ public class SpecificCompiler {
 
   public void addCustomConversion(Class<?> conversionClass) {
     try {
-      final Conversion<?> conversion = (Conversion<?>)conversionClass.newInstance();
+      final Conversion<?> conversion = (Conversion<?>) conversionClass.getDeclaredConstructor().newInstance();
       specificData.addLogicalTypeConversion(conversion);
-    }  catch (IllegalAccessException | InstantiationException e) {
+    }  catch (IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e) {
       throw new RuntimeException("Failed to instantiate conversion class " + conversionClass, e);
     }
   }
@@ -406,7 +408,7 @@ public class SpecificCompiler {
           fos = new FileOutputStream(f);
           fw = new OutputStreamWriter(fos, outputCharacterEncoding);
         } else {
-          fw = new FileWriter(f);
+          fw = Files.newBufferedWriter(f.toPath(), UTF_8);
         }
         fw.write(FILE_HEADER);
         fw.write(contents);
diff --git a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
index 6ac8d32..891334d 100644
--- a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
+++ b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
@@ -1634,7 +1634,7 @@ private void JsonPair(ObjectNode o) :
 }
 {
   name=JsonString() <COLON> value=Json()
-    { o.put(name, value); } 
+    { o.set(name, value); }
 }
 
 private JsonNode JsonArray() :
diff --git a/lang/java/grpc/pom.xml b/lang/java/grpc/pom.xml
index 409b283..6bc3bac 100644
--- a/lang/java/grpc/pom.xml
+++ b/lang/java/grpc/pom.xml
@@ -71,6 +71,28 @@
           </execution>
         </executions>
       </plugin>
+      <!-- Allow guava because hadoop brings it as a transitive dependency. -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-banned-dependencies</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <includes>
+                    <include>com.google.guava:guava</include>
+                  </includes>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
diff --git a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcClient.java b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcClient.java
index e4e03c6..1d3a3cd 100644
--- a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcClient.java
+++ b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcClient.java
@@ -86,6 +86,7 @@ public abstract class AvroGrpcClient {
       this.serviceDescriptor = serviceDescriptor;
     }
 
+    @Override
     public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
       try {
         return invokeUnaryMethod(method, args);
diff --git a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcServer.java b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcServer.java
index ff0d187..6f0e967 100644
--- a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcServer.java
+++ b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcServer.java
@@ -73,6 +73,7 @@ public abstract class AvroGrpcServer {
       this.method = method;
     }
 
+    @Override
     public void invoke(Object[] request, StreamObserver<Object> responseObserver) {
       Object methodResponse = null;
       try {
diff --git a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcUtils.java b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcUtils.java
index f3ab07b..9b39ccb 100644
--- a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcUtils.java
+++ b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcUtils.java
@@ -29,7 +29,7 @@ import io.grpc.KnownLength;
 
 /** Utility methods for using Avro IDL and serialization with gRPC. */
 public final class AvroGrpcUtils {
-  private static Logger log = Logger.getLogger(AvroGrpcUtils.class.getName());
+  private static final Logger LOG = Logger.getLogger(AvroGrpcUtils.class.getName());
 
   private AvroGrpcUtils() {
   }
@@ -79,7 +79,7 @@ public final class AvroGrpcUtils {
       }
       stream.close();
     } catch (Exception e) {
-      log.log(Level.WARNING, "failed to skip/close the input stream, may cause memory leak", e);
+      LOG.log(Level.WARNING, "failed to skip/close the input stream, may cause memory leak", e);
     }
   }
 }
diff --git a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroInputStream.java b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroInputStream.java
index eab9338..f580be4 100644
--- a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroInputStream.java
+++ b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroInputStream.java
@@ -61,7 +61,7 @@ public abstract class AvroInputStream extends InputStream implements Drainable {
    * An {@link OutputStream} that writes to a target {@link OutputStream} and provides total
    * number of bytes written to it.
    */
-  protected class CountingOutputStream extends OutputStream {
+  protected static class CountingOutputStream extends OutputStream {
     private final OutputStream target;
     private int writtenCount = 0;
 
diff --git a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroRequestMarshaller.java b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroRequestMarshaller.java
index c553c88..45864a1 100644
--- a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroRequestMarshaller.java
+++ b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroRequestMarshaller.java
@@ -71,7 +71,7 @@ public class AvroRequestMarshaller implements MethodDescriptor.Marshaller<Object
     }
   }
 
-  private class AvroRequestInputStream extends AvroInputStream {
+  private static class AvroRequestInputStream extends AvroInputStream {
     private final Protocol.Message message;
     private Object[] args;
 
diff --git a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroResponseMarshaller.java b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroResponseMarshaller.java
index e46c932..a215c45 100644
--- a/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroResponseMarshaller.java
+++ b/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroResponseMarshaller.java
@@ -75,7 +75,7 @@ public class AvroResponseMarshaller implements MethodDescriptor.Marshaller<Objec
     }
   }
 
-  private class AvroResponseInputStream extends AvroInputStream {
+  private static class AvroResponseInputStream extends AvroInputStream {
     private final Protocol.Message message;
     private Object response;
 
diff --git a/lang/java/grpc/src/test/java/org/apache/avro/grpc/TestAvroProtocolGrpc.java b/lang/java/grpc/src/test/java/org/apache/avro/grpc/TestAvroProtocolGrpc.java
index f91addd..436405c 100644
--- a/lang/java/grpc/src/test/java/org/apache/avro/grpc/TestAvroProtocolGrpc.java
+++ b/lang/java/grpc/src/test/java/org/apache/avro/grpc/TestAvroProtocolGrpc.java
@@ -82,7 +82,7 @@ public class TestAvroProtocolGrpc {
         .build();
     server.start();
     int port = server.getPort();
-    channel = ManagedChannelBuilder.forAddress("localhost", port).usePlaintext(true).build();
+    channel = ManagedChannelBuilder.forAddress("localhost", port).usePlaintext().build();
     stub = AvroGrpcClient.create(channel, TestService.class);
     callbackStub = AvroGrpcClient.create(channel, TestService.Callback.class);
   }
@@ -210,7 +210,7 @@ public class TestAvroProtocolGrpc {
   @Test
   public void testConcurrentChannels() throws Exception {
     ManagedChannel otherChannel = ManagedChannelBuilder.forAddress("localhost", server.getPort())
-        .usePlaintext(true).build();
+        .usePlaintext().build();
     TestService otherStub = AvroGrpcClient.create(otherChannel, TestService.class);
     Future<Integer>[] adds = new Future[5];
     Future<Integer>[] otherAdds = new Future[5];
diff --git a/lang/java/integration-test/test-custom-conversions/src/main/java/org.apache.avro.codegentest/CustomDecimalConversion.java b/lang/java/integration-test/test-custom-conversions/src/main/java/org.apache.avro.codegentest/CustomDecimalConversion.java
index 7c200ad..5b763b3 100644
--- a/lang/java/integration-test/test-custom-conversions/src/main/java/org.apache.avro.codegentest/CustomDecimalConversion.java
+++ b/lang/java/integration-test/test-custom-conversions/src/main/java/org.apache.avro.codegentest/CustomDecimalConversion.java
@@ -27,26 +27,26 @@ import java.math.BigInteger;
 import java.nio.ByteBuffer;
 
 public class CustomDecimalConversion extends Conversion<CustomDecimal> {
-
-    @Override
-    public Class<CustomDecimal> getConvertedType() {
-        return CustomDecimal.class;
-    }
-
-    @Override
-    public String getLogicalTypeName() {
-        return "decimal";
-    }
-
-    public CustomDecimal fromBytes(ByteBuffer value, Schema schema, LogicalType type) {
-        int scale = ((LogicalTypes.Decimal)type).getScale();
-        byte[] bytes = value.get(new byte[value.remaining()]).array();
-        return new CustomDecimal(new BigInteger(bytes), scale);
-    }
-
-    public ByteBuffer toBytes(CustomDecimal value, Schema schema, LogicalType type) {
-        int scale = ((LogicalTypes.Decimal)type).getScale();
-        return ByteBuffer.wrap(value.toByteArray(scale));
-    }
-
+  @Override
+  public Class<CustomDecimal> getConvertedType() {
+      return CustomDecimal.class;
+  }
+
+  @Override
+  public String getLogicalTypeName() {
+      return "decimal";
+  }
+
+  @Override
+  public CustomDecimal fromBytes(ByteBuffer value, Schema schema, LogicalType type) {
+      int scale = ((LogicalTypes.Decimal)type).getScale();
+      byte[] bytes = value.get(new byte[value.remaining()]).array();
+      return new CustomDecimal(new BigInteger(bytes), scale);
+  }
+
+  @Override
+  public ByteBuffer toBytes(CustomDecimal value, Schema schema, LogicalType type) {
+      int scale = ((LogicalTypes.Decimal)type).getScale();
+      return ByteBuffer.wrap(value.toByteArray(scale));
+  }
 }
diff --git a/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/StaticServlet.java b/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/StaticServlet.java
index b9e5633..a4c655d 100644
--- a/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/StaticServlet.java
+++ b/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/StaticServlet.java
@@ -29,6 +29,7 @@ import org.eclipse.jetty.util.resource.Resource;
 public class StaticServlet extends DefaultServlet {
   private static final long serialVersionUID = 1L;
 
+  @Override
   public Resource getResource(String pathInContext) {
     // Take only last slice of the URL as a filename, so we can adjust path.
     // This also prevents mischief like '../../foo.css'
diff --git a/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestProtocolHttp.java b/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestProtocolHttp.java
index d067edf..74c6db8 100644
--- a/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestProtocolHttp.java
+++ b/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestProtocolHttp.java
@@ -36,6 +36,7 @@ import java.net.URL;
 import java.net.ServerSocket;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 
 public class TestProtocolHttp extends TestProtocolSpecific {
 
@@ -74,7 +75,7 @@ public class TestProtocolHttp extends TestProtocolSpecific {
     // a version of the Simple protocol that doesn't declare "ack" one-way
     Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
     Protocol.Message message =
-      protocol.createMessage("ack", null,
+      protocol.createMessage("ack", null, new LinkedHashMap<String,String>(),
                              Schema.createRecord(new ArrayList<>()),
                              Schema.create(Schema.Type.NULL),
                              Schema.createUnion(new ArrayList<>()));
diff --git a/lang/java/ipc-netty/src/main/java/org/apache/avro/ipc/netty/NettyTransceiver.java b/lang/java/ipc-netty/src/main/java/org/apache/avro/ipc/netty/NettyTransceiver.java
index 24f542f..ad4c48a 100644
--- a/lang/java/ipc-netty/src/main/java/org/apache/avro/ipc/netty/NettyTransceiver.java
+++ b/lang/java/ipc-netty/src/main/java/org/apache/avro/ipc/netty/NettyTransceiver.java
@@ -557,7 +557,7 @@ public class NettyTransceiver extends Transceiver {
    * A ChannelFutureListener for channel write operations that notifies
    * a {@link Callback} if an error occurs while writing to the channel.
    */
-  protected class WriteFutureListener implements ChannelFutureListener {
+  protected static class WriteFutureListener implements ChannelFutureListener {
     protected final Callback<List<ByteBuffer>> callback;
 
     /**
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
index 1f56519..f9ad693 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
@@ -52,8 +52,10 @@ public class DatagramServer extends Thread implements Server {
     setDaemon(true);
   }
 
+  @Override
   public int getPort() { return channel.socket().getLocalPort(); }
 
+  @Override
   public void run() {
     while (true) {
       try {
@@ -67,6 +69,7 @@ public class DatagramServer extends Thread implements Server {
     }
   }
 
+  @Override
   public void close() { this.interrupt(); }
 
   public static void main(String[] arg) throws Exception {
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java
index 70c0545..33a5600 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java
@@ -40,6 +40,7 @@ public class DatagramTransceiver extends Transceiver {
   private SocketAddress remote;
   private ByteBuffer buffer = ByteBuffer.allocate(MAX_SIZE);
 
+  @Override
   public String getRemoteName() { return remote.toString(); }
 
   public DatagramTransceiver(SocketAddress remote) throws IOException {
@@ -51,6 +52,7 @@ public class DatagramTransceiver extends Transceiver {
     this.channel = channel;
   }
 
+  @Override
   public synchronized List<ByteBuffer> readBuffers() throws IOException {
     buffer.clear();
     remote = channel.receive(buffer);
@@ -69,6 +71,7 @@ public class DatagramTransceiver extends Transceiver {
     }
   }
 
+  @Override
   public synchronized void writeBuffers(List<ByteBuffer> buffers)
     throws IOException {
     buffer.clear();
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
index a46fcd4..cf17a27 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
@@ -48,8 +48,10 @@ public class HttpTransceiver extends Transceiver {
   /** Set the connect and read timeouts, in milliseconds. */
   public void setTimeout(int timeout) { this.timeout = timeout; }
 
+  @Override
   public String getRemoteName() { return this.url.toString(); }
 
+  @Override
   public synchronized List<ByteBuffer> readBuffers() throws IOException {
     InputStream in = connection.getInputStream();
     try {
@@ -59,6 +61,7 @@ public class HttpTransceiver extends Transceiver {
     }
   }
 
+  @Override
   public synchronized void writeBuffers(List<ByteBuffer> buffers)
     throws IOException {
     if (proxy == null)
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
index 161b8d6..58a0cb5 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
@@ -73,13 +73,13 @@ public class RPCContext {
    * the client to the server
    */
   public Map<String,ByteBuffer> requestHandshakeMeta() {
-    if (handshakeRequest.meta == null)
-      handshakeRequest.meta = new HashMap<>();
-    return handshakeRequest.meta;
+    if (handshakeRequest.getMeta() == null)
+      handshakeRequest.setMeta(new HashMap<>());
+    return handshakeRequest.getMeta();
   }
 
   void setRequestHandshakeMeta(Map<String,ByteBuffer> newmeta) {
-    handshakeRequest.meta = newmeta;
+    handshakeRequest.setMeta(newmeta);
   }
 
   /**
@@ -89,13 +89,13 @@ public class RPCContext {
    * the server to the client
    */
   public Map<String,ByteBuffer> responseHandshakeMeta() {
-    if (handshakeResponse.meta == null)
-      handshakeResponse.meta = new HashMap<>();
-    return handshakeResponse.meta;
+    if (handshakeResponse.getMeta() == null)
+      handshakeResponse.setMeta(new HashMap<>());
+    return handshakeResponse.getMeta();
   }
 
   void setResponseHandshakeMeta(Map<String,ByteBuffer> newmeta) {
-    handshakeResponse.meta = newmeta;
+    handshakeResponse.setMeta(newmeta);
   }
 
   /**
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
index 92d77d7..d897751 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
@@ -208,17 +208,17 @@ public abstract class Requestor {
       remote = REMOTE_PROTOCOLS.get(remoteHash);
     }
     HandshakeRequest handshake = new HandshakeRequest();
-    handshake.clientHash = localHash;
-    handshake.serverHash = remoteHash;
+    handshake.setClientHash(localHash);
+    handshake.setServerHash(remoteHash);
     if (sendLocalText)
-      handshake.clientProtocol = local.toString();
+      handshake.setClientProtocol(local.toString());
 
     RPCContext context = new RPCContext();
     context.setHandshakeRequest(handshake);
     for (RPCPlugin plugin : rpcMetaPlugins) {
       plugin.clientStartConnect(context);
     }
-    handshake.meta = context.requestHandshakeMeta();
+    handshake.setMeta(context.requestHandshakeMeta());
 
     HANDSHAKE_WRITER.write(handshake, out);
   }
@@ -227,7 +227,7 @@ public abstract class Requestor {
     if (getTransceiver().isConnected()) return true;
     boolean established = false;
     HandshakeResponse handshake = HANDSHAKE_READER.read(null, in);
-    switch (handshake.match) {
+    switch (handshake.getMatch()) {
     case BOTH:
       established = true;
       sendLocalText = false;
@@ -244,7 +244,7 @@ public abstract class Requestor {
       sendLocalText = true;
       break;
     default:
-      throw new AvroRuntimeException("Unexpected match: "+handshake.match);
+      throw new AvroRuntimeException("Unexpected match: " + handshake.getMatch());
     }
 
     RPCContext context = new RPCContext();
@@ -258,8 +258,8 @@ public abstract class Requestor {
   }
 
   private void setRemote(HandshakeResponse handshake) throws IOException {
-    remote = Protocol.parse(handshake.serverProtocol.toString());
-    MD5 remoteHash = (MD5)handshake.serverHash;
+    remote = Protocol.parse(handshake.getServerProtocol().toString());
+    MD5 remoteHash = handshake.getServerHash();
     REMOTE_HASHES.put(transceiver.getRemoteName(), remoteHash);
     REMOTE_PROTOCOLS.putIfAbsent(remoteHash, remote);
   }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
index 796f778..f1f5941 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
@@ -207,23 +207,21 @@ public abstract class Responder {
     throws IOException {
     if (connection != null && connection.isConnected())
       return connection.getRemote();
-    HandshakeRequest request = (HandshakeRequest)handshakeReader.read(null, in);
-    Protocol remote = protocols.get(request.clientHash);
-    if (remote == null && request.clientProtocol != null) {
-      remote = Protocol.parse(request.clientProtocol.toString());
-      protocols.put(request.clientHash, remote);
+    HandshakeRequest request = handshakeReader.read(null, in);
+    Protocol remote = protocols.get(request.getClientHash());
+    if (remote == null && request.getClientProtocol() != null) {
+      remote = Protocol.parse(request.getClientProtocol().toString());
+      protocols.put(request.getClientHash(), remote);
     }
     HandshakeResponse response = new HandshakeResponse();
-    if (localHash.equals(request.serverHash)) {
-      response.match =
-        remote == null ? HandshakeMatch.NONE : HandshakeMatch.BOTH;
+    if (localHash.equals(request.getServerHash())) {
+      response.setMatch(remote == null ? HandshakeMatch.NONE : HandshakeMatch.BOTH);
     } else {
-      response.match =
-        remote == null ? HandshakeMatch.NONE : HandshakeMatch.CLIENT;
+      response.setMatch(remote == null ? HandshakeMatch.NONE : HandshakeMatch.CLIENT);
     }
-    if (response.match != HandshakeMatch.BOTH) {
-      response.serverProtocol = local.toString();
-      response.serverHash = localHash;
+    if (response.getMatch() != HandshakeMatch.BOTH) {
+      response.setServerProtocol(local.toString());
+      response.setServerHash(localHash);
     }
 
     RPCContext context = new RPCContext();
@@ -234,7 +232,7 @@ public abstract class Responder {
     }
     handshakeWriter.write(response, out);
 
-    if (connection != null && response.match != HandshakeMatch.NONE)
+    if (connection != null && response.getMatch() != HandshakeMatch.NONE)
       connection.setRemote(remote);
 
     return remote;
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
index eb4569f..9866d14 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
@@ -47,6 +47,7 @@ public class SaslSocketServer extends SocketServer {
     throws IOException {
     this(responder, addr,
          new SaslServerFactory() {
+           @Override
            public SaslServer getServer() { return new AnonymousServer(); }
          });
   }
@@ -58,6 +59,7 @@ public class SaslSocketServer extends SocketServer {
                           final CallbackHandler cbh) throws IOException {
     this(responder, addr,
          new SaslServerFactory() {
+           @Override
            public SaslServer getServer() throws SaslException {
              return Sasl.createSaslServer(mechanism, protocol, serverName,
                                           props, cbh);
@@ -78,8 +80,8 @@ public class SaslSocketServer extends SocketServer {
 
   private static class AnonymousServer implements SaslServer {
     private String user;
-    public String getMechanismName() { return "ANONYMOUS"; }
-    public byte[] evaluateResponse(byte[] response) throws SaslException {
+    @Override public String getMechanismName() { return "ANONYMOUS"; }
+    @Override public byte[] evaluateResponse(byte[] response) throws SaslException {
       try {
         this.user = new String(response, "UTF-8");
       } catch (IOException e) {
@@ -87,16 +89,16 @@ public class SaslSocketServer extends SocketServer {
       }
       return null;
     }
-    public boolean isComplete() { return user != null; }
-    public String getAuthorizationID() { return user; }
-    public byte[] unwrap(byte[] incoming, int offset, int len) {
+    @Override public boolean isComplete() { return user != null; }
+    @Override public String getAuthorizationID() { return user; }
+    @Override public byte[] unwrap(byte[] incoming, int offset, int len) {
       throw new UnsupportedOperationException();
     }
-    public byte[] wrap(byte[] outgoing, int offset, int len) {
+    @Override public byte[] wrap(byte[] outgoing, int offset, int len) {
       throw new UnsupportedOperationException();
     }
-    public Object getNegotiatedProperty(String propName) { return null; }
-    public void dispose() {}
+    @Override public Object getNegotiatedProperty(String propName) { return null; }
+    @Override public void dispose() {}
   }
 
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
index 249b78a..7d512e1 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
@@ -377,8 +377,11 @@ public class SaslSocketTransceiver extends Transceiver {
   }
 
   private static class AnonymousClient implements SaslClient {
+    @Override
     public String getMechanismName() { return "ANONYMOUS"; }
+    @Override
     public boolean hasInitialResponse() { return true; }
+    @Override
     public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
       try {
         return System.getProperty("user.name").getBytes("UTF-8");
@@ -386,14 +389,14 @@ public class SaslSocketTransceiver extends Transceiver {
         throw new SaslException(e.toString());
       }
     }
-    public boolean isComplete() { return true; }
-    public byte[] unwrap(byte[] incoming, int offset, int len) {
+    @Override public boolean isComplete() { return true; }
+    @Override public byte[] unwrap(byte[] incoming, int offset, int len) {
       throw new UnsupportedOperationException();
     }
-    public byte[] wrap(byte[] outgoing, int offset, int len) {
+    @Override public byte[] wrap(byte[] outgoing, int offset, int len) {
       throw new UnsupportedOperationException();
     }
-    public Object getNegotiatedProperty(String propName) { return null; }
-    public void dispose() {}
+    @Override public Object getNegotiatedProperty(String propName) { return null; }
+    @Override public void dispose() {}
   }
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
index bc0d661..2747484 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
@@ -37,6 +37,7 @@ import org.apache.avro.ipc.generic.GenericResponder;
  * protocol and is not intended for production services.
  * @deprecated use {@link SaslSocketServer} instead.
  */
+@Deprecated
 public class SocketServer extends Thread implements Server {
   private static final Logger LOG = LoggerFactory.getLogger(SocketServer.class);
 
@@ -58,8 +59,10 @@ public class SocketServer extends Thread implements Server {
     setDaemon(true);
   }
 
+  @Override
   public int getPort() { return channel.socket().getLocalPort(); }
 
+  @Override
   public void run() {
     LOG.info("starting "+channel.socket().getInetAddress());
     try {
@@ -82,6 +85,7 @@ public class SocketServer extends Thread implements Server {
     }
   }
 
+  @Override
   public void close() {
     this.interrupt();
     group.interrupt();
@@ -108,6 +112,7 @@ public class SocketServer extends Thread implements Server {
       thread.start();
     }
 
+    @Override
     public void run() {
       try {
         try {
@@ -132,6 +137,7 @@ public class SocketServer extends Thread implements Server {
   public static void main(String[] arg) throws Exception {
     Responder responder =
       new GenericResponder(Protocol.parse("{\"protocol\": \"X\"}")) {
+        @Override
         public Object respond(Message message, Object request)
           throws Exception {
           throw new IOException("no messages!");
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
index 73dec52..6676c8d 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
@@ -54,10 +54,12 @@ public class SocketTransceiver extends Transceiver {
     LOG.info("open to "+getRemoteName());
   }
 
+  @Override
   public String getRemoteName() {
     return channel.socket().getRemoteSocketAddress().toString();
   }
 
+  @Override
   public synchronized List<ByteBuffer> readBuffers() throws IOException {
     List<ByteBuffer> buffers = new ArrayList<>();
     while (true) {
@@ -81,6 +83,7 @@ public class SocketTransceiver extends Transceiver {
     }
   }
 
+  @Override
   public synchronized void writeBuffers(List<ByteBuffer> buffers)
     throws IOException {
     if (buffers == null) return;                  // no data to write
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
index f1b081a..9958369 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
@@ -104,6 +104,6 @@ public abstract class Transceiver implements Closeable {
     throw new IllegalStateException("Not connected.");
   }
 
-  public void close() throws IOException {}
+  @Override public void close() throws IOException {}
 }
 
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
index 26b508d..af4fc23 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
@@ -100,6 +100,7 @@ class Histogram<B, T> {
       }
     }
 
+    @Override
     public int segment(T value) {
       Map.Entry<T, Integer> e = index.floorEntry(value);
       if (e == null) {
@@ -211,7 +212,7 @@ class Histogram<B, T> {
     return totalCount;
   }
 
-
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
     boolean first = true;
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
index 8ada243..6cfaa6d 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
@@ -20,7 +20,7 @@ package org.apache.avro;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.avro.ipc.RPCContext;
 import org.apache.avro.ipc.RPCPlugin;
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java b/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
index ce6f340..6bcc76d 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
@@ -43,7 +43,7 @@ public class TestCompare {
 
   @Test
   public void testNull() throws Exception {
-    Schema schema = Schema.parse("\"null\"");
+    Schema schema = new Schema.Parser().parse("\"null\"");
     byte[] b = render(null, schema, new GenericDatumWriter<>());
     assertEquals(0, BinaryData.compare(b, 0, b, 0, schema));
   }
@@ -101,7 +101,7 @@ public class TestCompare {
   @Test
   public void testArray() throws Exception {
     String json = "{\"type\":\"array\", \"items\": \"long\"}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     GenericArray<Long> a1 = new GenericData.Array<>(1, schema);
     a1.add(1L);
     GenericArray<Long> a2 = new GenericData.Array<>(1, schema);
@@ -117,7 +117,7 @@ public class TestCompare {
       +"{\"name\":\"g\",\"type\":\"int\",\"order\":\"descending\"},"
       +"{\"name\":\"h\",\"type\":\"int\"}]}";
     String recordJson = "{\"type\":\"record\", \"name\":\"Test\","+fields;
-    Schema schema = Schema.parse(recordJson);
+    Schema schema = new Schema.Parser().parse(recordJson);
     GenericData.Record r1 = new GenericData.Record(schema);
     r1.put("f", 1);
     r1.put("g", 13);
@@ -133,7 +133,7 @@ public class TestCompare {
     check(recordJson, r1, r2);
 
     String record2Json = "{\"type\":\"record\", \"name\":\"Test2\","+fields;
-    Schema schema2 = Schema.parse(record2Json);
+    Schema schema2 = new Schema.Parser().parse(record2Json);
     GenericData.Record r3= new GenericData.Record(schema2);
     r3.put("f", 1);
     r3.put("g", 13);
@@ -145,7 +145,7 @@ public class TestCompare {
   public void testEnum() throws Exception {
     String json =
       "{\"type\":\"enum\", \"name\":\"Test\",\"symbols\": [\"A\", \"B\"]}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     check(json,
           new GenericData.EnumSymbol(schema, "A"),
           new GenericData.EnumSymbol(schema, "B"));
@@ -154,7 +154,7 @@ public class TestCompare {
   @Test
   public void testFixed() throws Exception {
     String json = "{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     check(json,
           new GenericData.Fixed(schema, new byte[]{(byte)'a'}),
           new GenericData.Fixed(schema, new byte[]{(byte)'b'}));
@@ -194,7 +194,7 @@ public class TestCompare {
   private static <T> void check(String schemaJson, T o1, T o2,
                             boolean comparable)
     throws Exception {
-    check(Schema.parse(schemaJson), o1, o2, comparable,
+    check(new Schema.Parser().parse(schemaJson), o1, o2, comparable,
           new GenericDatumWriter<>(), GenericData.get());
   }
 
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
index da7ea8c..db7e9ba 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
@@ -46,7 +46,7 @@ public class TestDataFileSpecific {
     File file = new File(DIR.getRoot().getPath(), "testSpecificDatumReaderDefaultCtor");
 
     // like the specific Foo, but with another field
-    Schema s1 = Schema.parse("{\"type\":\"record\",\"name\":\"Foo\","
+    Schema s1 = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Foo\","
             + "\"namespace\":\"org.apache.avro\",\"fields\":["
             + "{\"name\":\"label\",\"type\":\"string\"},"
             + "{\"name\":\"id\",\"type\":\"int\"}]}");
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
index 66ecdb8..d980da3 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
@@ -41,6 +41,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Random;
 
@@ -193,6 +194,7 @@ public class TestProtocolGeneric {
     Protocol.Message message =
       protocol.createMessage("hello",
                              null /* doc */,
+                             new LinkedHashMap<String,String>(),
                              Schema.createRecord(fields),
                              Schema.create(Schema.Type.STRING),
                              Schema.createUnion(new ArrayList<>()));
@@ -230,7 +232,8 @@ public class TestProtocolGeneric {
     params.add(new Field("record", record, null, null));
 
     Protocol.Message message =
-      protocol.createMessage("echo", null, Schema.createRecord(params),
+      protocol.createMessage("echo", null, new LinkedHashMap<String,String>(),
+                             Schema.createRecord(params),
                              record,
                              Schema.createUnion(new ArrayList<>()));
     protocol.getMessages().put("echo", message);
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
index 381071d..0e33627 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
@@ -242,6 +242,7 @@ public class TestProtocolSpecific {
     Protocol.Message message =
       protocol.createMessage("hello",
                              null /* doc */,
+                             new LinkedHashMap<String,String>(),
                              Schema.createRecord(fields),
                              Schema.create(Schema.Type.STRING),
                              Schema.createUnion(new ArrayList<>()));
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java b/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
index 525a8b1..f13a8b2 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
@@ -93,55 +93,55 @@ public class TestSchema {
 
   @Test
   public void testNull() throws Exception {
-    assertEquals(Schema.create(Type.NULL), Schema.parse("\"null\""));
-    assertEquals(Schema.create(Type.NULL), Schema.parse("{\"type\":\"null\"}"));
+    assertEquals(Schema.create(Type.NULL), new Schema.Parser().parse("\"null\""));
+    assertEquals(Schema.create(Type.NULL), new Schema.Parser().parse("{\"type\":\"null\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()), "\"null\"", "null", null);
   }
 
   @Test
   public void testBoolean() throws Exception {
-    assertEquals(Schema.create(Type.BOOLEAN), Schema.parse("\"boolean\""));
+    assertEquals(Schema.create(Type.BOOLEAN), new Schema.Parser().parse("\"boolean\""));
     assertEquals(Schema.create(Type.BOOLEAN),
-                 Schema.parse("{\"type\":\"boolean\"}"));
+      new Schema.Parser().parse("{\"type\":\"boolean\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"boolean\"", "true", Boolean.TRUE);
   }
 
   @Test
   public void testString() throws Exception {
-    assertEquals(Schema.create(Type.STRING), Schema.parse("\"string\""));
+    assertEquals(Schema.create(Type.STRING), new Schema.Parser().parse("\"string\""));
     assertEquals(Schema.create(Type.STRING),
-                 Schema.parse("{\"type\":\"string\"}"));
+      new Schema.Parser().parse("{\"type\":\"string\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"string\"", "\"foo\"", new Utf8("foo"));
   }
 
   @Test
   public void testBytes() throws Exception {
-    assertEquals(Schema.create(Type.BYTES), Schema.parse("\"bytes\""));
+    assertEquals(Schema.create(Type.BYTES), new Schema.Parser().parse("\"bytes\""));
     assertEquals(Schema.create(Type.BYTES),
-                 Schema.parse("{\"type\":\"bytes\"}"));
+      new Schema.Parser().parse("{\"type\":\"bytes\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"bytes\"", "\"\\u0000ABC\\u00FF\"",
           ByteBuffer.wrap(new byte[]{0,65,66,67,-1}));
   }
 
   @Test
   public void testInt() throws Exception {
-    assertEquals(Schema.create(Type.INT), Schema.parse("\"int\""));
-    assertEquals(Schema.create(Type.INT), Schema.parse("{\"type\":\"int\"}"));
+    assertEquals(Schema.create(Type.INT), new Schema.Parser().parse("\"int\""));
+    assertEquals(Schema.create(Type.INT), new Schema.Parser().parse("{\"type\":\"int\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"int\"", "9", 9);
   }
 
   @Test
   public void testLong() throws Exception {
-    assertEquals(Schema.create(Type.LONG), Schema.parse("\"long\""));
-    assertEquals(Schema.create(Type.LONG), Schema.parse("{\"type\":\"long\"}"));
+    assertEquals(Schema.create(Type.LONG), new Schema.Parser().parse("\"long\""));
+    assertEquals(Schema.create(Type.LONG), new Schema.Parser().parse("{\"type\":\"long\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"long\"", "11", 11L);
   }
 
   @Test
   public void testFloat() throws Exception {
-    assertEquals(Schema.create(Type.FLOAT), Schema.parse("\"float\""));
+    assertEquals(Schema.create(Type.FLOAT), new Schema.Parser().parse("\"float\""));
     assertEquals(Schema.create(Type.FLOAT),
-                 Schema.parse("{\"type\":\"float\"}"));
+      new Schema.Parser().parse("{\"type\":\"float\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"float\"", "1.1", 1.1f);
     checkDefault("\"float\"", "\"NaN\"", Float.NaN);
     checkDefault("\"float\"", "\"Infinity\"", Float.POSITIVE_INFINITY);
@@ -150,9 +150,9 @@ public class TestSchema {
 
   @Test
   public void testDouble() throws Exception {
-    assertEquals(Schema.create(Type.DOUBLE), Schema.parse("\"double\""));
+    assertEquals(Schema.create(Type.DOUBLE), new Schema.Parser().parse("\"double\""));
     assertEquals(Schema.create(Type.DOUBLE),
-                 Schema.parse("{\"type\":\"double\"}"));
+      new Schema.Parser().parse("{\"type\":\"double\"}"));
     check(new File(DIR.getRoot(), name.getMethodName()),"\"double\"", "1.2", 1.2);
     checkDefault("\"double\"", "\"NaN\"", Double.NaN);
     checkDefault("\"double\"", "\"Infinity\"", Double.POSITIVE_INFINITY);
@@ -162,7 +162,7 @@ public class TestSchema {
   @Test
   public void testArray() throws Exception {
     String json = "{\"type\":\"array\", \"items\": \"long\"}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     Collection<Long> array = new GenericData.Array<>(1, schema);
     array.add(1L);
     check(new File(DIR.getRoot(), name.getMethodName()),json, "[1]", array);
@@ -196,7 +196,7 @@ public class TestSchema {
   public void testRecord() throws Exception {
     String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
       +"[{\"name\":\"f\", \"type\":\"long\", \"foo\":\"bar\"}]}";
-    Schema schema = Schema.parse(recordJson);
+    Schema schema = new Schema.Parser().parse(recordJson);
 
     GenericData.Record record = new GenericData.Record(schema);
     record.put("f", 11L);
@@ -204,7 +204,7 @@ public class TestSchema {
 
     // test field props
     assertEquals("bar", schema.getField("f").getProp("foo"));
-    assertEquals("bar", Schema.parse(schema.toString())
+    assertEquals("bar", new Schema.Parser().parse(schema.toString())
                  .getField("f").getProp("foo"));
     schema.getField("f").addProp("baz", "boo");
     assertEquals("boo", schema.getField("f").getProp("baz"));
@@ -229,16 +229,16 @@ public class TestSchema {
   }
 
   @Test public void testInvalidNameTolerance() {
-    Schema.parse("{\"type\":\"record\",\"name\":\"1X\",\"fields\":[]}", false);
-    Schema.parse("{\"type\":\"record\",\"name\":\"X-\",\"fields\":[]}", false);
-    Schema.parse("{\"type\":\"record\",\"name\":\"X$\",\"fields\":[]}", false);
+    new Schema.Parser().setValidate(false).parse("{\"type\":\"record\",\"name\":\"1X\",\"fields\":[]}");
+    new Schema.Parser().setValidate(false).parse("{\"type\":\"record\",\"name\":\"X-\",\"fields\":[]}");
+    new Schema.Parser().setValidate(false).parse("{\"type\":\"record\",\"name\":\"X$\",\"fields\":[]}");
   }
 
   @Test
   public void testMapInRecord() throws Exception {
     String json = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
       +"[{\"name\":\"f\", \"type\": {\"type\":\"map\", \"values\":\"long\"}}]}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
 
     HashMap<Utf8,Long> map = new HashMap<>();
     map.put(new Utf8("a"), 1L);
@@ -251,7 +251,7 @@ public class TestSchema {
   @Test
   public void testEnum() throws Exception {
     check(new File(DIR.getRoot(), name.getMethodName()),BASIC_ENUM_SCHEMA, "\"B\"",
-          new GenericData.EnumSymbol(Schema.parse(BASIC_ENUM_SCHEMA), "B"),
+          new GenericData.EnumSymbol(new Schema.Parser().parse(BASIC_ENUM_SCHEMA), "B"),
           false);
     checkParseError("{\"type\":\"enum\"}");        // symbols required
     checkParseError("{\"type\":\"enum\",\"symbols\": [\"X\"]}"); // name reqd
@@ -266,7 +266,7 @@ public class TestSchema {
   @Test
   public void testFixed() throws Exception {
     String json = "{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}";
-    Schema schema = Schema.parse(json);
+    Schema schema = new Schema.Parser().parse(json);
     check(new File(DIR.getRoot(), name.getMethodName()),json, "\"a\"",
           new GenericData.Fixed(schema, new byte[]{(byte)'a'}), false);
     checkParseError("{\"type\":\"fixed\"}");        // size required
@@ -286,8 +286,8 @@ public class TestSchema {
   public void testRecursiveEquals() throws Exception {
     String jsonSchema = "{\"type\":\"record\", \"name\":\"List\", \"fields\": ["
       +"{\"name\":\"next\", \"type\":\"List\"}]}";
-    Schema s1 = Schema.parse(jsonSchema);
-    Schema s2 = Schema.parse(jsonSchema);
+    Schema s1 = new Schema.Parser().parse(jsonSchema);
+    Schema s2 = new Schema.Parser().parse(jsonSchema);
     assertEquals(s1, s2);
     s1.hashCode();                                // test no stackoverflow
   }
@@ -312,7 +312,7 @@ public class TestSchema {
       // check that equals and hashcode are correct and complete in a
       // reasonable amount of time
       for (Schema s1 : recs) {
-        Schema s2 = Schema.parse(s1.toString());
+        Schema s2 = new Schema.Parser().parse(s1.toString());
         assertEquals(s1.hashCode(), s2.hashCode());
         assertEquals(s1, s2);
       }
@@ -354,18 +354,18 @@ public class TestSchema {
     String record = "{\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}";
     String fixed = "{\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}";
     String enu = "{\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
-    Schema union = Schema.parse("[\"null\",\"string\","
+    Schema union = new Schema.Parser().parse("[\"null\",\"string\","
                                 +record+","+ enu+","+fixed+"]");
     checkJson(union, null, "null");
     checkJson(union, new Utf8("foo"), "{\"string\":\"foo\"}");
     checkJson(union,
-              new GenericData.Record(Schema.parse(record)),
+              new GenericData.Record(new Schema.Parser().parse(record)),
               "{\"Foo\":{}}");
     checkJson(union,
-              new GenericData.Fixed(Schema.parse(fixed), new byte[]{(byte)'a'}),
+              new GenericData.Fixed(new Schema.Parser().parse(fixed), new byte[]{(byte)'a'}),
               "{\"Bar\":\"a\"}");
     checkJson(union,
-              new GenericData.EnumSymbol(Schema.parse(enu), "X"),
+              new GenericData.EnumSymbol(new Schema.Parser().parse(enu), "X"),
               "{\"Baz\":\"X\"}");
   }
 
@@ -406,20 +406,20 @@ public class TestSchema {
     // succeed with two branches of the same named type, if different names
     Schema u;
     u = buildUnion(new Schema[] {
-        Schema.parse("{\"type\":\"record\",\"name\":\"x.A\",\"fields\":[]}"),
-        Schema.parse("{\"type\":\"record\",\"name\":\"y.A\",\"fields\":[]}")});
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"x.A\",\"fields\":[]}"),
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"y.A\",\"fields\":[]}")});
     check(new File(DIR.getRoot(), name.getMethodName()),u.toString(), false);
 
     u = buildUnion(new Schema[] {
-        Schema.parse
+      new Schema.Parser().parse
         ("{\"type\":\"enum\",\"name\":\"x.A\",\"symbols\":[\"X\"]}"),
-        Schema.parse
+      new Schema.Parser().parse
         ("{\"type\":\"enum\",\"name\":\"y.A\",\"symbols\":[\"Y\"]}")});
     check(new File(DIR.getRoot(), name.getMethodName()),u.toString(), false);
 
     u = buildUnion(new Schema[] {
-        Schema.parse("{\"type\":\"fixed\",\"name\":\"x.A\",\"size\":4}"),
-        Schema.parse("{\"type\":\"fixed\",\"name\":\"y.A\",\"size\":8}")});
+      new Schema.Parser().parse("{\"type\":\"fixed\",\"name\":\"x.A\",\"size\":4}"),
+      new Schema.Parser().parse("{\"type\":\"fixed\",\"name\":\"y.A\",\"size\":8}")});
     check(new File(DIR.getRoot(), name.getMethodName()),u.toString(), false);
 
     // fail with two branches of the same named type, but same names
@@ -438,21 +438,21 @@ public class TestSchema {
   @Test
   public void testComplexProp() {
     String json = "{\"type\":\"null\", \"foo\": [0]}";
-    Schema s = Schema.parse(json);
+    Schema s = new Schema.Parser().parse(json);
     assertNull(s.getProp("foo"));
   }
 
   @Test public void testPropOrdering() {
     String json = "{\"type\":\"int\",\"z\":\"c\",\"yy\":\"b\",\"x\":\"a\"}";
-    Schema s = Schema.parse(json);
+    Schema s = new Schema.Parser().parse(json);
     assertEquals(json, s.toString());
   }
 
   @Test
   public void testParseInputStream() throws IOException {
-    Schema s = Schema.parse(
+    Schema s = new Schema.Parser().parse(
         new ByteArrayInputStream("\"boolean\"".getBytes(StandardCharsets.UTF_8)));
-    assertEquals(Schema.parse("\"boolean\""), s);
+    assertEquals(new Schema.Parser().parse("\"boolean\""), s);
   }
 
   @Test
@@ -464,7 +464,7 @@ public class TestSchema {
       +"{\"name\":\"f\",\"type\":"+y+"},"
       +"{\"name\":\"g\",\"type\":"+z+"}"
       +"]}";
-    Schema xs = Schema.parse(x);
+    Schema xs = new Schema.Parser().parse(x);
     Schema ys = xs.getField("f").schema();
     assertEquals("p.Z", xs.getField("g").schema().getFullName());
     assertEquals("q.Z", ys.getField("f").schema().getFullName());
@@ -477,68 +477,68 @@ public class TestSchema {
     String x = "{\"type\":\"record\",\"name\":\"x.X\",\"fields\":["
       +"{\"name\":\"f\",\"type\":"+y+"}"
       +"]}";
-    Schema xs = Schema.parse(x);
-    assertEquals(xs, Schema.parse(xs.toString()));
+    Schema xs = new Schema.Parser().parse(x);
+    assertEquals(xs, new Schema.Parser().parse(xs.toString()));
   }
 
   @Test
   public void testNestedNullNamespace() {
     Schema inner =
-      Schema.parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
     Schema outer = Schema.createRecord("Outer", null, "space", false);
     outer.setFields(Arrays.asList(new Field("f", inner, null, null)));
-    assertEquals(outer, Schema.parse(outer.toString()));
+    assertEquals(outer, new Schema.Parser().parse(outer.toString()));
   }
 
   @Test
   public void testNestedNullNamespaceReferencing() {
     Schema inner =
-        Schema.parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
     Schema outer = Schema.createRecord("Outer", null, "space", false);
     outer.setFields(Arrays.asList(new Field("f1", inner, null, null),
                                   new Field("f2", inner, null, null)));
-    assertEquals(outer, Schema.parse(outer.toString()));
+    assertEquals(outer, new Schema.Parser().parse(outer.toString()));
   }
 
   @Test
   public void testNestedNullNamespaceReferencingWithUnion() {
     Schema inner =
-        Schema.parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
     Schema innerUnion = Schema.createUnion(Arrays.asList(inner, Schema.create(Type.NULL)));
     Schema outer = Schema.createRecord("Outer", null, "space", false);
     outer.setFields(Arrays.asList(new Field("f1", innerUnion, null, null),
                                   new Field("f2", innerUnion, null, null)));
-    assertEquals(outer, Schema.parse(outer.toString()));
+    assertEquals(outer, new Schema.Parser().parse(outer.toString()));
   }
 
   @Test
   public void testNestedNonNullNamespace1() {
     Schema inner1 = Schema.createEnum("InnerEnum", null, "space", Arrays.asList("x"));
-    Schema inner2 = Schema.parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":"
+    Schema inner2 = new Schema.Parser().parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":"
       +"\"InnerRecord\",\"fields\":[]}");
     Schema nullOuter = Schema.createRecord("Outer", null, null, false);
     nullOuter.setFields(Arrays.asList(new Field("f1", inner1, null, null),
                                       new Field("f2", inner2, null, null)));
-    assertEquals(nullOuter, Schema.parse(nullOuter.toString()));
+    assertEquals(nullOuter, new Schema.Parser().parse(nullOuter.toString()));
   }
 
   @Test
   public void testNestedNonNullNamespace2() {
     Schema inner1 = Schema.createFixed("InnerFixed", null, "space", 1);
-    Schema inner2 = Schema.parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":"
+    Schema inner2 = new Schema.Parser().parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":"
       +"\"InnerRecord\",\"fields\":[]}");
     Schema nullOuter = Schema.createRecord("Outer", null, null, false);
     nullOuter.setFields(Arrays.asList(new Field("f1", inner1, null, null),
                                       new Field("f2", inner2, null, null)));
-    assertEquals(nullOuter, Schema.parse(nullOuter.toString()));
+    assertEquals(nullOuter, new Schema.Parser().parse(nullOuter.toString()));
   }
 
   @Test
   public void testNullNamespaceAlias() {
     Schema s =
-      Schema.parse("{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}");
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}");
     Schema t =
-      Schema.parse("{\"type\":\"record\",\"name\":\"x.Y\",\"aliases\":[\".Z\"],"
+      new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"x.Y\",\"aliases\":[\".Z\"],"
                    +"\"fields\":[]}");
     Schema u = Schema.applyAliases(s, t);
     assertEquals("x.Y", u.getFullName());
@@ -548,7 +548,7 @@ public class TestSchema {
   public void testNullPointer() throws Exception {
     String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
       +"[{\"name\":\"x\", \"type\":\"string\"}]}";
-    Schema schema = Schema.parse(recordJson);
+    Schema schema = new Schema.Parser().parse(recordJson);
     GenericData.Record record = new GenericData.Record(schema);
     try {
       checkBinary(schema, record,
@@ -561,7 +561,7 @@ public class TestSchema {
 
   private static void checkParseError(String json) {
     try {
-      Schema.parse(json);
+      new Schema.Parser().parse(json);
     } catch (SchemaParseException e) {
       return;
     }
@@ -588,7 +588,7 @@ public class TestSchema {
    */
   @Test
   public void testDocs() {
-    Schema schema = Schema.parse(SCHEMA_WITH_DOC_TAGS);
+    Schema schema = new Schema.Parser().parse(SCHEMA_WITH_DOC_TAGS);
     assertEquals("This is not a world record.", schema.getDoc());
     assertEquals("Inner Fixed", schema.getField("inner_fixed").doc());
     assertEquals("Very Inner Fixed", schema.getField("inner_fixed").schema().getDoc());
@@ -604,11 +604,11 @@ public class TestSchema {
       "{\"name\": \"f\", \"type\": \"int\", \"doc\": \"test\"}]}";
 
     // check field doc is parsed correctly
-    Schema schema = Schema.parse(schemaStr);
+    Schema schema = new Schema.Parser().parse(schemaStr);
     assertEquals("test", schema.getField("f").doc());
 
     // check print/read cycle preserves field doc
-    schema = Schema.parse(schema.toString());
+    schema = new Schema.Parser().parse(schema.toString());
     assertEquals("test", schema.getField("f").doc());
   }
 
@@ -620,8 +620,8 @@ public class TestSchema {
     String t2 = "{\"type\":\"record\",\"name\":\"x.y\",\"aliases\":[\"a.b\"],"
       +"\"fields\":[{\"name\":\"g\",\"type\":\"long\",\"aliases\":[\"f\"]},"
       +"{\"name\":\"h\",\"type\":\"int\"}]}";
-    Schema s1 = Schema.parse(t1);
-    Schema s2 = Schema.parse(t2);
+    Schema s1 = new Schema.Parser().parse(t1);
+    Schema s2 = new Schema.Parser().parse(t2);
 
     assertEquals(s1.getAliases(), Collections.emptySet());
     assertEquals(s1.getField("f").aliases(), Collections.emptySet());
@@ -636,8 +636,8 @@ public class TestSchema {
       +"\"symbols\":[\"x\"]}";
     t2 = "{\"type\":\"enum\",\"name\":\"a.c\",\"aliases\":[\"b\"],"
       +"\"symbols\":[\"x\"]}";
-    s1 = Schema.parse(t1);
-    s2 = Schema.parse(t2);
+    s1 = new Schema.Parser().parse(t1);
+    s2 = new Schema.Parser().parse(t2);
     s3 = Schema.applyAliases(s1,s2);
     assertNotSame(s2, s3);
     assertEquals(s2, s3);
@@ -646,8 +646,8 @@ public class TestSchema {
       +"\"size\": 5}";
     t2 = "{\"type\":\"fixed\",\"name\":\"b\",\"aliases\":[\"a\"],"
       +"\"size\": 5}";
-    s1 = Schema.parse(t1);
-    s2 = Schema.parse(t2);
+    s1 = new Schema.Parser().parse(t1);
+    s2 = new Schema.Parser().parse(t2);
     s3 = Schema.applyAliases(s1,s2);
     assertNotSame(s2, s3);
     assertEquals(s2, s3);
@@ -663,7 +663,7 @@ public class TestSchema {
   }
 
   private static void check(File dst, String jsonSchema, boolean induce) throws Exception {
-    Schema schema = Schema.parse(jsonSchema);
+    Schema schema = new Schema.Parser().parse(jsonSchema);
     checkProp(schema);
     Object reuse = null;
     for (Object datum : new RandomData(schema, COUNT, true)) {
@@ -703,11 +703,11 @@ public class TestSchema {
   private static void checkProp(Schema s0) throws Exception {
     if(s0.getType().equals(Schema.Type.UNION)) return; // unions have no props
     assertNull(s0.getProp("foo"));
-    Schema s1 = Schema.parse(s0.toString());
+    Schema s1 = new Schema.Parser().parse(s0.toString());
     s1.addProp("foo", "bar");
     assertEquals("bar", s1.getProp("foo"));
     assertNotEquals(s0, s1);
-    Schema s2 = Schema.parse(s1.toString());
+    Schema s2 = new Schema.Parser().parse(s1.toString());
     assertEquals("bar", s2.getProp("foo"));
     assertEquals(s1, s2);
     assertNotEquals(s0, s2);
@@ -840,7 +840,7 @@ public class TestSchema {
   }
 
   private static final Schema ACTUAL =            // an empty record schema
-    Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[]}");
+    new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[]}");
 
   private static void checkDefault(String schemaJson, String defaultJson,
                                    Object defaultValue) throws Exception {
@@ -848,13 +848,13 @@ public class TestSchema {
       "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", "
     +"\"type\":"+schemaJson+", "
     +"\"default\":"+defaultJson+"}]}";
-    Schema expected = Schema.parse(recordJson);
+    Schema expected = new Schema.Parser().parse(recordJson);
     DatumReader<Object> in = new GenericDatumReader<>(ACTUAL, expected);
     GenericData.Record record = (GenericData.Record)
       in.read(null, DecoderFactory.get().binaryDecoder(
           new byte[0], null));
     assertEquals("Wrong default.", defaultValue, record.get("f"));
-    assertEquals("Wrong toString", expected, Schema.parse(expected.toString()));
+    assertEquals("Wrong toString", expected, new Schema.Parser().parse(expected.toString()));
   }
 
   private static void checkValidateDefaults(String schemaJson, String defaultJson) {
@@ -872,7 +872,7 @@ public class TestSchema {
   @Test(expected=AvroTypeException.class)
   public void testNoDefaultField() throws Exception {
     Schema expected =
-      Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":"+
+      new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":"+
                    "[{\"name\":\"f\", \"type\": \"string\"}]}");
     DatumReader<Object> in = new GenericDatumReader<>(ACTUAL, expected);
     in.read(null, DecoderFactory.get().binaryDecoder(
@@ -881,9 +881,9 @@ public class TestSchema {
 
   @Test
   public void testEnumMismatch() throws Exception {
-    Schema actual = Schema.parse
+    Schema actual = new Schema.Parser().parse
       ("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"X\",\"Y\"]}");
-    Schema expected = Schema.parse
+    Schema expected = new Schema.Parser().parse
       ("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"Y\",\"Z\"]}");
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     DatumWriter<Object> writer = new GenericDatumWriter<>(actual);
@@ -907,16 +907,16 @@ public class TestSchema {
 
   @Test(expected=AvroTypeException.class)
   public void testRecordWithPrimitiveName() {
-    Schema.parse("{\"type\":\"record\", \"name\":\"string\", \"fields\": []}");
+    new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"string\", \"fields\": []}");
   }
 
   @Test(expected=AvroTypeException.class)
   public void testEnumWithPrimitiveName() {
-    Schema.parse("{\"type\":\"enum\", \"name\":\"null\", \"symbols\": [\"A\"]}");
+    new Schema.Parser().parse("{\"type\":\"enum\", \"name\":\"null\", \"symbols\": [\"A\"]}");
   }
 
   private static Schema enumSchema() {
-    return Schema.parse("{ \"type\": \"enum\", \"name\": \"e\", "
+    return new Schema.Parser().parse("{ \"type\": \"enum\", \"name\": \"e\", "
         + "\"symbols\": [\"a\", \"b\"]}");
   }
 
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java b/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
index a39ab27..eebd3db 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
@@ -99,12 +99,12 @@ public class TestSpecificCompiler {
 
   @Test
   public void testPrimitiveSchemaGeneratesNothing() {
-    assertEquals(0, new SpecificCompiler(Schema.parse("\"double\"")).compile().size());
+    assertEquals(0, new SpecificCompiler(new Schema.Parser().parse("\"double\"")).compile().size());
   }
 
   @Test
   public void testSimpleEnumSchema() throws IOException {
-    Collection<OutputFile> outputs = new SpecificCompiler(Schema.parse(TestSchema.BASIC_ENUM_SCHEMA)).compile();
+    Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(TestSchema.BASIC_ENUM_SCHEMA)).compile();
     assertEquals(1, outputs.size());
     OutputFile o = outputs.iterator().next();
     assertEquals(o.path, "Test.java");
@@ -148,7 +148,7 @@ public class TestSpecificCompiler {
 
   @Test
   public void testManglingForRecords() throws IOException {
-    Collection<OutputFile> outputs = new SpecificCompiler(Schema.parse(SCHEMA)).compile();
+    Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(SCHEMA)).compile();
     assertEquals(1, outputs.size());
     String contents = outputs.iterator().next().contents;
 
@@ -165,7 +165,7 @@ public class TestSpecificCompiler {
       "{ \"name\": \"instanceof\", \"type\": \"enum\"," +
       "  \"symbols\": [\"new\", \"super\", \"switch\"] }";
     Collection<OutputFile> outputs =
-      new SpecificCompiler(Schema.parse(enumSchema)).compile();
+      new SpecificCompiler(new Schema.Parser().parse(enumSchema)).compile();
     assertEquals(1, outputs.size());
     String contents = outputs.iterator().next().contents;
 
@@ -176,7 +176,7 @@ public class TestSpecificCompiler {
 
   @Test
   public void testSchemaSplit() throws IOException {
-    SpecificCompiler compiler = new SpecificCompiler(Schema.parse(SCHEMA));
+    SpecificCompiler compiler = new SpecificCompiler(new Schema.Parser().parse(SCHEMA));
     compiler.maxStringChars = 10;
     Collection<OutputFile> files = compiler.compile();
     assertCompilesWithJavaCompiler(new File(INPUT_DIR.getRoot(), name.getMethodName()), files);
@@ -193,7 +193,7 @@ public class TestSpecificCompiler {
   @Test
   public void testSchemaWithDocs() {
     Collection<OutputFile> outputs = new SpecificCompiler(
-        Schema.parse(TestSchema.SCHEMA_WITH_DOC_TAGS)).compile();
+      new Schema.Parser().parse(TestSchema.SCHEMA_WITH_DOC_TAGS)).compile();
     assertEquals(3, outputs.size());
     int count = 0;
     for (OutputFile o : outputs) {
@@ -684,7 +684,7 @@ public class TestSpecificCompiler {
 
   @Test
   public void testAliases() throws IOException {
-    Schema s = Schema.parse
+    Schema s = new Schema.Parser().parse
       ("{\"name\":\"X\",\"type\":\"record\",\"aliases\":[\"Y\"],\"fields\":["
        +"{\"name\":\"f\",\"type\":\"int\",\"aliases\":[\"g\"]}]}");
     SpecificCompiler compiler = new SpecificCompiler(s);
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/message/TestCustomSchemaStore.java b/lang/java/ipc/src/test/java/org/apache/avro/message/TestCustomSchemaStore.java
index 5de9d6a..587d1c3 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/message/TestCustomSchemaStore.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/message/TestCustomSchemaStore.java
@@ -62,7 +62,7 @@ public class TestCustomSchemaStore {
     // Should work
     assertEquals(nestedEvolve1.getRootName(), "RootName");
     assertEquals(nestedEvolve1.getNested().getName(), "Name");
-    assertEquals(nestedEvolve1.getNested().getValue(), 1);
+    assertEquals((long) nestedEvolve1.getNested().getValue(), 1);
   }
 
   @Test(expected = MissingSchemaException.class)
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
index fca1fbe..67e29da 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
@@ -130,7 +130,7 @@ public class TestSpecificData {
 
     String json = foo.toString();
     JsonFactory factory = new JsonFactory();
-    JsonParser parser = factory.createJsonParser(json);
+    JsonParser parser = factory.createParser(json);
     ObjectMapper mapper = new ObjectMapper();
 
     // will throw exception if string is not parsable json
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
index 4d19b7e..af0082f 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
@@ -47,7 +47,7 @@ public class TestSpecificRecordBuilder {
     Assert.assertTrue(builder.hasName());
     Assert.assertEquals("James Gosling", builder.getName());
     Assert.assertTrue(builder.hasYearOfBirth());
-    Assert.assertEquals(1955, builder.getYearOfBirth());
+    Assert.assertEquals(1955, (int) builder.getYearOfBirth());
     Assert.assertFalse(builder.hasCountry());
     Assert.assertNull(builder.getCountry());
     Assert.assertTrue(builder.hasState());
@@ -59,7 +59,7 @@ public class TestSpecificRecordBuilder {
 
     Person person = builder.build();
     Assert.assertEquals("James Gosling", person.getName());
-    Assert.assertEquals(1955, person.getYearOfBirth());
+    Assert.assertEquals(1955, (int) person.getYearOfBirth());
     Assert.assertEquals("US", person.getCountry());  // country should default to "US"
     Assert.assertEquals("CA", person.getState());
     Assert.assertNotNull(person.getFriends());  // friends should default to an empty list
@@ -75,7 +75,7 @@ public class TestSpecificRecordBuilder {
 
     Person.Builder builderCopy = Person.newBuilder(person);
     Assert.assertEquals("James Gosling", builderCopy.getName());
-    Assert.assertEquals(1955, builderCopy.getYearOfBirth());
+    Assert.assertEquals(1955, (int) builderCopy.getYearOfBirth());
     Assert.assertEquals("US", builderCopy.getCountry());  // country should default to "US"
     Assert.assertEquals("CA", builderCopy.getState());
     Assert.assertNotNull(builderCopy.getFriends());  // friends should default to an empty list
@@ -102,13 +102,13 @@ public class TestSpecificRecordBuilder {
           .setProduct(product)
           .build())
       .build();
-    Assert.assertEquals(datetime, p.getDatetime());
+    Assert.assertEquals(datetime, (long) p.getDatetime());
     Assert.assertEquals(ProductPage.class, p.getPageContext().getClass());
     Assert.assertEquals(product, ((ProductPage)p.getPageContext()).getProduct());
 
     PageView p2 = PageView.newBuilder(p).build();
 
-    Assert.assertEquals(datetime, p2.getDatetime());
+    Assert.assertEquals(datetime, (long) p2.getDatetime());
     Assert.assertEquals(ProductPage.class, p2.getPageContext().getClass());
     Assert.assertEquals(product, ((ProductPage)p2.getPageContext()).getProduct());
 
diff --git a/lang/java/mapred/pom.xml b/lang/java/mapred/pom.xml
index 9d454fa..12de5dc 100644
--- a/lang/java/mapred/pom.xml
+++ b/lang/java/mapred/pom.xml
@@ -60,6 +60,28 @@
           </execution>
         </executions>
       </plugin>
+      <!-- Allow guava because hadoop brings it as a transitive dependency. -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-banned-dependencies</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <includes>
+                    <include>com.google.guava:guava</include>
+                  </includes>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
index 0c3be03..a819935 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
@@ -284,6 +284,7 @@ public class SortedKeyValueFile {
      *
      * @return An iterator.
      */
+    @Override
     public Iterator<AvroKeyValue<K, V>> iterator() {
       return new AvroKeyValue.Iterator<>(mDataFileReader.iterator());
     }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
index 83438f8..38f4c7e 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
@@ -679,7 +679,7 @@ public class AvroSequenceFile {
           LOG.debug("Using key writer schema from SequenceFile metadata: "
               + keySchemaText.toString());
           AvroSerialization.setKeyWriterSchema(
-              confWithAvro, Schema.parse(keySchemaText.toString()));
+              confWithAvro, new Schema.Parser().parse(keySchemaText.toString()));
           if (null != mKeyReaderSchema) {
             AvroSerialization.setKeyReaderSchema(confWithAvro, mKeyReaderSchema);
           }
@@ -691,7 +691,7 @@ public class AvroSequenceFile {
           LOG.debug("Using value writer schema from SequenceFile metadata: "
               + valueSchemaText.toString());
           AvroSerialization.setValueWriterSchema(
-              confWithAvro, Schema.parse(valueSchemaText.toString()));
+              confWithAvro, new Schema.Parser().parse(valueSchemaText.toString()));
           if (null != mValueReaderSchema) {
             AvroSerialization.setValueReaderSchema(confWithAvro, mValueReaderSchema);
           }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java
index 1e7d2fb..4061e44 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java
@@ -194,7 +194,7 @@ public class AvroSerialization<T> extends Configured implements Serialization<Av
    */
   public static Schema getKeyWriterSchema(Configuration conf) {
     String json = conf.get(CONF_KEY_WRITER_SCHEMA);
-    return null == json ? null : Schema.parse(json);
+    return null == json ? null : new Schema.Parser().parse(json);
   }
 
   /**
@@ -205,7 +205,7 @@ public class AvroSerialization<T> extends Configured implements Serialization<Av
    */
   public static Schema getKeyReaderSchema(Configuration conf) {
     String json = conf.get(CONF_KEY_READER_SCHEMA);
-    return null == json ? null : Schema.parse(json);
+    return null == json ? null : new Schema.Parser().parse(json);
   }
 
   /**
@@ -216,7 +216,7 @@ public class AvroSerialization<T> extends Configured implements Serialization<Av
    */
   public static Schema getValueWriterSchema(Configuration conf) {
     String json = conf.get(CONF_VALUE_WRITER_SCHEMA);
-    return null == json ? null : Schema.parse(json);
+    return null == json ? null : new Schema.Parser().parse(json);
   }
 
   /**
@@ -227,7 +227,7 @@ public class AvroSerialization<T> extends Configured implements Serialization<Av
    */
   public static Schema getValueReaderSchema(Configuration conf) {
     String json = conf.get(CONF_VALUE_READER_SCHEMA);
-    return null == json ? null : Schema.parse(json);
+    return null == json ? null : new Schema.Parser().parse(json);
   }
 
   /**
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java
index 45d8309..c175aa2 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.io.serializer.Serializer;
 public class AvroSerializer<T> implements Serializer<AvroWrapper<T>> {
 
   /** An factory for creating Avro datum encoders. */
-  private static EncoderFactory mEncoderFactory = new EncoderFactory();
+  private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
 
   /** The writer schema for the data to serialize. */
   private final Schema mWriterSchema;
@@ -100,7 +100,7 @@ public class AvroSerializer<T> implements Serializer<AvroWrapper<T>> {
   @Override
   public void open(OutputStream outputStream) throws IOException {
     mOutputStream = outputStream;
-    mAvroEncoder = mEncoderFactory.binaryEncoder(outputStream, mAvroEncoder);
+    mAvroEncoder = ENCODER_FACTORY.binaryEncoder(outputStream, mAvroEncoder);
   }
 
   /** {@inheritDoc} */
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
index 49791f5..5c326ed 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
@@ -51,14 +51,17 @@ class AvroAsTextRecordReader<T> implements RecordReader<Text, Text> {
     this.end = split.getStart() + split.getLength();
   }
 
+  @Override
   public Text createKey() {
     return new Text();
   }
 
+  @Override
   public Text createValue() {
     return new Text();
   }
 
+  @Override
   public boolean next(Text key, Text ignore) throws IOException {
     if (!reader.hasNext() || reader.pastSync(end))
       return false;
@@ -81,6 +84,7 @@ class AvroAsTextRecordReader<T> implements RecordReader<Text, Text> {
     return true;
   }
 
+  @Override
   public float getProgress() throws IOException {
     if (end == start) {
       return 0.0f;
@@ -89,11 +93,11 @@ class AvroAsTextRecordReader<T> implements RecordReader<Text, Text> {
     }
   }
 
+  @Override
   public long getPos() throws IOException {
     return reader.tell();
   }
 
+  @Override
   public void close() throws IOException { reader.close(); }
-
-
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
index 9d23fd3..323ef35 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
@@ -71,7 +71,7 @@ public class AvroJob {
   /** Return a job's map input schema. */
   public static Schema getInputSchema(Configuration job) {
     String schemaString = job.get(INPUT_SCHEMA);
-    return schemaString != null ? Schema.parse(schemaString) : null;
+    return schemaString != null ? new Schema.Parser().parse(schemaString) : null;
   }
 
   /** Configure a job's map output schema.  The map output schema defaults to
@@ -84,7 +84,7 @@ public class AvroJob {
 
   /** Return a job's map output key schema. */
   public static Schema getMapOutputSchema(Configuration job) {
-    return Schema.parse(job.get(MAP_OUTPUT_SCHEMA, job.get(OUTPUT_SCHEMA)));
+    return new Schema.Parser().parse(job.get(MAP_OUTPUT_SCHEMA, job.get(OUTPUT_SCHEMA)));
   }
 
   /** Configure a job's output schema.  Unless this is a map-only job, this
@@ -140,7 +140,7 @@ public class AvroJob {
 
   /** Return a job's output key schema. */
   public static Schema getOutputSchema(Configuration job) {
-    return Schema.parse(job.get(OUTPUT_SCHEMA));
+    return new Schema.Parser().parse(job.get(OUTPUT_SCHEMA));
   }
 
   private static void configureAvroInput(JobConf job) {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java
index 05a30ed..6091128 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java
@@ -39,10 +39,12 @@ public class AvroKeyComparator<T>
       schema = Pair.getKeySchema(AvroJob.getMapOutputSchema(conf));
   }
 
+  @Override
   public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
     return BinaryData.compare(b1, s1, l1, b2, s2, l2, schema);
   }
 
+  @Override
   public int compare(AvroWrapper<T> x, AvroWrapper<T> y) {
     return ReflectData.get().compare(x.datum(), y.datum(), schema);
   }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java
index 3e0c761..ec830bf 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java
@@ -17,7 +17,8 @@
  */
 package org.apache.avro.mapred;
 
-import java.nio.charset.StandardCharsets;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.util.Base64;
 import java.util.Collections;
 import java.util.HashMap;
@@ -98,9 +99,9 @@ import org.apache.hadoop.mapred.JobConf;
  * </p>
  */
 public class AvroMultipleInputs {
-  private static String schemaKey =
+  private static final String SCHEMA_KEY =
       "avro.mapreduce.input.multipleinputs.dir.schemas";
-  private static String mappersKey =
+  private static final String MAPPERS_KEY =
       "avro.mapreduce.input.multipleinputs.dir.mappers";
   /**
    * Add a {@link Path} with a custom {@link Schema} to the list of
@@ -116,8 +117,8 @@ public class AvroMultipleInputs {
     String schemaMapping = path.toString() + ";"
        + toBase64(inputSchema.toString());
 
-    String schemas = conf.get(schemaKey);
-    conf.set(schemaKey,
+    String schemas = conf.get(SCHEMA_KEY);
+    conf.set(SCHEMA_KEY,
         schemas == null ? schemaMapping : schemas + ","
             + schemaMapping);
 
@@ -141,8 +142,8 @@ public class AvroMultipleInputs {
 
     String mapperMapping = path.toString() + ";" + mapperClass.getName();
     System.out.println(mapperMapping);
-    String mappers = conf.get(mappersKey);
-    conf.set(mappersKey, mappers == null ? mapperMapping
+    String mappers = conf.get(MAPPERS_KEY);
+    conf.set(MAPPERS_KEY, mappers == null ? mapperMapping
        : mappers + "," + mapperMapping);
 
     conf.setMapperClass(DelegatingMapper.class);
@@ -158,11 +159,11 @@ public class AvroMultipleInputs {
    */
   @SuppressWarnings("unchecked")
   static Map<Path, Class<? extends AvroMapper>> getMapperTypeMap(JobConf conf) {
-    if (conf.get(mappersKey) == null) {
+    if (conf.get(MAPPERS_KEY) == null) {
       return Collections.emptyMap();
     }
     Map<Path, Class<? extends AvroMapper>> m = new HashMap<>();
-    String[] pathMappings = conf.get(mappersKey).split(",");
+    String[] pathMappings = conf.get(MAPPERS_KEY).split(",");
     for (String pathMapping : pathMappings) {
       String[] split = pathMapping.split(";");
       Class<? extends AvroMapper> mapClass;
@@ -185,12 +186,12 @@ public class AvroMultipleInputs {
    * @return A map of paths to schemas for the job
    */
   static Map<Path, Schema> getInputSchemaMap(JobConf conf) {
-    if (conf.get(schemaKey) == null) {
+    if (conf.get(SCHEMA_KEY) == null) {
       return Collections.emptyMap();
     }
     Map<Path, Schema> m = new HashMap<>();
     String[] schemaMappings =
-        conf.get(schemaKey).split(",");
+        conf.get(SCHEMA_KEY).split(",");
     Schema.Parser schemaParser = new Schema.Parser();
     for (String schemaMapping : schemaMappings) {
       String[] split = schemaMapping.split(";");
@@ -207,15 +208,13 @@ public class AvroMultipleInputs {
   }
 
   private static String toBase64(String rawString) {
-    final byte[] buf = rawString.getBytes(StandardCharsets.UTF_8);
-    return new String(Base64.getMimeEncoder().encode(buf),
-        StandardCharsets.UTF_8);
+    final byte[] buf = rawString.getBytes(UTF_8);
+    return new String(Base64.getMimeEncoder().encode(buf), UTF_8);
   }
 
   private static String fromBase64(String base64String) {
-    final byte[] buf = base64String.getBytes(StandardCharsets.UTF_8);
-    return new String(Base64.getMimeDecoder().decode(buf),
-        StandardCharsets.UTF_8);
+    final byte[] buf = base64String.getBytes(UTF_8);
+    return new String(Base64.getMimeDecoder().decode(buf), UTF_8);
   }
 
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
index ce385f6..a230ae1 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
@@ -435,11 +435,13 @@ public class AvroMultipleOutputs {
     }
 
     @SuppressWarnings({"unchecked"})
+    @Override
     public void write(Object key, Object value) throws IOException {
       reporter.incrCounter(COUNTERS_GROUP, counterName, 1);
       writer.write(key, value);
     }
 
+    @Override
     public void close(Reporter reporter) throws IOException {
       writer.close(reporter);
     }
@@ -454,7 +456,7 @@ public class AvroMultipleOutputs {
    * @param datum       output data
    * @throws IOException thrown if output collector could not be created
    */
-  public void collect(String namedOutput, Reporter reporter,Object datum) throws IOException{
+  public void collect(String namedOutput, Reporter reporter, Object datum) throws IOException{
     getCollector(namedOutput,reporter).collect(datum);
   }
 
@@ -468,7 +470,7 @@ public class AvroMultipleOutputs {
    * @param schema      schema to use for this output
    * @throws IOException thrown if output collector could not be created
   */
-  public void collect(String namedOutput, Reporter reporter, Schema schema,Object datum) throws IOException{
+  public void collect(String namedOutput, Reporter reporter, Schema schema, Object datum) throws IOException{
     getCollector(namedOutput,reporter,schema).collect(datum);
   }
 
@@ -483,7 +485,7 @@ public class AvroMultipleOutputs {
    * @param schema      schema to use for this output
    * @throws IOException thrown if output collector could not be created
   */
-  public void collect(String namedOutput,Reporter reporter,Schema schema,Object datum,String baseOutputPath) throws IOException{
+  public void collect(String namedOutput, Reporter reporter, Schema schema, Object datum, String baseOutputPath) throws IOException{
     getCollector(namedOutput,null,reporter,baseOutputPath,schema).collect(datum);
   }
 
@@ -570,6 +572,7 @@ public class AvroMultipleOutputs {
     return new AvroCollector() {
 
       @SuppressWarnings({"unchecked"})
+      @Override
       public void collect(Object key) throws IOException{
        AvroWrapper wrapper = new AvroWrapper(key);
        writer.write(wrapper, NullWritable.get());
@@ -602,6 +605,7 @@ public class AvroMultipleOutputs {
    public static final String CONFIG_NAMED_OUTPUT = "mo.config.namedOutput";
 
    @SuppressWarnings({"unchecked", "deprecation"})
+   @Override
    public RecordWriter<Object, Object> getRecordWriter(FileSystem fs,JobConf job, String baseFileName, Progressable arg3) throws IOException {
    String nameOutput = job.get(CONFIG_NAMED_OUTPUT, null);
    String fileName = getUniqueName(job, baseFileName);
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
index 3714988..88e5735 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
@@ -163,10 +163,12 @@ public class AvroOutputFormat <T>
     writer.create(schema, path.getFileSystem(job).create(path));
 
     return new RecordWriter<AvroWrapper<T>, NullWritable>() {
+      @Override
         public void write(AvroWrapper<T> wrapper, NullWritable ignore)
           throws IOException {
           writer.append(wrapper.datum());
         }
+      @Override
         public void close(Reporter reporter) throws IOException {
           writer.close();
         }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
index 58325c0..313dec3 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
@@ -53,12 +53,15 @@ public class AvroRecordReader<T>
     this.end = split.getStart() + split.getLength();
   }
 
+  @Override
   public AvroWrapper<T> createKey() {
     return new AvroWrapper<>(null);
   }
 
+  @Override
   public NullWritable createValue() { return NullWritable.get(); }
 
+  @Override
   public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
     throws IOException {
     if (!reader.hasNext() || reader.pastSync(end))
@@ -67,6 +70,7 @@ public class AvroRecordReader<T>
     return true;
   }
 
+  @Override
   public float getProgress() throws IOException {
     if (end == start) {
       return 0.0f;
@@ -75,10 +79,12 @@ public class AvroRecordReader<T>
     }
   }
 
+  @Override
   public long getPos() throws IOException {
     return reader.tell();
   }
 
+  @Override
   public void close() throws IOException { reader.close(); }
 
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
index c4f3f8d..e1db8a1 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
@@ -41,12 +41,14 @@ import org.apache.avro.io.EncoderFactory;
 public class AvroSerialization<T> extends Configured
   implements Serialization<AvroWrapper<T>> {
 
+  @Override
   public boolean accept(Class<?> c) {
     return AvroWrapper.class.isAssignableFrom(c);
   }
 
   /** Returns the specified map output deserializer.  Defaults to the final
    * output deserializer if no map output schema was specified. */
+  @Override
   public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
     Configuration conf = getConf();
     boolean isKey = AvroKey.class.isAssignableFrom(c);
@@ -72,10 +74,12 @@ public class AvroSerialization<T> extends Configured
       this.isKey = isKey;
     }
 
+    @Override
     public void open(InputStream in) {
       this.decoder = FACTORY.directBinaryDecoder(in, decoder);
     }
 
+    @Override
     public AvroWrapper<T> deserialize(AvroWrapper<T> wrapper)
       throws IOException {
       T datum = reader.read(wrapper == null ? null : wrapper.datum(), decoder);
@@ -87,6 +91,7 @@ public class AvroSerialization<T> extends Configured
       return wrapper;
     }
 
+    @Override
     public void close() throws IOException {
       decoder.inputStream().close();
     }
@@ -94,6 +99,7 @@ public class AvroSerialization<T> extends Configured
   }
 
   /** Returns the specified output serializer. */
+  @Override
   public Serializer<AvroWrapper<T>> getSerializer(Class<AvroWrapper<T>> c) {
     // AvroWrapper used for final output, AvroKey or AvroValue for map output
     boolean isFinalOutput = c.equals(AvroWrapper.class);
@@ -117,11 +123,13 @@ public class AvroSerialization<T> extends Configured
       this.writer = writer;
     }
 
+    @Override
     public void open(OutputStream out) {
       this.out = out;
       this.encoder = new EncoderFactory().binaryEncoder(out, null);
     }
 
+    @Override
     public void serialize(AvroWrapper<T> wrapper) throws IOException {
       writer.write(wrapper.datum(), encoder);
       // would be a lot faster if the Serializer interface had a flush()
@@ -130,6 +138,7 @@ public class AvroSerialization<T> extends Configured
       encoder.flush();
     }
 
+    @Override
     public void close() throws IOException {
       out.close();
     }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
index f07905f..be974fc 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
@@ -74,11 +74,12 @@ public class AvroTextOutputFormat<K, V> extends FileOutputFormat<K, V> {
       this.keyValueSeparator = keyValueSeparator;
     }
 
+    @Override
     public void write(K key, V value) throws IOException {
       boolean nullKey = key == null || key instanceof NullWritable;
       boolean nullValue = value == null || value instanceof NullWritable;
       if (nullKey && nullValue) {
-        return;
+        // NO-OP
       } else if (!nullKey && nullValue) {
         writer.append(toByteBuffer(key));
       } else if (nullKey && !nullValue) {
@@ -88,6 +89,7 @@ public class AvroTextOutputFormat<K, V> extends FileOutputFormat<K, V> {
       }
     }
 
+    @Override
     public void close(Reporter reporter) throws IOException {
       writer.close();
     }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
index 3e49519..e46caaf 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
@@ -58,25 +58,29 @@ public class AvroUtf8InputFormat
       this.lineRecordReader = new LineRecordReader(job, split);
     }
 
+    @Override
     public void close() throws IOException {
       lineRecordReader.close();
     }
 
+    @Override
     public long getPos() throws IOException {
       return lineRecordReader.getPos();
     }
 
+    @Override
     public float getProgress() throws IOException {
       return lineRecordReader.getProgress();
     }
 
+    @Override
     public boolean next(AvroWrapper<Utf8> key, NullWritable value)
       throws IOException {
       boolean success = lineRecordReader.next(currentKeyHolder,
           currentValueHolder);
       if (success) {
         key.datum(new Utf8(currentValueHolder.getBytes())
-            .setLength(currentValueHolder.getLength()));
+            .setByteLength(currentValueHolder.getLength()));
       } else {
         key.datum(null);
       }
@@ -97,10 +101,12 @@ public class AvroUtf8InputFormat
 
   private CompressionCodecFactory compressionCodecs = null;
 
+  @Override
   public void configure(JobConf conf) {
     compressionCodecs = new CompressionCodecFactory(conf);
   }
 
+  @Override
   protected boolean isSplitable(FileSystem fs, Path file) {
     return compressionCodecs.getCodec(file) == null;
   }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
index 2d46677..9265052 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
@@ -34,10 +34,12 @@ public class AvroWrapper<T> {
   /** Set the wrapped datum. */
   public void datum(T datum) { this.datum = datum; }
 
+  @Override
   public int hashCode() {
     return (datum == null) ? 0 : datum.hashCode();
   }
 
+  @Override
   public boolean equals(Object obj) {
     if (this == obj)
       return true;
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java
index ade8ad5..5a431a5 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java
@@ -21,7 +21,6 @@ package org.apache.avro.mapred;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -34,6 +33,7 @@ import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.lib.MultipleInputs;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -43,6 +43,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  */
 class DelegatingInputFormat<K, V> implements InputFormat<K, V> {
 
+  @Override
   public InputSplit[] getSplits(JobConf conf, int numSplits) throws IOException {
 
     JobConf confCopy = new JobConf(conf);
@@ -58,7 +59,7 @@ class DelegatingInputFormat<K, V> implements InputFormat<K, V> {
     // First, build a map of Schemas to Paths
     for (Entry<Path, Schema> entry : schemaMap.entrySet()) {
       if (!schemaPaths.containsKey(entry.getValue())) {
-        schemaPaths.put(entry.getValue(), new LinkedList<>());
+        schemaPaths.put(entry.getValue(), new ArrayList<>());
         System.out.println(entry.getValue());
         System.out.println(entry.getKey());
       }
@@ -82,7 +83,7 @@ class DelegatingInputFormat<K, V> implements InputFormat<K, V> {
       for (Path path : paths) {
        Class<? extends AvroMapper> mapperClass = mapperMap.get(path);
        if (!mapperPaths.containsKey(mapperClass)) {
-         mapperPaths.put(mapperClass, new LinkedList<>());
+         mapperPaths.put(mapperClass, new ArrayList<>());
        }
 
        mapperPaths.get(mapperClass).add(path);
@@ -116,6 +117,7 @@ class DelegatingInputFormat<K, V> implements InputFormat<K, V> {
   }
 
   @SuppressWarnings("unchecked")
+  @Override
   public RecordReader<K, V> getRecordReader(InputSplit split, JobConf conf,
       Reporter reporter) throws IOException {
 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java
index 80599ce..ff1f31f 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java
@@ -44,6 +44,7 @@ implements Mapper<AvroWrapper<IN>,NullWritable,KO,VO>
   boolean isMapOnly;
   AvroCollector<OUT> out;
 
+  @Override
   public void configure(JobConf conf) {
     this.conf = conf;
     this.isMapOnly = conf.getNumReduceTasks() == 0;
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
index 35ceef8..c6d9040 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
@@ -46,6 +46,7 @@ class HadoopCombiner<K,V>
       this.collector = collector;
     }
 
+    @Override
     public void collect(Pair<K,V> datum) throws IOException {
       keyWrapper.datum(datum.key());              // split the Pair
       valueWrapper.datum(datum.value());
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java
index 6be9286..b1bb2b2 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java
@@ -45,6 +45,7 @@ class HadoopReducer<K,V,OUT>
       this.out = out;
     }
 
+    @Override
     public void collect(OUT datum) throws IOException {
       wrapper.datum(datum);
       out.collect(wrapper, NullWritable.get());
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
index 82bde1b..615cd1c 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
@@ -43,10 +43,10 @@ abstract class HadoopReducerBase<K,V,OUT,KO,VO> extends MapReduceBase
 
   class ReduceIterable implements Iterable<V>, Iterator<V> {
     private Iterator<AvroValue<V>> values;
-    public boolean hasNext() { return values.hasNext(); }
-    public V next() { return values.next().datum(); }
-    public void remove() { throw new UnsupportedOperationException(); }
-    public Iterator<V> iterator() { return this; }
+    @Override public boolean hasNext() { return values.hasNext(); }
+    @Override public V next() { return values.next().datum(); }
+    @Override public void remove() { throw new UnsupportedOperationException(); }
+    @Override public Iterator<V> iterator() { return this; }
   }
   private ReduceIterable reduceIterable = new ReduceIterable();
 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java
index 742bb17..91fb5b6 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java
@@ -36,6 +36,7 @@ class MapCollector<OUT,K,V,KO,VO> extends AvroCollector<OUT> {
     this.isMapOnly = isMapOnly;
   }
 
+  @Override
   public void collect(OUT datum) throws IOException {
     if (isMapOnly) {
       wrapper.datum(datum);
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
index d4f78b9..bb5d01d 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
@@ -201,37 +201,38 @@ public class SequenceFileReader<K,V> implements FileReader<Pair<K,V>> {
     WRITABLE_CONVERTERS.put
       (NullWritable.class,
        new Converter<Void>() {
-        public Void convert(Writable o) { return null; }
+         @Override public Void convert(Writable o) { return null; }
       });
     WRITABLE_CONVERTERS.put
       (BooleanWritable.class,
        new Converter<Boolean>() {
-        public Boolean convert(Writable o) {return ((BooleanWritable)o).get();}
+         @Override public Boolean convert(Writable o) {return ((BooleanWritable)o).get();}
       });
     WRITABLE_CONVERTERS.put
       (IntWritable.class,
        new Converter<Integer>() {
-        public Integer convert(Writable o) { return ((IntWritable)o).get(); }
+         @Override public Integer convert(Writable o) { return ((IntWritable)o).get(); }
       });
     WRITABLE_CONVERTERS.put
       (LongWritable.class,
        new Converter<Long>() {
-        public Long convert(Writable o) { return ((LongWritable)o).get(); }
+         @Override public Long convert(Writable o) { return ((LongWritable)o).get(); }
       });
     WRITABLE_CONVERTERS.put
       (FloatWritable.class,
        new Converter<Float>() {
-        public Float convert(Writable o) { return ((FloatWritable)o).get(); }
+        @Override  public Float convert(Writable o) { return ((FloatWritable)o).get(); }
       });
     WRITABLE_CONVERTERS.put
       (DoubleWritable.class,
        new Converter<Double>() {
-        public Double convert(Writable o) { return ((DoubleWritable)o).get(); }
+         @Override public Double convert(Writable o) { return ((DoubleWritable)o).get(); }
       });
     WRITABLE_CONVERTERS.put
       (BytesWritable.class,
        new Converter<ByteBuffer>() {
-        public ByteBuffer convert(Writable o) {
+         @Override
+         public ByteBuffer convert(Writable o) {
           BytesWritable b = (BytesWritable)o;
           return ByteBuffer.wrap(b.getBytes(), 0, b.getLength());
         }
@@ -239,9 +240,8 @@ public class SequenceFileReader<K,V> implements FileReader<Pair<K,V>> {
     WRITABLE_CONVERTERS.put
       (Text.class,
        new Converter<String>() {
-        public String convert(Writable o) { return o.toString(); }
+         @Override
+         public String convert(Writable o) { return o.toString(); }
       });
   }
-
-
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java
index fc01816..c8eecc2 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java
@@ -110,15 +110,18 @@ class TaggedInputSplit implements Configurable, InputSplit {
     return schema;
   }
 
+  @Override
   public long getLength() throws IOException {
     return inputSplit.getLength();
   }
 
+  @Override
   public String[] getLocations() throws IOException {
     return inputSplit.getLocations();
   }
 
   @SuppressWarnings("unchecked")
+  @Override
   public void readFields(DataInput in) throws IOException {
     inputSplitClass = (Class<? extends InputSplit>) readClass(in);
     inputSplit = (InputSplit) ReflectionUtils
@@ -139,6 +142,7 @@ class TaggedInputSplit implements Configurable, InputSplit {
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     Text.writeString(out, inputSplitClass.getName());
     inputSplit.write(out);
@@ -147,10 +151,12 @@ class TaggedInputSplit implements Configurable, InputSplit {
     Text.writeString(out, schema.toString());
   }
 
+  @Override
   public Configuration getConf() {
     return conf;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
index da1dbe3..618db8d 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
@@ -77,7 +77,7 @@ public class TetherJob extends Configured {
           }
           job.set(TETHER_EXEC_ARGS, sb.toString());
         }
-        job.set(TETHER_EXEC_CACHED,  (new Boolean(cached)).toString());
+        job.set(TETHER_EXEC_CACHED,  (Boolean.valueOf(cached)).toString());
   }
 
   /**
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
index 6a87af5..261c95f 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
@@ -47,6 +47,7 @@ class TetherOutputFormat
   }
 
   @SuppressWarnings("unchecked")
+  @Override
   public RecordWriter<TetherData, NullWritable>
     getRecordWriter(FileSystem ignore, JobConf job,
                     String name, Progressable prog)
@@ -67,10 +68,12 @@ class TetherOutputFormat
     writer.create(schema, path.getFileSystem(job).create(path));
 
     return new RecordWriter<TetherData, NullWritable>() {
+      @Override
         public void write(TetherData datum, NullWritable ignore)
           throws IOException {
           writer.appendEncoded(datum.buffer());
         }
+      @Override
         public void close(Reporter reporter) throws IOException {
           writer.close();
         }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java
index c320deb..75a810a 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java
@@ -44,6 +44,7 @@ class TetherOutputService implements OutputProtocol {
     this.collector = collector;
   }
 
+  @Override
   public synchronized void configure(int inputPort) {
     LOG.info("got input port from child: inputport="+inputPort);
     this.inputPort = inputPort;
@@ -63,6 +64,7 @@ class TetherOutputService implements OutputProtocol {
     return inputPort;
   }
 
+  @Override
   public void output(ByteBuffer datum) {
     try {
       collector.collect(new TetherData(datum), NullWritable.get());
@@ -74,24 +76,28 @@ class TetherOutputService implements OutputProtocol {
     }
   }
 
+  @Override
   public void outputPartitioned(int partition, ByteBuffer datum) {
     TetherPartitioner.setNextPartition(partition);
     output(datum);
   }
 
+  @Override
   public void status(String message) { reporter.setStatus(message.toString());  }
 
-
+  @Override
   public void count(String group, String name, long amount) {
     reporter.getCounter(group.toString(), name.toString()).increment(amount);
   }
 
+  @Override
   public synchronized void fail(String message) {
     LOG.warn("Failing: "+message);
     error = message.toString();
     notify();
   }
 
+  @Override
   public synchronized void complete() {
     LOG.info("got task complete");
     complete = true;
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
index 6930294..70fdf8a 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
@@ -34,6 +34,7 @@ class TetherPartitioner implements Partitioner<TetherData, NullWritable> {
 
   private Schema schema;
 
+  @Override
   public void configure(JobConf job) {
     schema = AvroJob.getMapOutputSchema(job);
   }
@@ -42,6 +43,7 @@ class TetherPartitioner implements Partitioner<TetherData, NullWritable> {
     CACHE.set(newValue);
   }
 
+  @Override
   public int getPartition(TetherData key, NullWritable value,
                           int numPartitions) {
     Integer result = CACHE.get();
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
index 7842aea..f2ab6d7 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
@@ -54,10 +54,13 @@ class TetherRecordReader
 
   public Schema getSchema() { return reader.getSchema(); }
 
+  @Override
   public TetherData createKey() { return new TetherData(); }
 
+  @Override
   public NullWritable createValue() { return NullWritable.get(); }
 
+  @Override
   public boolean next(TetherData data, NullWritable ignore)
     throws IOException {
     if (!reader.hasNext() || reader.pastSync(end))
@@ -67,6 +70,7 @@ class TetherRecordReader
     return true;
   }
 
+  @Override
   public float getProgress() throws IOException {
     if (end == start) {
       return 0.0f;
@@ -75,10 +79,11 @@ class TetherRecordReader
     }
   }
 
+  @Override
   public long getPos() throws IOException {
     return in.tell();
   }
 
+  @Override
   public void close() throws IOException { reader.close(); }
-
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
index 3895983..f64ffc4 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
@@ -36,10 +36,12 @@ class TetherReducer
   private TetheredProcess process;
   private boolean error;
 
+  @Override
   public void configure(JobConf job) {
     this.job = job;
   }
 
+  @Override
   public void reduce(TetherData datum, Iterator<NullWritable> ignore,
                      OutputCollector<TetherData, NullWritable> collector,
                      Reporter reporter) throws IOException {
@@ -64,6 +66,7 @@ class TetherReducer
   /**
    * Handle the end of the input by closing down the application.
    */
+  @Override
   public void close() throws IOException {
     if (process == null) return;
     try {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java
index d1644a1..466593e 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java
@@ -150,7 +150,7 @@ public final class AvroJob {
    */
   public static Schema getInputKeySchema(Configuration conf) {
     String schemaString = conf.get(CONF_INPUT_KEY_SCHEMA);
-    return schemaString != null ? Schema.parse(schemaString) : null;
+    return schemaString != null ? new Schema.Parser().parse(schemaString) : null;
   }
 
   /**
@@ -161,7 +161,7 @@ public final class AvroJob {
    */
   public static Schema getInputValueSchema(Configuration conf) {
     String schemaString = conf.get(CONF_INPUT_VALUE_SCHEMA);
-    return schemaString != null ? Schema.parse(schemaString) : null;
+    return schemaString != null ? new Schema.Parser().parse(schemaString) : null;
   }
 
   /**
@@ -192,7 +192,7 @@ public final class AvroJob {
    */
   public static Schema getOutputKeySchema(Configuration conf) {
     String schemaString = conf.get(CONF_OUTPUT_KEY_SCHEMA);
-    return schemaString != null ? Schema.parse(schemaString) : null;
+    return schemaString != null ? new Schema.Parser().parse(schemaString) : null;
   }
 
   /**
@@ -203,6 +203,6 @@ public final class AvroJob {
    */
   public static Schema getOutputValueSchema(Configuration conf) {
     String schemaString = conf.get(CONF_OUTPUT_VALUE_SCHEMA);
-    return schemaString != null ? Schema.parse(schemaString) : null;
+    return schemaString != null ? new Schema.Parser().parse(schemaString) : null;
   }
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
index 2dc0f4b..cb0b1d3 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
@@ -64,7 +64,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * Usage pattern for job submission:
  * <pre>
  *
- * Job job = new Job();
+ * Job job = Job.getInstance();
  *
  * FileInputFormat.setInputPath(job, inDir);
  * FileOutputFormat.setOutputPath(job, outDir);
@@ -311,12 +311,14 @@ public class AvroMultipleOutputs{
     }
 
     @SuppressWarnings({"unchecked"})
+    @Override
     public void write(Object key, Object value)
         throws IOException, InterruptedException {
       context.getCounter(COUNTERS_GROUP, counterName).increment(1);
       writer.write(key, value);
     }
 
+    @Override
     public void close(TaskAttemptContext context)
         throws IOException, InterruptedException {
       writer.close(context);
@@ -434,7 +436,7 @@ public class AvroMultipleOutputs{
       Schema valSchema, String baseOutputPath) throws IOException,
       InterruptedException {
     checkBaseOutputPath(baseOutputPath);
-    Job job = new Job(context.getConfiguration());
+    Job job = Job.getInstance(context.getConfiguration());
     setSchema(job, keySchema, valSchema);
     TaskAttemptContext taskContext = createTaskAttemptContext(job.getConfiguration(), context.getTaskAttemptID());
     getRecordWriter(taskContext, baseOutputPath).write(key, value);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java
index 9b094bb..b0a67cc 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java
@@ -47,7 +47,7 @@ public class TestAvroDatumConverterFactory {
 
   @Before
   public void setup() throws IOException {
-    mJob = new Job();
+    mJob = Job.getInstance();
     mFactory = new AvroDatumConverterFactory(mJob.getConfiguration());
   }
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java
index 67a0e84..72605ce 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java
@@ -56,7 +56,7 @@ public class TestAvroSerialization {
   public void testGetSerializerForKey() throws IOException {
     // Set the writer schema in the job configuration.
     Schema writerSchema = Schema.create(Schema.Type.STRING);
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setMapOutputKeySchema(job, writerSchema);
 
     // Get a serializer from the configuration.
@@ -75,7 +75,7 @@ public class TestAvroSerialization {
   public void testGetSerializerForValue() throws IOException {
     // Set the writer schema in the job configuration.
     Schema writerSchema = Schema.create(Schema.Type.STRING);
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setMapOutputValueSchema(job, writerSchema);
 
     // Get a serializer from the configuration.
@@ -94,7 +94,7 @@ public class TestAvroSerialization {
   public void testGetDeserializerForKey() throws IOException {
     // Set the reader schema in the job configuration.
     Schema readerSchema = Schema.create(Schema.Type.STRING);
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setMapOutputKeySchema(job, readerSchema);
 
     // Get a deserializer from the configuration.
@@ -113,7 +113,7 @@ public class TestAvroSerialization {
   public void testGetDeserializerForValue() throws IOException {
     // Set the reader schema in the job configuration.
     Schema readerSchema = Schema.create(Schema.Type.STRING);
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setMapOutputValueSchema(job, readerSchema);
 
     // Get a deserializer from the configuration.
@@ -141,7 +141,7 @@ public class TestAvroSerialization {
   }
 
   private <T, O> O roundTrip(Schema schema, T data, Class<? extends GenericData> modelClass) throws IOException {
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setMapOutputKeySchema(job, schema);
     if (modelClass != null)
       AvroJob.setDataModelClass(job, modelClass);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
index 350dc7f..665d961 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
@@ -18,7 +18,6 @@
 
 package org.apache.avro.mapred;
 
-import junit.framework.Assert;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,11 +29,12 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
-import java.io.IOException;
-import java.nio.file.Files;
 import java.util.HashSet;
 import java.util.Set;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class TestAvroInputFormat {
 
   @Rule
@@ -69,16 +69,16 @@ public class TestAvroInputFormat {
 
     AvroInputFormat inputFormat = new AvroInputFormat();
     FileStatus[] statuses = inputFormat.listStatus(conf);
-    Assert.assertEquals(1, statuses.length);
-    Assert.assertEquals("somefile.avro", statuses[0].getPath().getName());
+    assertEquals(1, statuses.length);
+    assertEquals("somefile.avro", statuses[0].getPath().getName());
 
     conf.setBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY, false);
     statuses = inputFormat.listStatus(conf);
-    Assert.assertEquals(2, statuses.length);
+    assertEquals(2, statuses.length);
     Set<String> names = new HashSet<>();
     names.add(statuses[0].getPath().getName());
     names.add(statuses[1].getPath().getName());
-    Assert.assertTrue(names.contains("somefile.avro"));
-    Assert.assertTrue(names.contains("someotherfile.txt"));
+    assertTrue(names.contains("somefile.avro"));
+    assertTrue(names.contains("someotherfile.txt"));
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java
index 1ba9189..20b0d6f 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java
@@ -243,7 +243,7 @@ public class TestAvroMultipleInputs {
     Path inputPath2 = new Path(INPUT_DIR_2.getRoot().getPath());
     Path outputPath = new Path(OUTPUT_DIR.getRoot().getPath());
 
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     writeNamesFiles(new File(inputPath1.toUri().getPath()));
     writeBalancesFiles(new File(inputPath2.toUri().getPath()));
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
index 7e84e5d..5aa07e3 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
@@ -18,7 +18,7 @@
 
 package org.apache.avro.mapred;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.apache.avro.Schema;
 import org.apache.avro.util.Utf8;
 import org.apache.hadoop.fs.FileStatus;
@@ -314,7 +314,7 @@ public class TestAvroMultipleOutputs {
     AvroJob.setInputSchema(job, readerSchema);
     Path inputPath = new Path(inputDirectory + "/myavro2-m-00000.avro");
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
-    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), (String[])null);
     AvroRecordReader<Utf8> recordReader = new AvroRecordReader<>(job, fileSplit);
     AvroWrapper<Utf8> inputPair = new AvroWrapper<>(null);
     NullWritable ignore = NullWritable.get();
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
index a468f4e..734d9aa 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
@@ -48,7 +48,7 @@ public class TestAvroTextSort {
     JobConf job = new JobConf();
     String inputPath = INPUT_DIR.getRoot().getPath();
     Path outputPath = new Path(OUTPUT_DIR.getRoot().getPath());
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     WordCountUtil.writeLinesBytesFile(inputPath);
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
index c171540..9e2cb69 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
 import java.io.IOException;
 import java.io.File;
 import java.net.URI;
-import java.nio.file.Files;
 import java.util.Iterator;
 
 import org.apache.hadoop.io.SequenceFile;
@@ -113,7 +112,7 @@ public class TestSequenceFileReader {
   public void testSequenceFileInputFormat() throws Exception {
     JobConf job = new JobConf();
     Path outputPath = new Path(OUTPUT_DIR.getRoot().getPath());
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     // configure input for Avro from sequence file
     AvroJob.setInputSequenceFile(job);
@@ -150,7 +149,7 @@ public class TestSequenceFileReader {
   public void testNonAvroMapper() throws Exception {
     JobConf job = new JobConf();
     Path outputPath = new Path(OUTPUT_DIR.getRoot().getPath());
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     // configure input for non-Avro sequence file
     job.setInputFormat(SequenceFileInputFormat.class);
@@ -188,7 +187,7 @@ public class TestSequenceFileReader {
   public void testNonAvroMapOnly() throws Exception {
     JobConf job = new JobConf();
     Path outputPath = new Path(OUTPUT_DIR.getRoot().getPath());
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     // configure input for non-Avro sequence file
     job.setInputFormat(SequenceFileInputFormat.class);
@@ -228,7 +227,7 @@ public class TestSequenceFileReader {
   public void testNonAvroReducer() throws Exception {
     JobConf job = new JobConf();
     Path outputPath = new Path(OUTPUT_DIR.getRoot().getPath());
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     // configure input for Avro from sequence file
     AvroJob.setInputSequenceFile(job);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
index f3128ec..b227926 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
@@ -18,7 +18,8 @@
 
 package org.apache.avro.mapred;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+
 import org.apache.avro.Schema;
 import org.apache.avro.util.Utf8;
 import org.apache.hadoop.fs.FileStatus;
@@ -132,19 +133,19 @@ public class TestWordCount {
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while (recordReader.next(inputPair, ignore)) {
-      Assert.assertEquals(inputPair.datum().get(0), defaultRank);
+      assertEquals(inputPair.datum().get(0), defaultRank);
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
 
-    Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
+    assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
 
     long actualSumOfCounts = 0;
     for (Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
 
-    Assert.assertEquals(sumOfCounts, actualSumOfCounts);
+    assertEquals(sumOfCounts, actualSumOfCounts);
   }
 
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
index 2bf2bdb..5927734 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
@@ -69,7 +69,7 @@ public class TestWordCountTether {
     JobConf job = new JobConf();
     Path outputPath = new Path(outputPathStr);
 
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     // create the input file
     WordCountUtil.writeLinesFile(inputPath);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
index a3b6065..7390369 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
@@ -141,8 +141,8 @@ public abstract class TetherTask<IN,MID,OUT> {
   void configure(TaskType taskType, CharSequence inSchemaText, CharSequence outSchemaText) {
     this.taskType = taskType;
     try {
-      Schema inSchema = Schema.parse(inSchemaText.toString());
-      Schema outSchema = Schema.parse(outSchemaText.toString());
+      Schema inSchema = new Schema.Parser().parse(inSchemaText.toString());
+      Schema outSchema = new Schema.Parser().parse(outSchemaText.toString());
       switch (taskType) {
       case MAP:
         this.inReader = new SpecificDatumReader<>(inSchema);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
index 092f3a1..99e6dd6 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
@@ -41,7 +41,7 @@ public class TestAvroKeyInputFormat {
   @Test
   public void testCreateRecordReader() throws IOException, InterruptedException {
     // Set up the job configuration.
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING));
     Configuration conf = job.getConfiguration();
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
index 532ebe7..d7de2e7 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
@@ -120,7 +120,7 @@ public class TestAvroKeyOutputFormat {
   private void testGetRecordWriter(Configuration conf, CodecFactory expectedCodec, int expectedSyncInterval)
       throws IOException {
     // Configure a mock task attempt context.
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.getConfiguration().set("mapred.output.dir", mTempDir.getRoot().getPath());
     Schema writerSchema = Schema.create(Schema.Type.INT);
     AvroJob.setOutputKeySchema(job, writerSchema);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
index b059c37..8e1e830 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -57,7 +57,7 @@ import org.junit.Test;
 public class TestAvroKeyValueRecordWriter {
   @Test
   public void testWriteRecords() throws IOException {
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
     TaskAttemptContext context = createMock(TaskAttemptContext.class);
 
@@ -78,10 +78,10 @@ public class TestAvroKeyValueRecordWriter {
         = new AvroKeyValueRecordWriter<>(keyConverter, valueConverter,
             new ReflectData(), compressionCodec, outputStream);
     TextStats appleStats = new TextStats();
-    appleStats.name = "apple";
+    appleStats.setName("apple");
     writer.write(new Text("apple"), new AvroValue<>(appleStats));
     TextStats bananaStats = new TextStats();
-    bananaStats.name = "banana";
+    bananaStats.setName("banana");
     writer.write(new Text("banana"), new AvroValue<>(bananaStats));
     writer.close(context);
 
@@ -101,7 +101,7 @@ public class TestAvroKeyValueRecordWriter {
         = new AvroKeyValue<>(avroFileReader.next());
     assertNotNull(firstRecord.get());
     assertEquals("apple", firstRecord.getKey().toString());
-    assertEquals("apple", firstRecord.getValue().name.toString());
+    assertEquals("apple", firstRecord.getValue().getName().toString());
 
     // Verify that the second record was written;
     assertTrue(avroFileReader.hasNext());
@@ -109,7 +109,7 @@ public class TestAvroKeyValueRecordWriter {
         = new AvroKeyValue<>(avroFileReader.next());
     assertNotNull(secondRecord.get());
     assertEquals("banana", secondRecord.getKey().toString());
-    assertEquals("banana", secondRecord.getValue().name.toString());
+    assertEquals("banana", secondRecord.getValue().getName().toString());
 
     // That's all, folks.
     assertFalse(avroFileReader.hasNext());
@@ -120,7 +120,7 @@ public class TestAvroKeyValueRecordWriter {
     String attribute;
   }
   @Test public void testUsingReflection() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
     Schema schema = ReflectData.get().getSchema(R1.class);
     AvroJob.setOutputValueSchema(job, schema);
     TaskAttemptContext context = createMock(TaskAttemptContext.class);
@@ -171,7 +171,7 @@ public class TestAvroKeyValueRecordWriter {
 
   @Test
   public void testSyncableWriteRecords() throws IOException {
-    Job job = new Job();
+    Job job = Job.getInstance();
     AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
     TaskAttemptContext context = createMock(TaskAttemptContext.class);
 
@@ -191,11 +191,11 @@ public class TestAvroKeyValueRecordWriter {
         = new AvroKeyValueRecordWriter<>(keyConverter, valueConverter,
             new ReflectData(), compressionCodec, outputStream);
     TextStats appleStats = new TextStats();
-    appleStats.name = "apple";
+    appleStats.setName("apple");
     long pointOne = writer.sync();
     writer.write(new Text("apple"), new AvroValue<>(appleStats));
     TextStats bananaStats = new TextStats();
-    bananaStats.name = "banana";
+    bananaStats.setName("banana");
     long pointTwo = writer.sync();
     writer.write(new Text("banana"), new AvroValue<>(bananaStats));
     writer.close(context);
@@ -216,7 +216,7 @@ public class TestAvroKeyValueRecordWriter {
         = new AvroKeyValue<>(avroFileReader.next());
     assertNotNull(secondRecord.get());
     assertEquals("banana", secondRecord.getKey().toString());
-    assertEquals("banana", secondRecord.getValue().name.toString());
+    assertEquals("banana", secondRecord.getValue().getName().toString());
 
 
     avroFileReader.seek(pointOne);
@@ -226,7 +226,7 @@ public class TestAvroKeyValueRecordWriter {
         = new AvroKeyValue<>(avroFileReader.next());
     assertNotNull(firstRecord.get());
     assertEquals("apple", firstRecord.getKey().toString());
-    assertEquals("apple", firstRecord.getValue().name.toString());
+    assertEquals("apple", firstRecord.getValue().getName().toString());
 
 
     // That's all, folks.
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
index 5e66ecc..c31651d 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
@@ -56,11 +56,11 @@ public class TestAvroMultipleOutputs {
   public TemporaryFolder DIR = new TemporaryFolder();
 
   public static final Schema STATS_SCHEMA =
-          Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+    new Schema.Parser().parse("{\"name\":\"stats\",\"type\":\"record\","
                   + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
                   + "{\"name\":\"name\",\"type\":\"string\"}]}");
   public static final Schema STATS_SCHEMA_2 =
-          Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+    new Schema.Parser().parse("{\"name\":\"stats\",\"type\":\"record\","
                   + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
                   + "{\"name\":\"name1\",\"type\":\"string\"}]}");
 
@@ -93,8 +93,8 @@ public class TestAvroMultipleOutputs {
     @Override
     protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
             throws IOException, InterruptedException {
-      mCount.set(record.datum().count);
-      mText.set(record.datum().name.toString());
+      mCount.set(record.datum().getCount());
+      mText.set(record.datum().getName().toString());
       context.write(mText, mCount);
     }
   }
@@ -154,11 +154,11 @@ public class TestAvroMultipleOutputs {
     protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
             throws IOException, InterruptedException {
       TextStats record = new TextStats();
-      record.count = 0;
+      record.setCount(0);
       for (IntWritable count : counts) {
-        record.count += count.get();
+        record.setCount(record.getCount() + count.get());
       }
-      record.name = line.toString();
+      record.setName(line.toString());
       mStats.datum(record);
       context.write(mStats, NullWritable.get());
       amos.write("myavro3", mStats, NullWritable.get());
@@ -190,7 +190,7 @@ public class TestAvroMultipleOutputs {
 
   @Test
   public void testAvroGenericOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -208,7 +208,7 @@ public class TestAvroMultipleOutputs {
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
 
     Path outputPath = new Path(DIR.getRoot().getPath() + "/testAvroGenericOutput");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));
@@ -294,7 +294,7 @@ public class TestAvroMultipleOutputs {
 
   @Test
   public void testAvroSpecificOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -311,7 +311,7 @@ public class TestAvroMultipleOutputs {
 
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     Path outputPath = new Path(DIR.getRoot().getPath() + "/testAvroSpecificOutput");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));
@@ -323,7 +323,7 @@ public class TestAvroMultipleOutputs {
             new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
             new SpecificDatumReader<>())) {
       for (TextStats record : reader) {
-        counts.put(record.name.toString(), record.count);
+        counts.put(record.getName().toString(), record.getCount());
       }
     }
 
@@ -334,7 +334,7 @@ public class TestAvroMultipleOutputs {
 
   @Test
   public void testAvroInput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
@@ -365,7 +365,7 @@ public class TestAvroMultipleOutputs {
             new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
             new SpecificDatumReader<>())) {
       for (TextStats record : reader) {
-        counts.put(record.name.toString(), record.count);
+        counts.put(record.getName().toString(), record.getCount());
       }
     }
 
@@ -376,7 +376,7 @@ public class TestAvroMultipleOutputs {
 
   @Test
   public void testAvroMapOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
@@ -406,7 +406,7 @@ public class TestAvroMultipleOutputs {
             new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
             new SpecificDatumReader<>())) {
       for (TextStats record : reader) {
-        counts.put(record.name.toString(), record.count);
+        counts.put(record.getName().toString(), record.getCount());
       }
     }
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
index e713f9a..37edef9 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
@@ -54,11 +54,11 @@ public class TestAvroMultipleOutputsSyncable {
   public TemporaryFolder tmpFolder = new TemporaryFolder();
 
   public static final Schema STATS_SCHEMA =
-      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+    new Schema.Parser().parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
           + "{\"name\":\"name\",\"type\":\"string\"}]}");
   public static final Schema STATS_SCHEMA_2 =
-      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+    new Schema.Parser().parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
           + "{\"name\":\"name1\",\"type\":\"string\"}]}");
 
@@ -91,8 +91,8 @@ public class TestAvroMultipleOutputsSyncable {
     @Override
     protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
         throws IOException, InterruptedException {
-      mCount.set(record.datum().count);
-      mText.set(record.datum().name.toString());
+      mCount.set(record.datum().getCount());
+      mText.set(record.datum().getName().toString());
       context.write(mText, mCount);
     }
   }
@@ -154,11 +154,11 @@ public class TestAvroMultipleOutputsSyncable {
     protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
         throws IOException, InterruptedException {
       TextStats record = new TextStats();
-      record.count = 0;
+      record.setCount(0);
       for (IntWritable count : counts) {
-        record.count += count.get();
+        record.setCount(record.getCount() + count.get());
       }
-      record.name = line.toString();
+      record.setName(line.toString());
       mStats.datum(record);
       context.write(mStats, NullWritable.get());
       amos.sync("myavro3","myavro3");
@@ -191,7 +191,7 @@ public class TestAvroMultipleOutputsSyncable {
 
   @Test
   public void testAvroGenericOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -208,7 +208,7 @@ public class TestAvroMultipleOutputsSyncable {
     AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));
@@ -294,7 +294,7 @@ public class TestAvroMultipleOutputsSyncable {
 
   @Test
   public void testAvroSpecificOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -311,7 +311,7 @@ public class TestAvroMultipleOutputsSyncable {
 
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));
@@ -323,7 +323,7 @@ public class TestAvroMultipleOutputsSyncable {
         new SpecificDatumReader<>());
     Map<String, Integer> counts = new HashMap<>();
     for (TextStats record : reader) {
-      counts.put(record.name.toString(), record.count);
+      counts.put(record.getName().toString(), record.getCount());
     }
     reader.close();
 
@@ -334,7 +334,7 @@ public class TestAvroMultipleOutputsSyncable {
 
   @Test
   public void testAvroInput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
@@ -365,7 +365,7 @@ public class TestAvroMultipleOutputsSyncable {
         new SpecificDatumReader<>());
     Map<String, Integer> counts = new HashMap<>();
     for (TextStats record : reader) {
-      counts.put(record.name.toString(), record.count);
+      counts.put(record.getName().toString(), record.getCount());
     }
     reader.close();
 
@@ -376,7 +376,7 @@ public class TestAvroMultipleOutputsSyncable {
 
   @Test
   public void testAvroMapOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
@@ -406,7 +406,7 @@ public class TestAvroMultipleOutputsSyncable {
         new SpecificDatumReader<>());
     Map<String, Integer> counts = new HashMap<>();
     for (TextStats record : reader) {
-      counts.put(record.name.toString(), record.count);
+      counts.put(record.getName().toString(), record.getCount());
     }
     reader.close();
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java
index 6a37d25..6c877c5 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java
@@ -123,7 +123,7 @@ public class TestKeyValueInput {
     File inputFile = createInputFile();
 
     // Configure the job input.
-    Job job = new Job();
+    Job job = Job.getInstance();
     FileInputFormat.setInputPaths(job, new Path(inputFile.getAbsolutePath()));
     job.setInputFormatClass(AvroKeyValueInputFormat.class);
     AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.INT));
@@ -196,7 +196,7 @@ public class TestKeyValueInput {
     File inputFile = createInputFile();
 
     // Configure the job input.
-    Job job = new Job();
+    Job job = Job.getInstance();
     FileInputFormat.setInputPaths(job, new Path(inputFile.getAbsolutePath()));
     job.setInputFormatClass(AvroKeyValueInputFormat.class);
     AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.INT));
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
index c73b394..cb44206 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
@@ -79,7 +79,7 @@ public class TestKeyValueWordCount {
   public void testKeyValueMapReduce()
       throws ClassNotFoundException, IOException, InterruptedException, URISyntaxException {
     // Configure a word count job over our test input file.
-    Job job = new Job();
+    Job job = Job.getInstance();
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
             .toURI().toString()));
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
index 00d8c19..5873eba 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
@@ -60,7 +60,7 @@ public class TestWordCount {
   @Rule
   public TemporaryFolder tmpFolder = new TemporaryFolder();
   public static final Schema STATS_SCHEMA =
-      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+    new Schema.Parser().parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
           + "{\"name\":\"name\",\"type\":\"string\"}]}");
 
@@ -105,8 +105,8 @@ public class TestWordCount {
     @Override
     protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
         throws IOException, InterruptedException {
-      mCount.set(record.datum().count);
-      mText.set(record.datum().name.toString());
+      mCount.set(record.datum().getCount());
+      mText.set(record.datum().getName().toString());
       context.write(mText, mCount);
     }
   }
@@ -181,11 +181,11 @@ public class TestWordCount {
     protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
         throws IOException, InterruptedException {
       TextStats record = new TextStats();
-      record.count = 0;
+      record.setCount(0);
       for (IntWritable count : counts) {
-        record.count += count.get();
+        record.setCount(record.getCount() + count.get());
       }
-      record.name = line.toString();
+      record.setName(line.toString());
       mStats.datum(record);
       context.write(mStats, NullWritable.get());
     }
@@ -234,7 +234,7 @@ public class TestWordCount {
 
   @Test
   public void testAvroGenericOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -274,7 +274,7 @@ public class TestWordCount {
 
   @Test
   public void testAvroSpecificOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -303,7 +303,7 @@ public class TestWordCount {
         new SpecificDatumReader<>());
     Map<String, Integer> counts = new HashMap<>();
     for (TextStats record : reader) {
-      counts.put(record.name.toString(), record.count);
+      counts.put(record.getName().toString(), record.getCount());
     }
     reader.close();
 
@@ -314,7 +314,7 @@ public class TestWordCount {
 
   @Test
   public void testAvroReflectOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
@@ -354,7 +354,7 @@ public class TestWordCount {
 
   @Test
   public void testAvroInput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
@@ -384,7 +384,7 @@ public class TestWordCount {
         new SpecificDatumReader<>());
     Map<String, Integer> counts = new HashMap<>();
     for (TextStats record : reader) {
-      counts.put(record.name.toString(), record.count);
+      counts.put(record.getName().toString(), record.getCount());
     }
     reader.close();
 
@@ -395,7 +395,7 @@ public class TestWordCount {
 
   @Test
   public void testReflectInput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
             .toURI().toString()));
@@ -435,7 +435,7 @@ public class TestWordCount {
 
   @Test
   public void testAvroMapOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
@@ -465,7 +465,7 @@ public class TestWordCount {
         new SpecificDatumReader<>());
     Map<String, Integer> counts = new HashMap<>();
     for (TextStats record : reader) {
-      counts.put(record.name.toString(), record.count);
+      counts.put(record.getName().toString(), record.getCount());
     }
     reader.close();
 
@@ -479,7 +479,7 @@ public class TestWordCount {
    */
   @Test
   public void testAvroUsingTextFileOutput() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     FileInputFormat.setInputPaths(job, new Path(getClass()
             .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index ae4eec0..81f6575 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -112,6 +112,16 @@
             <target>1.8</target>
             <compilerArgs>
               <arg>-parameters</arg>
+              <!--<arg>-Werror</arg>-->
+              <arg>-Xlint:all</arg>
+              <!-- Override options warnings to support cross-compilation -->
+              <arg>-Xlint:-options</arg>
+              <!-- Temporary lint overrides, to be removed over time. -->
+              <arg>-Xlint:-rawtypes</arg>
+              <arg>-Xlint:-serial</arg>
+              <arg>-Xlint:-unchecked</arg>
+              <!--<arg>-Xlint:sunapi</arg>-->
+              <!--<arg>-XDenableSunApiLintControl</arg>-->
             </compilerArgs>
           </configuration>
         </plugin>
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
index 171929a..0bf84be 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
@@ -326,7 +326,7 @@ public class ProtobufData extends GenericData {
       }
       String json = toString(value);
       try {
-        return MAPPER.readTree(FACTORY.createJsonParser(json));
+        return MAPPER.readTree(FACTORY.createParser(json));
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
diff --git a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
index e8e7a29..a244bc6 100644
--- a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
+++ b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
@@ -1710,6 +1710,7 @@ public final class Test {
        * all the primitive types
        * </pre>
        */
+      @Override
       public boolean hasInt32() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
@@ -1720,6 +1721,7 @@ public final class Test {
        * all the primitive types
        * </pre>
        */
+      @Override
       public int getInt32() {
         return int32_;
       }
@@ -1755,12 +1757,14 @@ public final class Test {
       /**
        * <code>optional int64 int64 = 2;</code>
        */
+      @Override
       public boolean hasInt64() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
       /**
        * <code>optional int64 int64 = 2;</code>
        */
+      @Override
       public long getInt64() {
         return int64_;
       }
@@ -1788,12 +1792,14 @@ public final class Test {
       /**
        * <code>optional uint32 uint32 = 3;</code>
        */
+      @Override
       public boolean hasUint32() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
       /**
        * <code>optional uint32 uint32 = 3;</code>
        */
+      @Override
       public int getUint32() {
         return uint32_;
       }
@@ -1821,12 +1827,14 @@ public final class Test {
       /**
        * <code>optional uint64 uint64 = 4;</code>
        */
+      @Override
       public boolean hasUint64() {
         return ((bitField0_ & 0x00000008) == 0x00000008);
       }
       /**
        * <code>optional uint64 uint64 = 4;</code>
        */
+      @Override
       public long getUint64() {
         return uint64_;
       }
@@ -1854,12 +1862,14 @@ public final class Test {
       /**
        * <code>optional sint32 sint32 = 5;</code>
        */
+      @Override
       public boolean hasSint32() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
       }
       /**
        * <code>optional sint32 sint32 = 5;</code>
        */
+      @Override
       public int getSint32() {
         return sint32_;
       }
@@ -1887,12 +1897,14 @@ public final class Test {
       /**
        * <code>optional sint64 sint64 = 6;</code>
        */
+      @Override
       public boolean hasSint64() {
         return ((bitField0_ & 0x00000020) == 0x00000020);
       }
       /**
        * <code>optional sint64 sint64 = 6;</code>
        */
+      @Override
       public long getSint64() {
         return sint64_;
       }
@@ -1920,12 +1932,14 @@ public final class Test {
       /**
        * <code>optional fixed32 fixed32 = 7;</code>
        */
+      @Override
       public boolean hasFixed32() {
         return ((bitField0_ & 0x00000040) == 0x00000040);
       }
       /**
        * <code>optional fixed32 fixed32 = 7;</code>
        */
+      @Override
       public int getFixed32() {
         return fixed32_;
       }
@@ -1953,12 +1967,14 @@ public final class Test {
       /**
        * <code>optional fixed64 fixed64 = 8;</code>
        */
+      @Override
       public boolean hasFixed64() {
         return ((bitField0_ & 0x00000080) == 0x00000080);
       }
       /**
        * <code>optional fixed64 fixed64 = 8;</code>
        */
+      @Override
       public long getFixed64() {
         return fixed64_;
       }
@@ -1986,12 +2002,14 @@ public final class Test {
       /**
        * <code>optional sfixed32 sfixed32 = 9;</code>
        */
+      @Override
       public boolean hasSfixed32() {
         return ((bitField0_ & 0x00000100) == 0x00000100);
       }
       /**
        * <code>optional sfixed32 sfixed32 = 9;</code>
        */
+      @Override
       public int getSfixed32() {
         return sfixed32_;
       }
@@ -2019,12 +2037,14 @@ public final class Test {
       /**
        * <code>optional sfixed64 sfixed64 = 10;</code>
        */
+      @Override
       public boolean hasSfixed64() {
         return ((bitField0_ & 0x00000200) == 0x00000200);
       }
       /**
        * <code>optional sfixed64 sfixed64 = 10;</code>
        */
+      @Override
       public long getSfixed64() {
         return sfixed64_;
       }
@@ -2052,12 +2072,14 @@ public final class Test {
       /**
        * <code>optional float float = 11;</code>
        */
+      @Override
       public boolean hasFloat() {
         return ((bitField0_ & 0x00000400) == 0x00000400);
       }
       /**
        * <code>optional float float = 11;</code>
        */
+      @Override
       public float getFloat() {
         return float_;
       }
@@ -2085,12 +2107,14 @@ public final class Test {
       /**
        * <code>optional double double = 12;</code>
        */
+      @Override
       public boolean hasDouble() {
         return ((bitField0_ & 0x00000800) == 0x00000800);
       }
       /**
        * <code>optional double double = 12;</code>
        */
+      @Override
       public double getDouble() {
         return double_;
       }
@@ -2118,12 +2142,14 @@ public final class Test {
       /**
        * <code>optional bool bool = 13;</code>
        */
+      @Override
       public boolean hasBool() {
         return ((bitField0_ & 0x00001000) == 0x00001000);
       }
       /**
        * <code>optional bool bool = 13;</code>
        */
+      @Override
       public boolean getBool() {
         return bool_;
       }
@@ -2151,12 +2177,14 @@ public final class Test {
       /**
        * <code>optional string string = 14;</code>
        */
+      @Override
       public boolean hasString() {
         return ((bitField0_ & 0x00002000) == 0x00002000);
       }
       /**
        * <code>optional string string = 14;</code>
        */
+      @Override
       public java.lang.String getString() {
         java.lang.Object ref = string_;
         if (!(ref instanceof java.lang.String)) {
@@ -2171,6 +2199,7 @@ public final class Test {
       /**
        * <code>optional string string = 14;</code>
        */
+      @Override
       public com.google.protobuf.ByteString
           getStringBytes() {
         java.lang.Object ref = string_;
diff --git a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
index 53eb568..9085005 100644
--- a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
+++ b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
@@ -78,7 +78,7 @@ public class TestProtobuf {
 
     Object o = new ProtobufDatumReader<>(Foo.class).read
       (null,
-       DecoderFactory.get().createBinaryDecoder
+       DecoderFactory.get().binaryDecoder
        (new ByteArrayInputStream(bao.toByteArray()), null));
 
     assertEquals(foo, o);
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java
index a4b43b0..87ee9bd 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java
@@ -87,7 +87,7 @@ public class TestThrift {
 
     Object o = new ThriftDatumReader<>(Test.class).read
       (null,
-       DecoderFactory.get().createBinaryDecoder
+       DecoderFactory.get().binaryDecoder
        (new ByteArrayInputStream(bao.toByteArray()), null));
 
     assertEquals(test, o);
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
index 8f7f8a8..8fecca2 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
@@ -21,6 +21,7 @@ public enum E implements org.apache.thrift.TEnum {
   /**
    * Get the integer value of this enum value, as defined in the Thrift IDL.
    */
+  @Override
   public int getValue() {
     return value;
   }
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
index a76f89b..f5bf35a 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
@@ -83,10 +83,12 @@ public class Error extends TException implements org.apache.thrift.TBase<Error,
       _fieldName = fieldName;
     }
 
+    @Override
     public short getThriftFieldId() {
       return _thriftId;
     }
 
+    @Override
     public String getFieldName() {
       return _fieldName;
     }
@@ -121,6 +123,7 @@ public class Error extends TException implements org.apache.thrift.TBase<Error,
     }
   }
 
+  @Override
   public Error deepCopy() {
     return new Error(this);
   }
@@ -130,6 +133,7 @@ public class Error extends TException implements org.apache.thrift.TBase<Error,
     this.message = null;
   }
 
+  @Override
   public String getMessage() {
     return this.message;
   }
@@ -153,6 +157,7 @@ public class Error extends TException implements org.apache.thrift.TBase<Error,
     }
   }
 
+  @Override
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case MESSAGE:
@@ -166,6 +171,7 @@ public class Error extends TException implements org.apache.thrift.TBase<Error,
     }
   }
 
+  @Override
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case MESSAGE:
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
index d775eba..e36b753 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
@@ -69,6 +69,7 @@ public class Foo {
       super(iprot, oprot);
     }
 
+    @Override
     public void ping() throws org.apache.thrift.TException
     {
       send_ping();
@@ -88,6 +89,7 @@ public class Foo {
       return;
     }
 
+    @Override
     public int add(int num1, int num2) throws org.apache.thrift.TException
     {
       send_add(num1, num2);
@@ -112,6 +114,7 @@ public class Foo {
       throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "add failed: unknown result");
     }
 
+    @Override
     public void zip() throws org.apache.thrift.TException
     {
       send_zip();
@@ -141,6 +144,7 @@ public class Foo {
       super(protocolFactory, clientManager, transport);
     }
 
+    @Override
     public void ping(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
       checkReady();
       ping_call method_call = new ping_call(resultHandler, this, ___protocolFactory, ___transport);
@@ -153,6 +157,7 @@ public class Foo {
         super(client, protocolFactory, transport, resultHandler, false);
       }
 
+      @Override
       public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
         prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("ping", org.apache.thrift.protocol.TMessageType.CALL, 0));
         ping_args args = new ping_args();
@@ -171,6 +176,7 @@ public class Foo {
       }
     }
 
+    @Override
     public void add(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
       checkReady();
       add_call method_call = new add_call(num1, num2, resultHandler, this, ___protocolFactory, ___transport);
@@ -187,6 +193,7 @@ public class Foo {
         this.num2 = num2;
       }
 
+      @Override
       public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
         prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("add", org.apache.thrift.protocol.TMessageType.CALL, 0));
         add_args args = new add_args();
@@ -196,6 +203,7 @@ public class Foo {
         prot.writeMessageEnd();
       }
 
+      @Override
       public Object getResult() throws org.apache.thrift.TException {
         if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
           throw new IllegalStateException("Method call not finished!");
@@ -206,6 +214,7 @@ public class Foo {
       }
     }
 
+    @Override
     public void zip(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
       checkReady();
       zip_call method_call = new zip_call(resultHandler, this, ___protocolFactory, ___transport);
@@ -218,6 +227,7 @@ public class Foo {
         super(client, protocolFactory, transport, resultHandler, true);
       }
 
+      @Override
       public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
         prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("zip", org.apache.thrift.protocol.TMessageType.CALL, 0));
         zip_args args = new zip_args();
@@ -225,6 +235,7 @@ public class Foo {
         prot.writeMessageEnd();
       }
 
+      @Override
       public Object getResult()  {
         if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
           throw new IllegalStateException("Method call not finished!");
@@ -238,7 +249,7 @@ public class Foo {
   }
 
   public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
-    private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
+    private static final Logger LOG = LoggerFactory.getLogger(Processor.class.getName());
     public Processor(I iface) {
       super(iface, getProcessMap(new HashMap<>()));
     }
@@ -259,14 +270,17 @@ public class Foo {
         super("ping");
       }
 
+      @Override
       public ping_args getEmptyArgsInstance() {
         return new ping_args();
       }
 
+      @Override
       protected boolean isOneway() {
         return false;
       }
 
+      @Override
       public ping_result getResult(I iface, ping_args args) throws org.apache.thrift.TException {
         ping_result result = new ping_result();
         iface.ping();
@@ -279,10 +293,12 @@ public class Foo {
         super("add");
       }
 
+      @Override
       public add_args getEmptyArgsInstance() {
         return new add_args();
       }
 
+      @Override
       protected boolean isOneway() {
         return false;
       }
@@ -342,9 +358,11 @@ public class Foo {
         return new ping_args();
       }
 
+      @Override
       public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
         return new AsyncMethodCallback<Void>() {
+          @Override
           public void onComplete(Void o) {
             ping_result result = new ping_result();
             try {
@@ -355,6 +373,7 @@ public class Foo {
             }
             fb.close();
           }
+          @Override
           public void onError(Exception e) {
             byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
             org.apache.thrift.TBase msg;
@@ -374,10 +393,12 @@ public class Foo {
         };
       }
 
+      @Override
       protected boolean isOneway() {
         return false;
       }
 
+      @Override
       public void start(I iface, ping_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
         iface.ping(resultHandler);
       }
@@ -388,13 +409,16 @@ public class Foo {
         super("add");
       }
 
+      @Override
       public add_args getEmptyArgsInstance() {
         return new add_args();
       }
 
+      @Override
       public AsyncMethodCallback<Integer> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
         return new AsyncMethodCallback<Integer>() {
+          @Override
           public void onComplete(Integer o) {
             add_result result = new add_result();
             result.success = o;
@@ -407,6 +431,7 @@ public class Foo {
             }
             fb.close();
           }
+          @Override
           public void onError(Exception e) {
             byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
             org.apache.thrift.TBase msg;
@@ -426,10 +451,12 @@ public class Foo {
         };
       }
 
+      @Override
       protected boolean isOneway() {
         return false;
       }
 
+      @Override
       public void start(I iface, add_args args, org.apache.thrift.async.AsyncMethodCallback<Integer> resultHandler) throws TException {
         iface.add(args.num1, args.num2,resultHandler);
       }
@@ -440,24 +467,30 @@ public class Foo {
         super("zip");
       }
 
+      @Override
       public zip_args getEmptyArgsInstance() {
         return new zip_args();
       }
 
+      @Override
       public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
         return new AsyncMethodCallback<Void>() {
+          @Override
           public void onComplete(Void o) {
           }
+          @Override
           public void onError(Exception e) {
           }
         };
       }
 
+      @Override
       protected boolean isOneway() {
         return true;
       }
 
+      @Override
       public void start(I iface, zip_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
         iface.zip(resultHandler);
       }
@@ -523,10 +556,12 @@ public class Foo {
         _fieldName = fieldName;
       }
 
+      @Override
       public short getThriftFieldId() {
         return _thriftId;
       }
 
+      @Override
       public String getFieldName() {
         return _fieldName;
       }
@@ -547,6 +582,7 @@ public class Foo {
     public ping_args(ping_args other) {
     }
 
+    @Override
     public ping_args deepCopy() {
       return new ping_args(this);
     }
@@ -555,11 +591,13 @@ public class Foo {
     public void clear() {
     }
 
+    @Override
     public void setFieldValue(_Fields field, Object value) {
       switch (field) {
       }
     }
 
+    @Override
     public Object getFieldValue(_Fields field) {
       switch (field) {
       }
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
index 5330bfa..5c68a4f 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
@@ -71,10 +71,12 @@ public class FooOrBar extends org.apache.thrift.TUnion<FooOrBar, FooOrBar._Field
       _fieldName = fieldName;
     }
 
+    @Override
     public short getThriftFieldId() {
       return _thriftId;
     }
 
+    @Override
     public String getFieldName() {
       return _fieldName;
     }
@@ -102,6 +104,8 @@ public class FooOrBar extends org.apache.thrift.TUnion<FooOrBar, FooOrBar._Field
   public FooOrBar(FooOrBar other) {
     super(other);
   }
+
+  @Override
   public FooOrBar deepCopy() {
     return new FooOrBar(this);
   }
@@ -244,6 +248,7 @@ public class FooOrBar extends org.apache.thrift.TUnion<FooOrBar, FooOrBar._Field
     return _Fields.findByThriftIdOrThrow(id);
   }
 
+  @Override
   public _Fields fieldForId(int fieldId) {
     return _Fields.findByThriftId(fieldId);
   }
@@ -286,7 +291,7 @@ public class FooOrBar extends org.apache.thrift.TUnion<FooOrBar, FooOrBar._Field
     return setField_ == _Fields.BAR;
   }
 
-
+  @Override
   public boolean equals(Object other) {
     if (other instanceof FooOrBar) {
       return equals((FooOrBar)other);
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
index 86c273d..6b0d805 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
@@ -83,10 +83,12 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
       _fieldName = fieldName;
     }
 
+    @Override
     public short getThriftFieldId() {
       return _thriftId;
     }
 
+    @Override
     public String getFieldName() {
       return _fieldName;
     }
@@ -123,6 +125,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
     this.x = other.x;
   }
 
+  @Override
   public Nested deepCopy() {
     return new Nested(this);
   }
@@ -155,6 +158,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __X_ISSET_ID, value);
   }
 
+  @Override
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case X:
@@ -168,6 +172,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
     }
   }
 
+  @Override
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case X:
@@ -178,6 +183,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
   }
 
   /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  @Override
   public boolean isSet(_Fields field) {
     if (field == null) {
       throw new IllegalArgumentException();
@@ -241,14 +247,17 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
     return 0;
   }
 
+  @Override
   public _Fields fieldForId(int fieldId) {
     return _Fields.findByThriftId(fieldId);
   }
 
+  @Override
   public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
     schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
   }
 
+  @Override
   public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
     schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
   }
@@ -289,6 +298,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
   }
 
   private static class NestedStandardSchemeFactory implements SchemeFactory {
+    @Override
     public NestedStandardScheme getScheme() {
       return new NestedStandardScheme();
     }
@@ -296,6 +306,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
 
   private static class NestedStandardScheme extends StandardScheme<Nested> {
 
+    @Override
     public void read(org.apache.thrift.protocol.TProtocol iprot, Nested struct) throws org.apache.thrift.TException {
       org.apache.thrift.protocol.TField schemeField;
       iprot.readStructBegin();
@@ -323,6 +334,7 @@ public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>,
       struct.validate();
     }
 
+    @Override
     public void write(org.apache.thrift.protocol.TProtocol oprot, Nested struct) throws org.apache.thrift.TException {
       struct.validate();
 
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
index 8b07ae9..a6b736e 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
@@ -167,10 +167,12 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
       _fieldName = fieldName;
     }
 
+    @Override
     public short getThriftFieldId() {
       return _thriftId;
     }
 
+    @Override
     public String getFieldName() {
       return _fieldName;
     }
@@ -310,6 +312,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
     }
   }
 
+  @Override
   public Test deepCopy() {
     return new Test(this);
   }
@@ -760,6 +763,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
     }
   }
 
+  @Override
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case BOOL_FIELD:
@@ -893,6 +897,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
     }
   }
 
+  @Override
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case BOOL_FIELD:
@@ -948,6 +953,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
   }
 
   /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  @Override
   public boolean isSet(_Fields field) {
     if (field == null) {
       throw new IllegalArgumentException();
@@ -1326,14 +1332,17 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
     return 0;
   }
 
+  @Override
   public _Fields fieldForId(int fieldId) {
     return _Fields.findByThriftId(fieldId);
   }
 
+  @Override
   public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
     schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
   }
 
+  @Override
   public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
     schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
   }
@@ -1477,6 +1486,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
   }
 
   private static class TestStandardSchemeFactory implements SchemeFactory {
+    @Override
     public TestStandardScheme getScheme() {
       return new TestStandardScheme();
     }
@@ -1484,6 +1494,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
 
   private static class TestStandardScheme extends StandardScheme<Test> {
 
+    @Override
     public void read(org.apache.thrift.protocol.TProtocol iprot, Test struct) throws org.apache.thrift.TException {
       org.apache.thrift.protocol.TField schemeField;
       iprot.readStructBegin();
@@ -1665,6 +1676,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
       struct.validate();
     }
 
+    @Override
     public void write(org.apache.thrift.protocol.TProtocol oprot, Test struct) throws org.apache.thrift.TException {
       struct.validate();
 
@@ -1770,6 +1782,7 @@ public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.i
   }
 
   private static class TestTupleSchemeFactory implements SchemeFactory {
+    @Override
     public TestTupleScheme getScheme() {
       return new TestTupleScheme();
     }
diff --git a/lang/java/tools/pom.xml b/lang/java/tools/pom.xml
index aab4813..a1f4728 100644
--- a/lang/java/tools/pom.xml
+++ b/lang/java/tools/pom.xml
@@ -158,6 +158,28 @@
           <parallel>none</parallel>
         </configuration>
       </plugin>
+      <!-- Allow guava because hadoop brings it as a transitive dependency. -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-banned-dependencies</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <includes>
+                    <include>com.google.guava:guava</include>
+                  </includes>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
index b40ab2d..877d25c 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
@@ -77,7 +77,7 @@ public class FromTextTool implements Tool {
     DataFileWriter<ByteBuffer> writer =
         new DataFileWriter<>(new GenericDatumWriter<>());
     writer.setCodec(codecFactory);
-    writer.create(Schema.parse(TEXT_FILE_SCHEMA), outStream);
+    writer.create(new Schema.Parser().parse(TEXT_FILE_SCHEMA), outStream);
 
     ByteBuffer line = ByteBuffer.allocate(128);
     boolean returnSeen = false;
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java
index d44f054..7b83059 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java
@@ -156,7 +156,7 @@ public class TetherTool implements Tool {
       job.set(AvroJob.OUTPUT_SCHEMA, Schema.parse(outschema).toString());
       if (line.hasOption("outschemamap")) {
         job.set(AvroJob.MAP_OUTPUT_SCHEMA,
-            Schema.parse((File)line.getParsedOptionValue("outschemamap")).toString());
+          new Schema.Parser().parse((File)line.getParsedOptionValue("outschemamap")).toString());
       }
       if (line.hasOption("reduces")) {
         job.setNumReduceTasks((Integer)line.getParsedOptionValue("reduces"));
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
index 1cbfc4b..c418bb0 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
@@ -73,7 +73,7 @@ public class TrevniMetadataTool implements Tool {
   /** Read a Trevni file and print each row as a JSON object. */
   public void dump(Input input, PrintStream out, boolean pretty)
     throws IOException {
-    this.generator = FACTORY.createJsonGenerator(out, JsonEncoding.UTF8);
+    this.generator = FACTORY.createGenerator(out, JsonEncoding.UTF8);
     if (pretty) {
       generator.useDefaultPrettyPrinter();
     } else {                                      // ensure newline separation
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
index fd2674a..170c0f5 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
@@ -77,7 +77,7 @@ public class TrevniToJsonTool implements Tool {
   /** Read a Trevni file and print each row as a JSON object. */
   public void toJson(Input input, PrintStream out, boolean pretty)
     throws IOException {
-    this.generator = FACTORY.createJsonGenerator(out, JsonEncoding.UTF8);
+    this.generator = FACTORY.createGenerator(out, JsonEncoding.UTF8);
     if (pretty) {
       generator.useDefaultPrettyPrinter();
     } else {                                      // ensure newline separation
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
index d4904ae..9c50d1c 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
@@ -190,7 +190,7 @@ class Util {
       pathList.add(path);
     } else if (fs.isDirectory(path)) {
       for (FileStatus status : fs.listStatus(path)) {
-        if(!status.isDir()) {
+        if(!status.isDirectory()) {
           pathList.add(status.getPath());
         }
       }
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
index a9ebab6..11e21cb 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
@@ -19,8 +19,8 @@ package org.apache.avro.tool;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
@@ -29,7 +29,6 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
-import java.io.StringBufferInputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -200,7 +199,7 @@ public class TestDataFileTools {
         args.add("-");
         args.addAll(extra);
         new DataFileWriteTool().run(
-                new StringBufferInputStream(jsonData),
+                new ByteArrayInputStream(jsonData.getBytes("UTF-8")),
                 new PrintStream(out), // stdout
                 null, // stderr
                 args);
@@ -230,7 +229,7 @@ public class TestDataFileTools {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintStream out = new PrintStream(baos);
     new DataFileWriteTool().run(
-        new StringBufferInputStream("{"),
+        new ByteArrayInputStream("{".getBytes("UTF-8")),
         new PrintStream(out), // stdout
         null, // stderr
         Arrays.asList("-schema", "{ \"type\":\"record\", \"fields\":" +
@@ -271,7 +270,7 @@ public class TestDataFileTools {
     try(FileOutputStream fout = new FileOutputStream(outFile)) {
       try(PrintStream out = new PrintStream(fout)) {
         new DataFileWriteTool().run(
-                new StringBufferInputStream(json),
+                new ByteArrayInputStream(json.getBytes("UTF-8")),
                 new PrintStream(out), // stdout
                 null, // stderr
                 Arrays.asList("-schema", schema, "-"));
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
index 0a8db48..02118a7 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
@@ -73,7 +73,7 @@ public class TestTetherTool {
     String outputPathStr = OUTPUT_DIR.getRoot().getPath();
     Path outputPath = new Path(outputPathStr);
 
-    outputPath.getFileSystem(job).delete(outputPath);
+    outputPath.getFileSystem(job).delete(outputPath, true);
 
     // create the input file
     WordCountUtil.writeLinesFile(inputPathStr + "/lines.avro");
diff --git a/lang/java/trevni/avro/pom.xml b/lang/java/trevni/avro/pom.xml
index 9499760..62eb7e1 100644
--- a/lang/java/trevni/avro/pom.xml
+++ b/lang/java/trevni/avro/pom.xml
@@ -31,6 +31,33 @@
   <url>http://avro.apache.org/</url>
   <description>Trevni Java Avro</description>
 
+  <build>
+    <plugins>
+      <!-- Allow guava because hadoop brings it as a transitive dependency. -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-banned-dependencies</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <includes>
+                    <include>com.google.guava:guava</include>
+                  </includes>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
   <dependencies>
     <dependency>
       <groupId>${project.groupId}</groupId>
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
index 194c68a..ffc3596 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
@@ -91,7 +91,7 @@ public class AvroColumnReader<D>
     this.reader = new ColumnFileReader(params.input);
     this.model = params.model;
     this.fileSchema =
-      Schema.parse(reader.getMetaData().getString(AvroColumnWriter.SCHEMA_KEY));
+      new Schema.Parser().parse(reader.getMetaData().getString(AvroColumnWriter.SCHEMA_KEY));
     this.readSchema = params.schema == null ? fileSchema : params.schema;
     initialize();
   }
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
index 1cc4a28..5c452d3 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
@@ -78,10 +78,13 @@ public class AvroTrevniInputFormat<T>
       private float rows = reader.getRowCount();
       private long row;
 
+      @Override
       public AvroWrapper<T> createKey() { return new AvroWrapper<>(null); }
 
+      @Override
       public NullWritable createValue() { return NullWritable.get(); }
 
+      @Override
       public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
         throws IOException {
         if (!reader.hasNext())
@@ -91,10 +94,13 @@ public class AvroTrevniInputFormat<T>
         return true;
       }
 
+      @Override
       public float getProgress() throws IOException { return row / rows; }
 
+      @Override
       public long getPos() throws IOException { return row; }
 
+      @Override
       public void close() throws IOException { reader.close(); }
 
     };
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
index b3a4b9d..edd6db6 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
@@ -77,7 +77,7 @@ public class AvroTrevniOutputFormat <T>
     final FileSystem fs = dir.getFileSystem(job);
     if (!fs.mkdirs(dir))
       throw new IOException("Failed to create directory: " + dir);
-    final long blockSize = fs.getDefaultBlockSize();
+    final long blockSize = fs.getDefaultBlockSize(dir);
 
     return new RecordWriter<AvroWrapper<T>, NullWritable>() {
       private int part = 0;
@@ -95,6 +95,7 @@ public class AvroTrevniOutputFormat <T>
         writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());
       }
 
+      @Override
       public void write(AvroWrapper<T> wrapper, NullWritable ignore)
         throws IOException {
         writer.write(wrapper.datum());
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
index fde9b1d..c4bf836 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
@@ -90,7 +90,7 @@ public abstract class AvroTrevniRecordWriterBase<K,V, T> extends RecordWriter<K,
     fs = dirPath.getFileSystem(context.getConfiguration());
     fs.mkdirs(dirPath);
 
-    blockSize = fs.getDefaultBlockSize();
+    blockSize = fs.getDefaultBlockSize(dirPath);
   }
 
   /**
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java
index 2430e51..568d4bc 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java
@@ -48,7 +48,7 @@ public class TestCases {
   }
 
   private void runCase(File dir) throws Exception {
-    Schema schema = Schema.parse(new File(dir, "input.avsc"));
+    Schema schema = new Schema.Parser().parse(new File(dir, "input.avsc"));
     List<Object> data = fromJson(schema, new File(dir, "input.json"));
 
     // write full data
@@ -64,7 +64,7 @@ public class TestCases {
     // test that sub-schemas read correctly
     for (File f : dir.listFiles())
       if (f.isDirectory() && !f.getName().startsWith(".")) {
-        Schema s = Schema.parse(new File(f, "sub.avsc"));
+        Schema s = new Schema.Parser().parse(new File(f, "sub.avsc"));
         checkRead(s, fromJson(s, new File(f, "sub.json")));
       }
   }
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
index cbc8571..1d2f2e8 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
@@ -63,9 +63,9 @@ public class TestEvolvedSchema {
   GenericData.Record evolvedRecord;
   GenericData.Record innerRecord;
 
-  private static final Schema writer = Schema.parse(writerSchema);
-  private static final Schema evolved = Schema.parse(evolvedSchema2);
-  private static final Schema inner = Schema.parse(innerSchema);
+  private static final Schema writer = new Schema.Parser().parse(writerSchema);
+  private static final Schema evolved = new Schema.Parser().parse(evolvedSchema2);
+  private static final Schema inner = new Schema.Parser().parse(innerSchema);
 
   @Before
   public void setUp() {
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
index 62d58ac..d09558c 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
@@ -72,7 +72,7 @@ public class TestShredder {
     +"]}";
 
   @Test public void testSimpleRecord() throws Exception {
-    check(Schema.parse(SIMPLE_RECORD),
+    check(new Schema.Parser().parse(SIMPLE_RECORD),
           new ColumnMetaData("x", ValueType.INT),
           new ColumnMetaData("y", ValueType.STRING));
   }
@@ -84,8 +84,8 @@ public class TestShredder {
       +"{\"name\":\"z\",\"type\":\"int\","
       +"\"default\":1,\""+RandomData.USE_DEFAULT+"\":true}"
       +"]}";
-    checkWrite(Schema.parse(SIMPLE_RECORD));
-    checkRead(Schema.parse(s));
+    checkWrite(new Schema.Parser().parse(SIMPLE_RECORD));
+    checkRead(new Schema.Parser().parse(s));
   }
 
   @Test public void testNestedRecord() throws Exception {
@@ -95,7 +95,7 @@ public class TestShredder {
       +"{\"name\":\"R\",\"type\":"+SIMPLE_RECORD+"},"
       +"{\"name\":\"y\",\"type\":\"string\"}"
       +"]}";
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("x", ValueType.INT),
           new ColumnMetaData("R#x", ValueType.INT),
           new ColumnMetaData("R#y", ValueType.STRING),
@@ -108,7 +108,7 @@ public class TestShredder {
       +"{\"name\":\"R1\",\"type\":"+SIMPLE_RECORD+"},"
       +"{\"name\":\"R2\",\"type\":\"R\"}"
       +"]}";
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
       new ColumnMetaData("R1#x", ValueType.INT),
       new ColumnMetaData("R1#y", ValueType.STRING),
       new ColumnMetaData("R2#x", ValueType.INT),
@@ -117,7 +117,7 @@ public class TestShredder {
 
   @Test public void testSimpleArray() throws Exception {
     String s = "{\"type\":\"array\",\"items\":\"long\"}";
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("[]", ValueType.LONG).isArray(true));
   }
 
@@ -126,7 +126,7 @@ public class TestShredder {
 
   @Test public void testArray() throws Exception {
     ColumnMetaData p = new ColumnMetaData("[]", ValueType.NULL).isArray(true);
-    check(Schema.parse(RECORD_ARRAY),
+    check(new Schema.Parser().parse(RECORD_ARRAY),
           p,
           new ColumnMetaData("[]#x", ValueType.INT).setParent(p),
           new ColumnMetaData("[]#y", ValueType.STRING).setParent(p));
@@ -134,14 +134,14 @@ public class TestShredder {
 
   @Test public void testSimpleUnion() throws Exception {
     String s = "[\"int\",\"string\"]";
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("int", ValueType.INT).isArray(true),
           new ColumnMetaData("string", ValueType.STRING).isArray(true));
   }
 
   @Test public void testSimpleOptional() throws Exception {
     String s = "[\"null\",\"string\"]";
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("string", ValueType.STRING).isArray(true));
   }
 
@@ -149,7 +149,7 @@ public class TestShredder {
 
   @Test public void testUnion() throws Exception {
     ColumnMetaData p = new ColumnMetaData("R", ValueType.NULL).isArray(true);
-    check(Schema.parse(UNION),
+    check(new Schema.Parser().parse(UNION),
           new ColumnMetaData("int", ValueType.INT).isArray(true),
           p,
           new ColumnMetaData("R#x", ValueType.INT).setParent(p),
@@ -164,7 +164,7 @@ public class TestShredder {
       +"{\"name\":\"y\",\"type\":\"string\"}"
       +"]}";
     ColumnMetaData p = new ColumnMetaData("A[]", ValueType.NULL).isArray(true);
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("x", ValueType.INT),
           p,
           new ColumnMetaData("A[]#x", ValueType.INT).setParent(p),
@@ -180,7 +180,7 @@ public class TestShredder {
       +"{\"name\":\"y\",\"type\":\"string\"}"
       +"]}";
     ColumnMetaData p = new ColumnMetaData("u/R", ValueType.NULL).isArray(true);
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("x", ValueType.INT),
           new ColumnMetaData("u/int", ValueType.INT).isArray(true),
           p,
@@ -198,7 +198,7 @@ public class TestShredder {
     ColumnMetaData r = new ColumnMetaData("a[]/R", ValueType.NULL)
       .setParent(p)
       .isArray(true);
-      check(Schema.parse(s),
+      check(new Schema.Parser().parse(s),
           p,
           new ColumnMetaData("a[]/int", ValueType.INT)
             .setParent(p)
@@ -217,7 +217,7 @@ public class TestShredder {
     ColumnMetaData r = new ColumnMetaData("a/array[]", ValueType.NULL)
       .setParent(q)
       .isArray(true);
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           new ColumnMetaData("a/int", ValueType.INT).isArray(true),
           q,
           r,
@@ -228,7 +228,7 @@ public class TestShredder {
   @Test public void testSimpleMap() throws Exception {
     String s = "{\"type\":\"map\",\"values\":\"long\"}";
     ColumnMetaData p = new ColumnMetaData(">", ValueType.NULL).isArray(true);
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           p,
           new ColumnMetaData(">key", ValueType.STRING).setParent(p),
           new ColumnMetaData(">value", ValueType.LONG).setParent(p));
@@ -237,7 +237,7 @@ public class TestShredder {
   @Test public void testMap() throws Exception {
     String s = "{\"type\":\"map\",\"values\":"+SIMPLE_RECORD+"}";
     ColumnMetaData p = new ColumnMetaData(">", ValueType.NULL).isArray(true);
-    check(Schema.parse(s),
+    check(new Schema.Parser().parse(s),
           p,
           new ColumnMetaData(">key", ValueType.STRING).setParent(p),
           new ColumnMetaData(">value#x", ValueType.INT).setParent(p),
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
index 8f6c9b5..6814122 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
@@ -117,7 +117,7 @@ public class TestWordCount {
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
 
 
-    Schema subSchema = Schema.parse("{\"type\":\"record\"," +
+    Schema subSchema = new Schema.Parser().parse("{\"type\":\"record\"," +
                                     "\"name\":\"PairValue\","+
                                     "\"fields\": [ " +
                                     "{\"name\":\"value\", \"type\":\"long\"}" +
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
index 51900ec..da80b74 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
@@ -110,7 +110,7 @@ public class TestKeyValueWordCount {
   }
 
   public void checkOutputFormat() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest", "part-r-00000");
 
@@ -139,7 +139,7 @@ public class TestKeyValueWordCount {
   }
 
   public void checkInputFormat() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest");
 
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
index c9728ba..57e667e 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
@@ -122,7 +122,7 @@ public class TestKeyWordCount {
   }
 
   public void checkOutputFormat() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest", "part-r-00000");
 
@@ -150,13 +150,13 @@ public class TestKeyWordCount {
   }
 
   public void checkInputFormat() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
 
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest");
 
     job.setMapperClass(Counter.class);
 
-    Schema subSchema = Schema.parse("{\"type\":\"record\"," +
+    Schema subSchema = new Schema.Parser().parse("{\"type\":\"record\"," +
                                     "\"name\":\"PairValue\","+
                                     "\"fields\": [ " +
                                     "{\"name\":\"value\", \"type\":\"long\"}" +
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
index e48dafd..2158096 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
@@ -29,7 +29,7 @@ class OutputBuffer extends ByteArrayOutputStream {
 
   private int bitCount;                           // position in booleans
 
-  public OutputBuffer() { super(BLOCK_SIZE + BLOCK_SIZE >> 2); }
+  public OutputBuffer() { super((BLOCK_SIZE + BLOCK_SIZE) >> 2); }
 
   public boolean isFull() { return size() >= BLOCK_SIZE; }
 
@@ -236,36 +236,36 @@ class OutputBuffer extends ByteArrayOutputStream {
 
   public static int size(int n) {
     n = (n << 1) ^ (n >> 31);                     // move sign to low-order bit
-    if (n <= (1<<7*1)-1)
+    if (n <= (1<<(7*1))-1)
       return 1;
-    if (n <= (1<<7*2)-1)
+    if (n <= (1<<(7*2))-1)
       return 2;
-    if (n <= (1<<7*3)-1)
+    if (n <= (1<<(7*3))-1)
       return 3;
-    if (n <= (1<<7*4)-1)
+    if (n <= (1<<(7*4))-1)
       return 4;
     return 5;
   }
 
   public static int size(long n) {
     n = (n << 1) ^ (n >> 63);                     // move sign to low-order bit
-    if (n <= (1<<7*1)-1)
+    if (n <= (1L<<(7*1))-1)
       return 1;
-    if (n <= (1<<7*2)-1)
+    if (n <= (1L<<(7*2))-1)
       return 2;
-    if (n <= (1<<7*3)-1)
+    if (n <= (1L<<(7*3))-1)
       return 3;
-    if (n <= (1<<7*4)-1)
+    if (n <= (1L<<(7*4))-1)
       return 4;
-    if (n <= (1<<7*5)-1)
+    if (n <= (1L<<(7*5))-1)
       return 5;
-    if (n <= (1<<7*6)-1)
+    if (n <= (1L<<(7*6))-1)
       return 6;
-    if (n <= (1<<7*7)-1)
+    if (n <= (1L<<(7*7))-1)
       return 7;
-    if (n <= (1<<7*8)-1)
+    if (n <= (1L<<(7*8))-1)
       return 8;
-    if (n <= (1<<7*9)-1)
+    if (n <= (1L<<(7*9))-1)
       return 9;
       return 9;
     return 10;
   }
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
index 958668b..ccecb1e 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
@@ -20,7 +20,7 @@ package org.apache.trevni;
 /** The datatypes that may be stored in a column. */
 public enum ValueType {
   NULL, BOOLEAN, INT, LONG, FIXED32, FIXED64, FLOAT, DOUBLE, STRING, BYTES;
-  private String name;
+  private final String name;
   private ValueType() { this.name = this.name().toLowerCase(); }
 
   /** Return the name of this type. */
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
index b52b9d4..84e9e25 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
@@ -17,6 +17,8 @@
  */
 package org.apache.trevni;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.util.Random;
 
 import java.io.ByteArrayOutputStream;
@@ -246,7 +248,7 @@ public class TestIOBuffers {
   @Test public void testSkipBytes() throws Exception {
     long sentinel = Long.MAX_VALUE;
     OutputBuffer out = new OutputBuffer();
-    out.writeValue("trevni".getBytes(), ValueType.BYTES);
+    out.writeValue("trevni".getBytes(UTF_8), ValueType.BYTES);
     out.writeLong(sentinel);
 
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
diff --git a/pom.xml b/pom.xml
index 64095c0..ffa6685 100644
--- a/pom.xml
+++ b/pom.xml
@@ -108,7 +108,6 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-enforcer-plugin</artifactId>
         <version>${enforcer-plugin.version}</version>
-        <inherited>false</inherited>
         <executions>
           <execution>
             <id>enforce</id>


Mime
View raw message