lucenenet-commits mailing list archives

From pnas...@apache.org
Subject [2/5] git commit: Blocking Terms Codec - nearly finished
Date Sun, 14 Sep 2014 22:06:10 GMT
Blocking Terms Codec - nearly finished


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/b6b784fb
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/b6b784fb
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/b6b784fb

Branch: refs/heads/master
Commit: b6b784fb6cfe3a5c7785a24297e638f1b83af54c
Parents: 61a5c2c
Author: Prescott Nasser <pnasser@apache.org>
Authored: Sun Sep 7 02:52:22 2014 -0700
Committer: Prescott Nasser <pnasser@apache.org>
Committed: Sun Sep 7 02:52:22 2014 -0700

----------------------------------------------------------------------
 .../BlockTerms/BlockTermsReader.cs              |  82 +-
 .../BlockTerms/BlockTermsWriter.cs              | 666 +++++++-------
 .../BlockTerms/FixedGapTermsIndexReader.cs      | 861 ++++++++++---------
 .../BlockTerms/FixedGapTermsIndexWriter.cs      | 193 ++---
 .../BlockTerms/TermsIndexReaderBase.cs          |  42 +-
 .../BlockTerms/TermsIndexWriterBase.cs          |  13 +-
 .../BlockTerms/VariableGapTermsIndexReader.cs   | 277 +++---
 .../BlockTerms/VariableGapTermsIndexWriter.cs   | 516 ++++++-----
 8 files changed, 1357 insertions(+), 1293 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b6b784fb/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
index 1cbf8c3..f5c6d26 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
@@ -108,20 +108,20 @@ public class BlockTermsReader : FieldsProducer {
       // Read per-field details
       seekDir(in, dirOffset);
 
-      final int numFields = in.readVInt();
+      int numFields = in.readVInt();
       if (numFields < 0) {
         throw new CorruptIndexException("invalid number of fields: " + numFields + " (resource=" + in + ")");
       }
       for(int i=0;i<numFields;i++) {
-        final int field = in.readVInt();
-        final long numTerms = in.readVLong();
+        int field = in.readVInt();
+        long numTerms = in.readVLong();
         Debug.Assert(numTerms >= 0);
-        final long termsStartPointer = in.readVLong();
-        final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
-        final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
-        final long sumDocFreq = in.readVLong();
-        final int docCount = in.readVInt();
-        final int longsSize = version >= BlockTermsWriter.VERSION_META_ARRAY ? in.readVInt() : 0;
+        long termsStartPointer = in.readVLong();
+        FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
+        long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
+        long sumDocFreq = in.readVLong();
+        int docCount = in.readVInt();
+        int longsSize = version >= BlockTermsWriter.VERSION_META_ARRAY ? in.readVInt() : 0;
         if (docCount < 0 || docCount > info.getDocCount()) { // #docs with field must be <= #docs
           throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + info.getDocCount() + " (resource=" + in + ")");
         }
@@ -137,7 +137,7 @@ public class BlockTermsReader : FieldsProducer {
         }
       }
       success = true;
-    } finally {
+    } finally {
       if (!success) {
         in.close();
       }
@@ -174,7 +174,7 @@ public class BlockTermsReader : FieldsProducer {
         if (indexReader != null) {
           indexReader.close();
         }
-      } finally {
+      } finally {
         // null so if an app hangs on to us (ie, we are not
         // GCable, despite being closed) we still free most
         // ram
@@ -183,7 +183,7 @@ public class BlockTermsReader : FieldsProducer {
           in.close();
         }
       }
-    } finally {
+    } finally {
       if (postingsReader != null) {
         postingsReader.close();
       }
@@ -207,13 +207,13 @@ public class BlockTermsReader : FieldsProducer {
   }
 
   private class FieldReader extends Terms {
-    final long numTerms;
-    final FieldInfo fieldInfo;
-    final long termsStartPointer;
-    final long sumTotalTermFreq;
-    final long sumDocFreq;
-    final int docCount;
-    final int longsSize;
+    readonly long numTerms;
+    readonly FieldInfo fieldInfo;
+    readonly long termsStartPointer;
+    readonly long sumTotalTermFreq;
+    readonly long sumDocFreq;
+    readonly int docCount;
+    readonly int longsSize;
 
     FieldReader(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) {
       Debug.Assert(numTerms > 0);
@@ -277,13 +277,13 @@ public class BlockTermsReader : FieldsProducer {
     }
 
     // Iterates through terms in this field
-    private final class SegmentTermsEnum extends TermsEnum {
-      private final IndexInput in;
-      private final BlockTermState state;
-      private final bool doOrd;
-      private final FieldAndTerm fieldTerm = new FieldAndTerm();
-      private final TermsIndexReaderBase.FieldIndexEnum indexEnum;
-      private final BytesRef term = new BytesRef();
+    private sealed class SegmentTermsEnum extends TermsEnum {
+      private readonly IndexInput in;
+      private readonly BlockTermState state;
+      private readonly bool doOrd;
+      private readonly FieldAndTerm fieldTerm = new FieldAndTerm();
+      private readonly TermsIndexReaderBase.FieldIndexEnum indexEnum;
+      private readonly BytesRef term = new BytesRef();
 
       /* This is true if indexEnum is "still" seek'd to the index term
          for the current term. We set it to true on seeking, and then it
@@ -318,7 +318,7 @@ public class BlockTermsReader : FieldsProducer {
       private int blockTermCount;
 
       private byte[] docFreqBytes;
-      private final ByteArrayDataInput freqReader = new ByteArrayDataInput();
+      private readonly ByteArrayDataInput freqReader = new ByteArrayDataInput();
       private int metaDataUpto;
 
       private long[] longs;
@@ -353,7 +353,7 @@ public class BlockTermsReader : FieldsProducer {
       // return NOT_FOUND so it's a waste for us to fill in
       // the term that was actually NOT_FOUND
       @Override
-      public SeekStatus seekCeil(final BytesRef target)  {
+      public SeekStatus seekCeil(BytesRef target)  {
 
         if (indexEnum == null) {
           throw new IllegalStateException("terms index was not loaded");
@@ -374,7 +374,7 @@ public class BlockTermsReader : FieldsProducer {
         // is after current term but before next index term:
         if (indexIsCurrent) {
 
-          final int cmp = BytesRef.getUTF8SortedAsUnicodeComparator().compare(term, target);
+          int cmp = BytesRef.getUTF8SortedAsUnicodeComparator().compare(term, target);
 
           if (cmp == 0) {
             // Already at the requested term
@@ -450,7 +450,7 @@ public class BlockTermsReader : FieldsProducer {
           // First, see if target term matches common prefix
           // in this block:
           if (common < termBlockPrefix) {
-            final int cmp = (term.bytes[common]&0xFF) - (target.bytes[target.offset + common]&0xFF);
+            int cmp = (term.bytes[common]&0xFF) - (target.bytes[target.offset + common]&0xFF);
             if (cmp < 0) {
 
               // TODO: maybe we should store common prefix
@@ -468,7 +468,7 @@ public class BlockTermsReader : FieldsProducer {
                   state.ord++;
                   termSuffixesReader.skipBytes(termSuffixesReader.readVInt());
                 }
-                final int suffix = termSuffixesReader.readVInt();
+                int suffix = termSuffixesReader.readVInt();
                 term.length = termBlockPrefix + suffix;
                 if (term.bytes.length < term.length) {
                   term.grow(term.length);
@@ -489,7 +489,7 @@ public class BlockTermsReader : FieldsProducer {
               // block and return NOT_FOUND:
               Debug.Assert(state.termBlockOrd == 0);
 
-              final int suffix = termSuffixesReader.readVInt();
+              int suffix = termSuffixesReader.readVInt();
               term.length = termBlockPrefix + suffix;
               if (term.bytes.length < term.length) {
                 term.grow(term.length);
@@ -508,17 +508,17 @@ public class BlockTermsReader : FieldsProducer {
             state.termBlockOrd++;
             state.ord++;
 
-            final int suffix = termSuffixesReader.readVInt();
+            int suffix = termSuffixesReader.readVInt();
             
             // We know the prefix matches, so just compare the new suffix:
-            final int termLen = termBlockPrefix + suffix;
+            int termLen = termBlockPrefix + suffix;
             int bytePos = termSuffixesReader.getPosition();
 
             bool next = false;
-            final int limit = target.offset + (termLen < target.length ? termLen : target.length);
+            int limit = target.offset + (termLen < target.length ? termLen : target.length);
             int targetPos = target.offset + termBlockPrefix;
             while(targetPos < limit) {
-              final int cmp = (termSuffixes[bytePos++]&0xFF) - (target.bytes[targetPos++]&0xFF);
+              int cmp = (termSuffixes[bytePos++]&0xFF) - (target.bytes[targetPos++]&0xFF);
               if (cmp < 0) {
                 // Current term is still before the target;
                 // keep scanning
@@ -597,10 +597,10 @@ public class BlockTermsReader : FieldsProducer {
         if (seekPending) {
           Debug.Assert(!indexIsCurrent);
           in.seek(state.blockFilePointer);
-          final int pendingSeekCount = state.termBlockOrd;
+          int pendingSeekCount = state.termBlockOrd;
           bool result = nextBlock();
 
-          final long savOrd = state.ord;
+          long savOrd = state.ord;
 
           // Block must exist since seek(TermState) was called w/ a
           // TermState previously returned by this enum when positioned
@@ -629,7 +629,7 @@ public class BlockTermsReader : FieldsProducer {
         }
 
         // TODO: cutover to something better for these ints!  simple64?
-        final int suffix = termSuffixesReader.readVInt();
+        int suffix = termSuffixesReader.readVInt();
         //System.out.println("  suffix=" + suffix);
 
         term.length = termBlockPrefix + suffix;
@@ -734,7 +734,7 @@ public class BlockTermsReader : FieldsProducer {
         // Now, scan:
         int left = (int) (ord - state.ord);
         while(left > 0) {
-          final BytesRef term = _next();
+          BytesRef term = _next();
           Debug.Assert(term != null);
           left--;
           Debug.Assert(indexIsCurrent);
@@ -822,7 +822,7 @@ public class BlockTermsReader : FieldsProducer {
           // that we really need...
 
           // lazily catch up on metadata decode:
-          final int limit = state.termBlockOrd;
+          int limit = state.termBlockOrd;
           bool absolute = metaDataUpto == 0;
           // TODO: better API would be "jump straight to term=N"???
           while (metaDataUpto < limit) {

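For readers following the seekCeil scan above: each on-disk block holds
prefix-compressed terms, so the enum reads a VInt suffix length per term
and splices that suffix onto the block's shared prefix. Below is a minimal
standalone sketch of that decode step. It is not the Lucene.Net API; the
BlockDecodeSketch/ReadVInt/DecodeBlock names are hypothetical, and the
shared-prefix bytes are assumed to come from the term preceding the block
(the enum's reused term buffer). The doc-freq and metadata blobs that
follow the suffix blob are omitted here.

using System;
using System.Collections.Generic;
using System.IO;

// Hypothetical sketch: decode one term block in the layout the enum above
// consumes (VInt count, VInt common-prefix length, VInt suffix-blob length,
// then per term a VInt suffix length followed by the suffix bytes).
static class BlockDecodeSketch
{
    // Plain VInt decoder: 7 bits per byte, high bit set means "more bytes".
    static int ReadVInt(Stream s)
    {
        int value = 0, shift = 0, b;
        do
        {
            b = s.ReadByte();
            if (b < 0) throw new EndOfStreamException();
            value |= (b & 0x7F) << shift;
            shift += 7;
        } while ((b & 0x80) != 0);
        return value;
    }

    // prefixSource carries the block's shared prefix bytes; in the real
    // format these come from the term preceding the block.
    public static List<byte[]> DecodeBlock(Stream s, byte[] prefixSource)
    {
        int count = ReadVInt(s);     // terms in this block
        int prefixLen = ReadVInt(s); // bytes shared by every term
        ReadVInt(s);                 // suffix-blob length (unused in a linear scan)
        var terms = new List<byte[]>(count);
        for (int i = 0; i < count; i++)
        {
            int suffixLen = ReadVInt(s);
            var term = new byte[prefixLen + suffixLen];
            Array.Copy(prefixSource, term, prefixLen); // splice shared prefix
            s.Read(term, prefixLen, suffixLen);        // then this term's suffix
            terms.Add(term);
        }
        return terms;
    }
}
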
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b6b784fb/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
index 3c20376..eaf7afa 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
@@ -17,330 +17,372 @@
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
-    
-}
-
-// TODO: currently we encode all terms between two indexed
-// terms as a block; but, we could decouple the two, ie
-// allow several blocks in between two indexed terms
-
-/**
- * Writes terms dict, block-encoding (column stride) each
- * term's metadata for each set of terms between two
- * index terms.
- *
- * @lucene.experimental
- */
-
-public class BlockTermsWriter extends FieldsConsumer {
-
-  final static String CODEC_NAME = "BLOCK_TERMS_DICT";
-
-  // Initial format
-  public static final int VERSION_START = 0;
-  public static final int VERSION_APPEND_ONLY = 1;
-  public static final int VERSION_META_ARRAY = 2;
-  public static final int VERSION_CHECKSUM = 3;
-  public static final int VERSION_CURRENT = VERSION_CHECKSUM;
-
-  /** Extension of terms file */
-  static final String TERMS_EXTENSION = "tib";
-
-  protected IndexOutput out;
-  final PostingsWriterBase postingsWriter;
-  final FieldInfos fieldInfos;
-  FieldInfo currentField;
-  private final TermsIndexWriterBase termsIndexWriter;
-
-  private static class FieldMetaData {
-    public final FieldInfo fieldInfo;
-    public final long numTerms;
-    public final long termsStartPointer;
-    public final long sumTotalTermFreq;
-    public final long sumDocFreq;
-    public final int docCount;
-    public final int longsSize;
-
-    public FieldMetaData(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) {
-      Debug.Assert( numTerms > 0;
-      this.fieldInfo = fieldInfo;
-      this.termsStartPointer = termsStartPointer;
-      this.numTerms = numTerms;
-      this.sumTotalTermFreq = sumTotalTermFreq;
-      this.sumDocFreq = sumDocFreq;
-      this.docCount = docCount;
-      this.longsSize = longsSize;
-    }
-  }
-
-  private final List<FieldMetaData> fields = new ArrayList<>();
-
-  // private final String segment;
-
-  public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter,
-      SegmentWriteState state, PostingsWriterBase postingsWriter)
-       {
-    final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION);
-    this.termsIndexWriter = termsIndexWriter;
-    out = state.directory.createOutput(termsFileName, state.context);
-    bool success = false;
-    try {
-      fieldInfos = state.fieldInfos;
-      writeHeader(out);
-      currentField = null;
-      this.postingsWriter = postingsWriter;
-      // segment = state.segmentName;
-      
-      //System.out.println("BTW.init seg=" + state.segmentName);
-      
-      postingsWriter.init(out); // have consumer write its format/header
-      success = true;
-    } finally {
-      if (!success) {
-        IOUtils.closeWhileHandlingException(out);
-      }
-    }
-  }
-  
-  private void writeHeader(IndexOutput out)  {
-    CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);     
-  }
-
-  @Override
-  public TermsConsumer addField(FieldInfo field)  {
-    //System.out.println("\nBTW.addField seg=" + segment + " field=" + field.name);
-    Debug.Assert( currentField == null || currentField.name.compareTo(field.name) < 0;
-    currentField = field;
-    TermsIndexWriterBase.FieldWriter fieldIndexWriter = termsIndexWriter.addField(field, out.getFilePointer());
-    return new TermsWriter(fieldIndexWriter, field, postingsWriter);
-  }
-
-  @Override
-  public void close()  {
-    if (out != null) {
-      try {
-        final long dirStart = out.getFilePointer();
-        
-        out.writeVInt(fields.size());
-        for(FieldMetaData field : fields) {
-          out.writeVInt(field.fieldInfo.number);
-          out.writeVLong(field.numTerms);
-          out.writeVLong(field.termsStartPointer);
-          if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
-            out.writeVLong(field.sumTotalTermFreq);
-          }
-          out.writeVLong(field.sumDocFreq);
-          out.writeVInt(field.docCount);
-          if (VERSION_CURRENT >= VERSION_META_ARRAY) {
-            out.writeVInt(field.longsSize);
-          }
-        }
-        writeTrailer(dirStart);
-        CodecUtil.writeFooter(out);
-      } finally {
-        IOUtils.close(out, postingsWriter, termsIndexWriter);
-        out = null;
-      }
-    }
-  }
-
-  private void writeTrailer(long dirStart)  {
-    out.writeLong(dirStart);    
-  }
-  
-  private static class TermEntry {
-    public final BytesRef term = new BytesRef();
-    public BlockTermState state;
-  }
-
-  class TermsWriter extends TermsConsumer {
-    private final FieldInfo fieldInfo;
-    private final PostingsWriterBase postingsWriter;
-    private final long termsStartPointer;
-    private long numTerms;
-    private final TermsIndexWriterBase.FieldWriter fieldIndexWriter;
-    long sumTotalTermFreq;
-    long sumDocFreq;
-    int docCount;
-    int longsSize;
-
-    private TermEntry[] pendingTerms;
-
-    private int pendingCount;
-
-    TermsWriter(
-        TermsIndexWriterBase.FieldWriter fieldIndexWriter,
-        FieldInfo fieldInfo,
-        PostingsWriterBase postingsWriter) 
+    using System;
+    using System.Collections.Generic;
+    using System.Diagnostics;
+    using Lucene.Net.Index;
+    using Lucene.Net.Store;
+    using Lucene.Net.Util;
+
+    /// <summary>
+    /// Writes terms dict, block-encoding (column stride) each term's metadata 
+    /// for each set of terms between two index terms
+    /// 
+    /// lucene.experimental
+    /// </summary>
+    /// <remarks>
+    /// TODO Currently we encode all terms between two indexed terms as a block
+    /// But we could decouple the two, ie allow several blocks in between two indexed terms
+    /// </remarks>
+    public class BlockTermsWriter : FieldsConsumer
     {
-      this.fieldInfo = fieldInfo;
-      this.fieldIndexWriter = fieldIndexWriter;
-      pendingTerms = new TermEntry[32];
-      for(int i=0;i<pendingTerms.length;i++) {
-        pendingTerms[i] = new TermEntry();
-      }
-      termsStartPointer = out.getFilePointer();
-      this.postingsWriter = postingsWriter;
-      this.longsSize = postingsWriter.setField(fieldInfo);
-    }
-    
-    @Override
-    public Comparator<BytesRef> getComparator() {
-      return BytesRef.getUTF8SortedAsUnicodeComparator();
-    }
 
-    @Override
-    public PostingsConsumer startTerm(BytesRef text)  {
-      //System.out.println("BTW: startTerm term=" + fieldInfo.name + ":" + text.utf8ToString() + " " + text + " seg=" + segment);
-      postingsWriter.startTerm();
-      return postingsWriter;
-    }
-
-    private final BytesRef lastPrevTerm = new BytesRef();
+        public const String CODEC_NAME = "BLOCK_TERMS_DICT";
+
+        // Initial format
+        public const int VERSION_START = 0;
+        public const int VERSION_APPEND_ONLY = 1;
+        public const int VERSION_META_ARRAY = 2;
+        public const int VERSION_CHECKSUM = 3;
+        public const int VERSION_CURRENT = VERSION_CHECKSUM;
+
+        /// <summary>Extension of terms file</summary>
+        public const String TERMS_EXTENSION = "tib";
+
+        protected IndexOutput output;
+        protected readonly PostingsWriterBase postingsWriter;
+        protected readonly FieldInfos fieldInfos;
+        protected FieldInfo currentField;
+        private readonly TermsIndexWriterBase termsIndexWriter;
+        private readonly List<FieldMetaData> fields = new List<FieldMetaData>();
+
+        public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter,
+            SegmentWriteState state, PostingsWriterBase postingsWriter)
+        {
+            String termsFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix,
+                TERMS_EXTENSION);
+            this.termsIndexWriter = termsIndexWriter;
+            output = state.Directory.CreateOutput(termsFileName, state.Context);
+            bool success = false;
+
+            try
+            {
+                fieldInfos = state.FieldInfos;
+                WriteHeader(output);
+                currentField = null;
+                this.postingsWriter = postingsWriter;
+
+                postingsWriter.Init(output); // have consumer write its format/header
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(output);
+                }
+            }
+        }
 
-    @Override
-    public void finishTerm(BytesRef text, TermStats stats)  {
+        private void WriteHeader(IndexOutput output)
+        {
+            CodecUtil.WriteHeader(output, CODEC_NAME, VERSION_CURRENT);
+        }
 
-      Debug.Assert( stats.docFreq > 0;
-      //System.out.println("BTW: finishTerm term=" + fieldInfo.name + ":" + text.utf8ToString() + " " + text + " seg=" + segment + " df=" + stats.docFreq);
+        public override TermsConsumer AddField(FieldInfo field)
+        {
+            Debug.Assert(currentField == null || currentField.Name.CompareTo(field.Name) < 0);
 
-      final bool isIndexTerm = fieldIndexWriter.checkIndexTerm(text, stats);
+            currentField = field;
+            var fiw = termsIndexWriter.AddField(field, output.FilePointer);
+            return new TermsWriter(this, fiw, field, postingsWriter);
+        }
 
-      if (isIndexTerm) {
-        if (pendingCount > 0) {
-          // Instead of writing each term, live, we gather terms
-          // in RAM in a pending buffer, and then write the
-          // entire block in between index terms:
-          flushBlock();
+        public override void Dispose()
+        {
+            if (output != null)
+            {
+                try
+                {
+                    long dirStart = output.FilePointer;
+
+                    output.WriteVInt(fields.Count);
+
+                    foreach (var field in fields)
+                    {
+                        output.WriteVInt(field.FieldInfo.Number);
+                        output.WriteVLong(field.NumTerms);
+                        output.WriteVLong(field.TermsStartPointer);
+                        if (field.FieldInfo.FieldIndexOptions != FieldInfo.IndexOptions.DOCS_ONLY)
+                        {
+                            output.WriteVLong(field.SumTotalTermFreq);
+                        }
+                        output.WriteVLong(field.SumDocFreq);
+                        output.WriteVInt(field.DocCount);
+                        if (VERSION_CURRENT >= VERSION_META_ARRAY)
+                        {
+                            output.WriteVInt(field.LongsSize);
+                        }
+
+                    }
+                    WriteTrailer(dirStart);
+                    CodecUtil.WriteFooter(output);
+                }
+                finally
+                {
+                    IOUtils.Close(output, postingsWriter, termsIndexWriter);
+                    output = null;
+                }
+            }
         }
-        fieldIndexWriter.add(text, stats, out.getFilePointer());
-        //System.out.println("  index term!");
-      }
-
-      if (pendingTerms.length == pendingCount) {
-        final TermEntry[] newArray = new TermEntry[ArrayUtil.oversize(pendingCount+1, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
-        System.arraycopy(pendingTerms, 0, newArray, 0, pendingCount);
-        for(int i=pendingCount;i<newArray.length;i++) {
-          newArray[i] = new TermEntry();
+
+        private void WriteTrailer(long dirStart)
+        {
+            output.WriteLong(dirStart);
         }
-        pendingTerms = newArray;
-      }
-      final TermEntry te = pendingTerms[pendingCount];
-      te.term.copyBytes(text);
-      te.state = postingsWriter.newTermState();
-      te.state.docFreq = stats.docFreq;
-      te.state.totalTermFreq = stats.totalTermFreq;
-      postingsWriter.finishTerm(te.state);
-
-      pendingCount++;
-      numTerms++;
-    }
 
-    // Finishes all terms in this field
-    @Override
-    public void finish(long sumTotalTermFreq, long sumDocFreq, int docCount)  {
-      if (pendingCount > 0) {
-        flushBlock();
-      }
-      // EOF marker:
-      out.writeVInt(0);
-
-      this.sumTotalTermFreq = sumTotalTermFreq;
-      this.sumDocFreq = sumDocFreq;
-      this.docCount = docCount;
-      fieldIndexWriter.finish(out.getFilePointer());
-      if (numTerms > 0) {
-        fields.add(new FieldMetaData(fieldInfo,
-                                     numTerms,
-                                     termsStartPointer,
-                                     sumTotalTermFreq,
-                                     sumDocFreq,
-                                     docCount,
-                                     longsSize));
-      }
-    }
 
-    private int sharedPrefix(BytesRef term1, BytesRef term2) {
-      Debug.Assert( term1.offset == 0;
-      Debug.Assert( term2.offset == 0;
-      int pos1 = 0;
-      int pos1End = pos1 + Math.min(term1.length, term2.length);
-      int pos2 = 0;
-      while(pos1 < pos1End) {
-        if (term1.bytes[pos1] != term2.bytes[pos2]) {
-          return pos1;
+        protected class FieldMetaData
+        {
+            public FieldInfo FieldInfo { get; private set; }
+            public long NumTerms { get; private set; }
+            public long TermsStartPointer { get; private set; }
+            public long SumTotalTermFreq { get; private set; }
+            public long SumDocFreq { get; private set; }
+            public int DocCount { get; private set; }
+            public int LongsSize { get; private set; }
+
+            public FieldMetaData(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq,
+                long sumDocFreq, int docCount, int longsSize)
+            {
+                Debug.Assert(numTerms > 0);
+
+                FieldInfo = fieldInfo;
+                TermsStartPointer = termsStartPointer;
+                NumTerms = numTerms;
+                SumTotalTermFreq = sumTotalTermFreq;
+                SumDocFreq = sumDocFreq;
+                DocCount = docCount;
+                LongsSize = longsSize;
+            }
         }
-        pos1++;
-        pos2++;
-      }
-      return pos1;
-    }
 
-    private final RAMOutputStream bytesWriter = new RAMOutputStream();
-    private final RAMOutputStream bufferWriter = new RAMOutputStream();
-
-    private void flushBlock()  {
-      //System.out.println("BTW.flushBlock seg=" + segment + " pendingCount=" + pendingCount + " fp=" + out.getFilePointer());
-
-      // First pass: compute common prefix for all terms
-      // in the block, against term before first term in
-      // this block:
-      int commonPrefix = sharedPrefix(lastPrevTerm, pendingTerms[0].term);
-      for(int termCount=1;termCount<pendingCount;termCount++) {
-        commonPrefix = Math.min(commonPrefix,
-                                sharedPrefix(lastPrevTerm,
-                                             pendingTerms[termCount].term));
-      }        
-
-      out.writeVInt(pendingCount);
-      out.writeVInt(commonPrefix);
-
-      // 2nd pass: write suffixes, as separate byte[] blob
-      for(int termCount=0;termCount<pendingCount;termCount++) {
-        final int suffix = pendingTerms[termCount].term.length - commonPrefix;
-        // TODO: cutover to better intblock codec, instead
-        // of interleaving here:
-        bytesWriter.writeVInt(suffix);
-        bytesWriter.writeBytes(pendingTerms[termCount].term.bytes, commonPrefix, suffix);
-      }
-      out.writeVInt((int) bytesWriter.getFilePointer());
-      bytesWriter.writeTo(out);
-      bytesWriter.reset();
-
-      // 3rd pass: write the freqs as byte[] blob
-      // TODO: cutover to better intblock codec.  simple64?
-      // write prefix, suffix first:
-      for(int termCount=0;termCount<pendingCount;termCount++) {
-        final BlockTermState state = pendingTerms[termCount].state;
-        Debug.Assert( state != null;
-        bytesWriter.writeVInt(state.docFreq);
-        if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
-          bytesWriter.writeVLong(state.totalTermFreq-state.docFreq);
+        private class TermEntry
+        {
+            public readonly BytesRef Term = new BytesRef();
+            public BlockTermState State;
         }
-      }
-      out.writeVInt((int) bytesWriter.getFilePointer());
-      bytesWriter.writeTo(out);
-      bytesWriter.reset();
-
-      // 4th pass: write the metadata 
-      long[] longs = new long[longsSize];
-      bool absolute = true;
-      for(int termCount=0;termCount<pendingCount;termCount++) {
-        final BlockTermState state = pendingTerms[termCount].state;
-        postingsWriter.encodeTerm(longs, bufferWriter, fieldInfo, state, absolute);
-        for (int i = 0; i < longsSize; i++) {
-          bytesWriter.writeVLong(longs[i]);
+
+        public class TermsWriter : TermsConsumer
+        {
+            private readonly BlockTermsWriter outerInstance;
+            private readonly FieldInfo fieldInfo;
+            private readonly PostingsWriterBase postingsWriter;
+            private readonly long termsStartPointer;
+
+            private readonly BytesRef lastPrevTerm = new BytesRef();
+            private readonly TermsIndexWriterBase.FieldWriter fieldIndexWriter;
+
+            private long numTerms;
+            private long sumTotalTermFreq;
+            private long sumDocFreq;
+            private int docCount;
+            private int longsSize;
+
+            private TermEntry[] pendingTerms;
+
+            private int pendingCount;
+
+            // Unlike a Java inner class, a C# nested class does not capture
+            // the enclosing instance, so the parent writer is passed in explicitly.
+            internal TermsWriter(
+                BlockTermsWriter outerInstance,
+                TermsIndexWriterBase.FieldWriter fieldIndexWriter,
+                FieldInfo fieldInfo,
+                PostingsWriterBase postingsWriter)
+            {
+                this.outerInstance = outerInstance;
+                this.fieldInfo = fieldInfo;
+                this.fieldIndexWriter = fieldIndexWriter;
+                pendingTerms = new TermEntry[32];
+                for (int i = 0; i < pendingTerms.Length; i++)
+                {
+                    pendingTerms[i] = new TermEntry();
+                }
+                termsStartPointer = outerInstance.output.FilePointer;
+                this.postingsWriter = postingsWriter;
+                this.longsSize = postingsWriter.SetField(fieldInfo);
+            }
+
+            public override IComparer<BytesRef> Comparator()
+            {
+                return BytesRef.UTF8SortedAsUnicodeComparer;
+            }
+
+            public override PostingsConsumer StartTerm(BytesRef text)
+            {
+                postingsWriter.StartTerm();
+                return postingsWriter;
+            }
+
+            public override void FinishTerm(BytesRef text, TermStats stats)
+            {
+
+                Debug.Assert(stats.DocFreq > 0);
+
+                bool isIndexTerm = fieldIndexWriter.CheckIndexTerm(text, stats);
+
+                if (isIndexTerm)
+                {
+                    if (pendingCount > 0)
+                    {
+                        // Instead of writing each term, live, we gather terms
+                        // in RAM in a pending buffer, and then write the
+                        // entire block in between index terms:
+                        FlushBlock();
+                    }
+                    fieldIndexWriter.Add(text, stats, outerInstance.output.FilePointer);
+                }
+
+                if (pendingTerms.Length == pendingCount)
+                {
+                    TermEntry[] newArray =
+                        new TermEntry[ArrayUtil.Oversize(pendingCount + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
+                    Array.Copy(pendingTerms, 0, newArray, 0, pendingCount);
+                    for (int i = pendingCount; i < newArray.Length; i++)
+                    {
+                        newArray[i] = new TermEntry();
+                    }
+                    pendingTerms = newArray;
+                }
+                TermEntry te = pendingTerms[pendingCount];
+                te.Term.CopyBytes(text);
+                te.State = postingsWriter.NewTermState();
+                te.State.DocFreq = stats.DocFreq;
+                te.State.TotalTermFreq = stats.TotalTermFreq;
+                postingsWriter.FinishTerm(te.State);
+
+                pendingCount++;
+                numTerms++;
+            }
+
+            // Finishes all terms in this field
+            public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount)
+            {
+                if (pendingCount > 0)
+                {
+                    FlushBlock();
+                }
+
+                // EOF marker:
+                outerInstance.output.WriteVInt(0);
+
+                this.sumTotalTermFreq = sumTotalTermFreq;
+                this.sumDocFreq = sumDocFreq;
+                this.docCount = docCount;
+                fieldIndexWriter.Finish(outerInstance.output.FilePointer);
+
+                if (numTerms > 0)
+                {
+                    outerInstance.fields.Add(new FieldMetaData(fieldInfo,
+                        numTerms,
+                        termsStartPointer,
+                        sumTotalTermFreq,
+                        sumDocFreq,
+                        docCount,
+                        longsSize));
+                }
+            }
+
+            private int SharedPrefix(BytesRef term1, BytesRef term2)
+            {
+                Debug.Assert(term1.Offset == 0);
+                Debug.Assert(term2.Offset == 0);
+                int pos1 = 0;
+                int pos1End = pos1 + Math.Min(term1.Length, term2.Length);
+                int pos2 = 0;
+                while (pos1 < pos1End)
+                {
+                    if (term1.Bytes[pos1] != term2.Bytes[pos2])
+                    {
+                        return pos1;
+                    }
+                    pos1++;
+                    pos2++;
+                }
+                return pos1;
+            }
+
+            private readonly RAMOutputStream bytesWriter = new RAMOutputStream();
+            private readonly RAMOutputStream bufferWriter = new RAMOutputStream();
+
+            private void FlushBlock()
+            {
+                // First pass: compute common prefix for all terms
+                // in the block, against term before first term in
+                // this block:
+
+                int commonPrefix = SharedPrefix(lastPrevTerm, pendingTerms[0].Term);
+                for (int termCount = 1; termCount < pendingCount; termCount++)
+                {
+                    commonPrefix = Math.Min(commonPrefix,
+                        SharedPrefix(lastPrevTerm,
+                            pendingTerms[termCount].Term));
+                }
+
+                outerInstance.output.WriteVInt(pendingCount);
+                outerInstance.output.WriteVInt(commonPrefix);
+
+                // 2nd pass: write suffixes, as separate byte[] blob
+                for (int termCount = 0; termCount < pendingCount; termCount++)
+                {
+                    int suffix = pendingTerms[termCount].Term.Length - commonPrefix;
+                    // TODO: cutover to better intblock codec, instead
+                    // of interleaving here:
+                    bytesWriter.WriteVInt(suffix);
+                    bytesWriter.WriteBytes(pendingTerms[termCount].Term.Bytes, commonPrefix, suffix);
+                }
+                outerInstance.output.WriteVInt((int) bytesWriter.FilePointer);
+                bytesWriter.WriteTo(outerInstance.output);
+                bytesWriter.Reset();
+
+                // 3rd pass: write the freqs as byte[] blob
+                // TODO: cutover to better intblock codec.  simple64?
+                // write prefix, suffix first:
+                for (int termCount = 0; termCount < pendingCount; termCount++)
+                {
+                    BlockTermState state = pendingTerms[termCount].State;
+
+                    Debug.Assert(state != null);
+
+                    bytesWriter.WriteVInt(state.DocFreq);
+                    if (fieldInfo.FieldIndexOptions != FieldInfo.IndexOptions.DOCS_ONLY)
+                    {
+                        bytesWriter.WriteVLong(state.TotalTermFreq - state.DocFreq);
+                    }
+                }
+                outerInstance.output.WriteVInt((int) bytesWriter.FilePointer);
+                bytesWriter.WriteTo(outerInstance.output);
+                bytesWriter.Reset();
+
+                // 4th pass: write the metadata 
+                var longs = new long[longsSize];
+                bool absolute = true;
+                for (int termCount = 0; termCount < pendingCount; termCount++)
+                {
+                    BlockTermState state = pendingTerms[termCount].State;
+                    postingsWriter.EncodeTerm(longs, bufferWriter, fieldInfo, state, absolute);
+                    for (int i = 0; i < longsSize; i++)
+                    {
+                        bytesWriter.WriteVLong(longs[i]);
+                    }
+                    bufferWriter.WriteTo(bytesWriter);
+                    bufferWriter.Reset();
+                    absolute = false;
+                }
+                outerInstance.output.WriteVInt((int) bytesWriter.FilePointer);
+                bytesWriter.WriteTo(outerInstance.output);
+                bytesWriter.Reset();
+
+                lastPrevTerm.CopyBytes(pendingTerms[pendingCount - 1].Term);
+                pendingCount = 0;
+            }
         }
-        bufferWriter.writeTo(bytesWriter);
-        bufferWriter.reset();
-        absolute = false;
-      }
-      out.writeVInt((int) bytesWriter.getFilePointer());
-      bytesWriter.writeTo(out);
-      bytesWriter.reset();
-
-      lastPrevTerm.copyBytes(pendingTerms[pendingCount-1].term);
-      pendingCount = 0;
+
     }
-  }
-}
+}
\ No newline at end of file

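Seen from the writer side, FlushBlock above emits the mirror image of that
block: a VInt term count and common-prefix length, followed by three
VInt-length-prefixed blobs (term suffixes, doc freqs, per-term postings
metadata). The following is a minimal sketch of the first two blobs under
those assumptions, using plain streams in place of the RAMOutputStream and
IndexOutput pair above; BlockEncodeSketch, WriteVInt, FlushBlob and
EncodeBlock are hypothetical names, not Lucene.Net APIs.

using System.IO;

// Hypothetical sketch of FlushBlock's on-disk shape for the first two
// blobs; the metadata blob follows the same length-prefixed pattern.
static class BlockEncodeSketch
{
    // Plain VInt encoder: low 7 bits per byte, high bit set means "more bytes".
    static void WriteVInt(Stream s, int v)
    {
        uint u = (uint) v;
        while (u >= 0x80)
        {
            s.WriteByte((byte) (u | 0x80));
            u >>= 7;
        }
        s.WriteByte((byte) u);
    }

    // Append a staged blob to the block, prefixed with its byte length,
    // then clear it for reuse (compare bytesWriter.Reset() above).
    static void FlushBlob(Stream block, MemoryStream blob)
    {
        WriteVInt(block, (int) blob.Length);
        blob.WriteTo(block);
        blob.SetLength(0);
    }

    public static void EncodeBlock(Stream block, byte[][] terms, int[] docFreqs, int commonPrefix)
    {
        WriteVInt(block, terms.Length); // header: #terms in the block
        WriteVInt(block, commonPrefix); // header: shared prefix length

        var blob = new MemoryStream();
        foreach (var t in terms)        // pass 1: suffix blob
        {
            int suffix = t.Length - commonPrefix;
            WriteVInt(blob, suffix);
            blob.Write(t, commonPrefix, suffix);
        }
        FlushBlob(block, blob);

        foreach (var df in docFreqs)    // pass 2: doc-freq blob
            WriteVInt(blob, df);
        FlushBlob(block, blob);
    }
}
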
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b6b784fb/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
index effd850..d2aa242 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
@@ -17,417 +17,512 @@
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
-    
-}
-
-/** 
- * TermsIndexReader for simple every Nth terms indexes.
- *
- * @see FixedGapTermsIndexWriter
- * @lucene.experimental 
- */
-public class FixedGapTermsIndexReader extends TermsIndexReaderBase {
-
-  // NOTE: long is overkill here, since this number is 128
-  // by default and only indexDivisor * 128 if you change
-  // the indexDivisor at search time.  But, we use this in a
-  // number of places to multiply out the actual ord, and we
-  // will overflow int during those multiplies.  So to avoid
-  // having to upgrade each multiple to long in multiple
-  // places (error prone), we use long here:
-  private long totalIndexInterval;
-
-  private int indexDivisor;
-  final private int indexInterval;
-
-  // Closed if indexLoaded is true:
-  private IndexInput in;
-  private volatile bool indexLoaded;
-
-  private final Comparator<BytesRef> termComp;
-
-  private final static int PAGED_BYTES_BITS = 15;
-
-  // all fields share this single logical byte[]
-  private final PagedBytes termBytes = new PagedBytes(PAGED_BYTES_BITS);
-  private PagedBytes.Reader termBytesReader;
-
-  final HashMap<FieldInfo,FieldIndexData> fields = new HashMap<>();
-  
-  // start of the field info data
-  private long dirOffset;
-  
-  private final int version;
-
-  public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, Comparator<BytesRef> termComp, String segmentSuffix, IOContext context)
-     {
-
-    this.termComp = termComp;
-
-    Debug.Assert( indexDivisor == -1 || indexDivisor > 0;
-
-    in = dir.openInput(IndexFileNames.segmentFileName(segment, segmentSuffix, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context);
-    
-    bool success = false;
-
-    try {
-      
-      version = readHeader(in);
-      
-      if (version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM) {
-        CodecUtil.checksumEntireFile(in);
-      }
-      
-      indexInterval = in.readInt();
-      if (indexInterval < 1) {
-        throw new CorruptIndexException("invalid indexInterval: " + indexInterval + " (resource=" + in + ")");
-      }
-      this.indexDivisor = indexDivisor;
-
-      if (indexDivisor < 0) {
-        totalIndexInterval = indexInterval;
-      } else {
-        // In case terms index gets loaded, later, on demand
-        totalIndexInterval = indexInterval * indexDivisor;
-      }
-      Debug.Assert( totalIndexInterval > 0;
-      
-      seekDir(in, dirOffset);
-
-      // Read directory
-      final int numFields = in.readVInt();     
-      if (numFields < 0) {
-        throw new CorruptIndexException("invalid numFields: " + numFields + " (resource=" + in + ")");
-      }
-      //System.out.println("FGR: init seg=" + segment + " div=" + indexDivisor + " nF=" + numFields);
-      for(int i=0;i<numFields;i++) {
-        final int field = in.readVInt();
-        final int numIndexTerms = in.readVInt();
-        if (numIndexTerms < 0) {
-          throw new CorruptIndexException("invalid numIndexTerms: " + numIndexTerms + " (resource=" + in + ")");
-        }
-        final long termsStart = in.readVLong();
-        final long indexStart = in.readVLong();
-        final long packedIndexStart = in.readVLong();
-        final long packedOffsetsStart = in.readVLong();
-        if (packedIndexStart < indexStart) {
-          throw new CorruptIndexException("invalid packedIndexStart: " + packedIndexStart + " indexStart: " + indexStart + "numIndexTerms: " + numIndexTerms + " (resource=" + in + ")");
-        }
-        final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
-        FieldIndexData previous = fields.put(fieldInfo, new FieldIndexData(fieldInfo, numIndexTerms, indexStart, termsStart, packedIndexStart, packedOffsetsStart));
-        if (previous != null) {
-          throw new CorruptIndexException("duplicate field: " + fieldInfo.name + " (resource=" + in + ")");
-        }
-      }
-      success = true;
-    } finally {
-      if (!success) {
-        IOUtils.closeWhileHandlingException(in);
-      }
-      if (indexDivisor > 0) {
-        in.close();
-        in = null;
-        if (success) {
-          indexLoaded = true;
+    using System;
+    using System.Collections.Generic;
+    using System.Diagnostics;
+    using System.Linq;
+    using Index;
+    using Store;
+    using Util;
+    using Util.Packed;
+
+    /// <summary>
+    /// TermsIndexReader for simple every Nth terms indexes
+    /// 
+    /// See FixedGapTermsIndexWriter
+    /// 
+    /// lucene.experimental
+    /// </summary>
+    public class FixedGapTermsIndexReader : TermsIndexReaderBase
+    {
+
+        // NOTE: long is overkill here, since this number is 128
+        // by default and only indexDivisor * 128 if you change
+        // the indexDivisor at search time.  But, we use this in a
+        // number of places to multiply out the actual ord, and we
+        // will overflow int during those multiplies.  So to avoid
+        // having to upgrade each multiple to long in multiple
+        // places (error prone), we use long here:
+        private long totalIndexInterval;
+
+        private int indexDivisor;
+        private readonly int indexInterval;
+
+        // Closed if indexLoaded is true:
+        private IndexInput input;
+        private volatile bool indexLoaded;
+
+        private readonly IComparer<BytesRef> termComp;
+
+        private static readonly int PAGED_BYTES_BITS = 15;
+
+        // all fields share this single logical byte[]
+        private readonly PagedBytes termBytes = new PagedBytes(PAGED_BYTES_BITS);
+        private PagedBytes.Reader termBytesReader;
+
+        private readonly Dictionary<FieldInfo, FieldIndexData> fields = new Dictionary<FieldInfo, FieldIndexData>();
+
+        // start of the field info data
+        private long dirOffset;
+
+        private readonly int version;
+
+        public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor,
+            IComparer<BytesRef> termComp, String segmentSuffix, IOContext context)
+        {
+            this.termComp = termComp;
+
+            Debug.Assert(indexDivisor == -1 || indexDivisor > 0);
+
+            input =
+                dir.OpenInput(
+                    IndexFileNames.SegmentFileName(segment, segmentSuffix,
+                        FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION),
+                    context);
+
+            bool success = false;
+
+            try
+            {
+
+                version = ReadHeader(input);
+
+                if (version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM)
+                    CodecUtil.ChecksumEntireFile(input);
+                
+                indexInterval = input.ReadInt();
+                
+                if (indexInterval < 1)
+                {
+                    throw new CorruptIndexException(String.Format("Invalid indexInterval: {0}, Resource: {1}",
+                        indexInterval, input));
+                }
+
+                this.indexDivisor = indexDivisor;
+
+                if (indexDivisor < 0)
+                {
+                    totalIndexInterval = indexInterval;
+                }
+                else
+                {
+                    // In case terms index gets loaded, later, on demand
+                    totalIndexInterval = indexInterval*indexDivisor;
+                }
+
+                Debug.Assert(totalIndexInterval > 0);
+
+                SeekDir(input, dirOffset);
+
+                // Read directory
+                int numFields = input.ReadVInt();
+
+                if (numFields < 0)
+                    throw new CorruptIndexException(String.Format("Invalid numFields: {0}, Resource: {1}", numFields,
+                        input));
+
+                for (int i = 0; i < numFields; i++)
+                {
+                    int field = input.ReadVInt();
+                    int numIndexTerms = input.ReadVInt();
+                    if (numIndexTerms < 0)
+                        throw new CorruptIndexException(String.Format("Invalid numIndexTerms: {0}, Resource: {1}",
+                            numIndexTerms,
+                            input));
+
+                    long termsStart = input.ReadVLong();
+                    long indexStart = input.ReadVLong();
+                    long packedIndexStart = input.ReadVLong();
+                    long packedOffsetsStart = input.ReadVLong();
+
+                    if (packedIndexStart < indexStart)
+                        throw new CorruptIndexException(
+                            String.Format(
+                                "Invalid packedIndexStart: {0}, IndexStart: {1}, NumIndexTerms: {2}, Resource: {3}",
+                                packedIndexStart,
+                                indexStart, numIndexTerms, input));
+
+                    FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
+
+                    try
+                    {
+                        fields.Add(fieldInfo,
+                            new FieldIndexData(fieldInfo, numIndexTerms, indexStart, termsStart, packedIndexStart,
+                                packedOffsetsStart));
+                    }
+                    catch (ArgumentException)
+                    {
+                        throw new CorruptIndexException(String.Format("Duplicate field: {0}, Resource {1}",
+                            fieldInfo.Name,
+                            input));
+                    }
+
+
+                }
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(input);
+                }
+                if (indexDivisor > 0)
+                {
+                    input.Dispose();
+                    input = null;
+                    if (success)
+                        indexLoaded = true;
+
+                    termBytesReader = termBytes.Freeze(true);
+                }
+            }
         }
-        termBytesReader = termBytes.freeze(true);
-      }
-    }
-  }
-  
-  @Override
-  public int getDivisor() {
-    return indexDivisor;
-  }
-
-  private int readHeader(IndexInput input)  {
-    int version = CodecUtil.checkHeader(input, FixedGapTermsIndexWriter.CODEC_NAME,
-      FixedGapTermsIndexWriter.VERSION_START, FixedGapTermsIndexWriter.VERSION_CURRENT);
-    if (version < FixedGapTermsIndexWriter.VERSION_APPEND_ONLY) {
-      dirOffset = input.readLong();
-    }
-    return version;
-  }
-
-  private class IndexEnum extends FieldIndexEnum {
-    private final FieldIndexData.CoreFieldIndex fieldIndex;
-    private final BytesRef term = new BytesRef();
-    private long ord;
-
-    public IndexEnum(FieldIndexData.CoreFieldIndex fieldIndex) {
-      this.fieldIndex = fieldIndex;
-    }
-
-    @Override
-    public BytesRef term() {
-      return term;
-    }
 
-    @Override
-    public long seek(BytesRef target) {
-      int lo = 0;          // binary search
-      int hi = fieldIndex.numIndexTerms - 1;
-      Debug.Assert( totalIndexInterval > 0 : "totalIndexInterval=" + totalIndexInterval;
-
-      while (hi >= lo) {
-        int mid = (lo + hi) >>> 1;
-
-        final long offset = fieldIndex.termOffsets.get(mid);
-        final int length = (int) (fieldIndex.termOffsets.get(1+mid) - offset);
-        termBytesReader.fillSlice(term, fieldIndex.termBytesStart + offset, length);
-
-        int delta = termComp.compare(target, term);
-        if (delta < 0) {
-          hi = mid - 1;
-        } else if (delta > 0) {
-          lo = mid + 1;
-        } else {
-          Debug.Assert( mid >= 0;
-          ord = mid*totalIndexInterval;
-          return fieldIndex.termsStart + fieldIndex.termsDictOffsets.get(mid);
+        public override int Divisor
+        {
+            get { return indexDivisor; }
         }
-      }
-
-      if (hi < 0) {
-        Debug.Assert( hi == -1;
-        hi = 0;
-      }
-
-      final long offset = fieldIndex.termOffsets.get(hi);
-      final int length = (int) (fieldIndex.termOffsets.get(1+hi) - offset);
-      termBytesReader.fillSlice(term, fieldIndex.termBytesStart + offset, length);
-
-      ord = hi*totalIndexInterval;
-      return fieldIndex.termsStart + fieldIndex.termsDictOffsets.get(hi);
-    }
-
-    @Override
-    public long next() {
-      final int idx = 1 + (int) (ord / totalIndexInterval);
-      if (idx >= fieldIndex.numIndexTerms) {
-        return -1;
-      }
-      ord += totalIndexInterval;
-
-      final long offset = fieldIndex.termOffsets.get(idx);
-      final int length = (int) (fieldIndex.termOffsets.get(1+idx) - offset);
-      termBytesReader.fillSlice(term, fieldIndex.termBytesStart + offset, length);
-      return fieldIndex.termsStart + fieldIndex.termsDictOffsets.get(idx);
-    }
-
-    @Override
-    public long ord() {
-      return ord;
-    }
-
-    @Override
-    public long seek(long ord) {
-      int idx = (int) (ord / totalIndexInterval);
-      // caller must ensure ord is in bounds
-      Debug.Assert( idx < fieldIndex.numIndexTerms;
-      final long offset = fieldIndex.termOffsets.get(idx);
-      final int length = (int) (fieldIndex.termOffsets.get(1+idx) - offset);
-      termBytesReader.fillSlice(term, fieldIndex.termBytesStart + offset, length);
-      this.ord = idx * totalIndexInterval;
-      return fieldIndex.termsStart + fieldIndex.termsDictOffsets.get(idx);
-    }
-  }
-
-  @Override
-  public bool supportsOrd() {
-    return true;
-  }
-
-  private final class FieldIndexData {
-
-    volatile CoreFieldIndex coreIndex;
-
-    private final long indexStart;
-    private final long termsStart;
-    private final long packedIndexStart;
-    private final long packedOffsetsStart;
-
-    private final int numIndexTerms;
-
-    public FieldIndexData(FieldInfo fieldInfo, int numIndexTerms, long indexStart, long termsStart, long packedIndexStart,
-                          long packedOffsetsStart)  {
 
-      this.termsStart = termsStart;
-      this.indexStart = indexStart;
-      this.packedIndexStart = packedIndexStart;
-      this.packedOffsetsStart = packedOffsetsStart;
-      this.numIndexTerms = numIndexTerms;
+        private int ReadHeader(IndexInput input)
+        {
+            int version = CodecUtil.CheckHeader(input, FixedGapTermsIndexWriter.CODEC_NAME,
+                FixedGapTermsIndexWriter.VERSION_START, FixedGapTermsIndexWriter.VERSION_CURRENT);
+            if (version < FixedGapTermsIndexWriter.VERSION_APPEND_ONLY)
+                dirOffset = input.ReadLong();
 
-      if (indexDivisor > 0) {
-        loadTermsIndex();
-      }
-    }
-
-    private void loadTermsIndex()  {
-      if (coreIndex == null) {
-        coreIndex = new CoreFieldIndex(indexStart, termsStart, packedIndexStart, packedOffsetsStart, numIndexTerms);
-      }
-    }
-
-    private final class CoreFieldIndex {
-
-      // where this field's terms begin in the packed byte[]
-      // data
-      final long termBytesStart;
-
-      // offset into index termBytes
-      final PackedInts.Reader termOffsets;
-
-      // index pointers into main terms dict
-      final PackedInts.Reader termsDictOffsets;
-
-      final int numIndexTerms;
-      final long termsStart;
-
-      public CoreFieldIndex(long indexStart, long termsStart, long packedIndexStart, long packedOffsetsStart, int numIndexTerms)  {
+            return version;
+        }
 
-        this.termsStart = termsStart;
-        termBytesStart = termBytes.getPointer();
+        private class IndexEnum : FieldIndexEnum
+        {
+            private readonly FieldIndexData.CoreFieldIndex fieldIndex;
+            public override long Ord { get; set; }
 
-        IndexInput clone = in.clone();
-        clone.seek(indexStart);
+            public IndexEnum(FieldIndexData.CoreFieldIndex fieldIndex)
+            {
+                Term = new BytesRef();
+                this.fieldIndex = fieldIndex;
+            }
 
-        // -1 is passed to mean "don't load term index", but
-        // if we are then later loaded it's overwritten with
-        // a real value
-        Debug.Assert( indexDivisor > 0;
+            public override BytesRef Term { get; set; }
+
+            public override long Seek(BytesRef target)
+            {
+                int lo = 0; // binary search
+                int hi = fieldIndex.numIndexTerms - 1;
+                Debug.Assert(totalIndexInterval > 0, "totalIndexInterval=" + totalIndexInterval);
+
+                while (hi >= lo)
+                {
+                    int mid = (int) ((uint) (lo + hi) >> 1); // Java's >>> (unsigned shift)
+
+                    long offset = fieldIndex.termOffsets.Get(mid);
+                    int length = (int) (fieldIndex.termOffsets.Get(1 + mid) - offset);
+                    termBytesReader.FillSlice(Term, fieldIndex.termBytesStart + offset, length);
+
+                    int delta = termComp.Compare(target, Term);
+                    if (delta < 0)
+                    {
+                        hi = mid - 1;
+                    }
+                    else if (delta > 0)
+                    {
+                        lo = mid + 1;
+                    }
+                    else
+                    {
+                        Debug.Assert(mid >= 0);
+                        Ord = mid * totalIndexInterval;
+                        return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(mid);
+                    }
+                }
+
+                if (hi < 0)
+                {
+                    Debug.Assert(hi == -1);
+                    hi = 0;
+                }
+
+                
+                long offset = fieldIndex.termOffsets.Get(hi);
+                int length = (int) (fieldIndex.termOffsets.Get(1 + hi) - offset);
+                termBytesReader.FillSlice(Term, fieldIndex.termBytesStart + offset, length);
+
+                Ord = hi * totalIndexInterval;
+                return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(hi);
+            }
 
-        this.numIndexTerms = 1+(numIndexTerms-1) / indexDivisor;
+            public override long Next
+            {
+                get
+                {
+                    int idx = 1 + (int) (Ord / totalIndexInterval);
+                    if (idx >= fieldIndex.numIndexTerms)
+                    {
+                        return -1;
+                    }
+                    Ord += totalIndexInterval;
+
+                    long offset = fieldIndex.termOffsets.Get(idx);
+                    int length = (int) (fieldIndex.termOffsets.Get(1 + idx) - offset);
+                    termBytesReader.FillSlice(Term, fieldIndex.termBytesStart + offset, length);
+                    return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(idx);
+                }
+            }
 
-        Debug.Assert( this.numIndexTerms  > 0: "numIndexTerms=" + numIndexTerms + " indexDivisor=" + indexDivisor;
+            public override long Seek(long ord)
+            {
+                int idx = (int) (ord/totalIndexInterval);
+                // caller must ensure ord is in bounds
+                Debug.Assert(idx < fieldIndex.numIndexTerms);
+
+                long offset = fieldIndex.termOffsets.Get(idx);
+                int length = (int) (fieldIndex.termOffsets.Get(1 + idx) - offset);
+                termBytesReader.FillSlice(Term, fieldIndex.termBytesStart + offset, length);
+                Ord = idx * totalIndexInterval;
+                return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(idx);
+            }
+        }
 
-        if (indexDivisor == 1) {
-          // Default (load all index terms) is fast -- slurp in the images from disk:
-          
-          try {
-            final long numTermBytes = packedIndexStart - indexStart;
-            termBytes.copy(clone, numTermBytes);
+        public override bool SupportsOrd
+        {
+            get { return true; }
+        }
 
-            // records offsets into main terms dict file
-            termsDictOffsets = PackedInts.getReader(clone);
-            Debug.Assert( termsDictOffsets.size() == numIndexTerms;
+        protected class FieldIndexData
+        {
 
-            // records offsets into byte[] term data
-            termOffsets = PackedInts.getReader(clone);
-            Debug.Assert( termOffsets.size() == 1+numIndexTerms;
-          } finally {
-            clone.close();
-          }
-        } else {
-          // Get packed iterators
-          final IndexInput clone1 = in.clone();
-          final IndexInput clone2 = in.clone();
+            public volatile CoreFieldIndex CoreIndex;
 
-          try {
-            // Subsample the index terms
-            clone1.seek(packedIndexStart);
-            final PackedInts.ReaderIterator termsDictOffsetsIter = PackedInts.getReaderIterator(clone1, PackedInts.DEFAULT_BUFFER_SIZE);
+            private readonly long indexStart;
+            private readonly long termsStart;
+            private readonly long packedIndexStart;
+            private readonly long packedOffsetsStart;
 
-            clone2.seek(packedOffsetsStart);
-            final PackedInts.ReaderIterator termOffsetsIter = PackedInts.getReaderIterator(clone2,  PackedInts.DEFAULT_BUFFER_SIZE);
+            private readonly int numIndexTerms;
 
-            // TODO: often we can get by w/ fewer bits per
-            // value, below.. .but this'd be more complex:
-            // we'd have to try @ fewer bits and then grow
-            // if we overflowed it.
+            public FieldIndexData(FieldInfo fieldInfo, int numIndexTerms, long indexStart, long termsStart,
+                long packedIndexStart,
+                long packedOffsetsStart)
+            {
 
-            PackedInts.Mutable termsDictOffsetsM = PackedInts.getMutable(this.numIndexTerms, termsDictOffsetsIter.getBitsPerValue(), PackedInts.DEFAULT);
-            PackedInts.Mutable termOffsetsM = PackedInts.getMutable(this.numIndexTerms+1, termOffsetsIter.getBitsPerValue(), PackedInts.DEFAULT);
+                this.termsStart = termsStart;
+                this.indexStart = indexStart;
+                this.packedIndexStart = packedIndexStart;
+                this.packedOffsetsStart = packedOffsetsStart;
+                this.numIndexTerms = numIndexTerms;
 
-            termsDictOffsets = termsDictOffsetsM;
-            termOffsets = termOffsetsM;
+                if (indexDivisor > 0)
+                {
+                    LoadTermsIndex();
+                }
+            }
 
-            int upto = 0;
+            private void LoadTermsIndex()
+            {
+                if (CoreIndex == null)
+                {
+                    CoreIndex = new CoreFieldIndex(indexStart, termsStart, packedIndexStart, packedOffsetsStart,
+                        numIndexTerms);
+                }
+            }
 
-            long termOffsetUpto = 0;
+            public class CoreFieldIndex
+            {
+
+                // where this field's terms begin in the packed byte[]
+                // data
+                internal readonly long termBytesStart;
+
+                // offset into index termBytes
+                internal readonly PackedInts.Reader termOffsets;
+
+                // index pointers into main terms dict
+                internal readonly PackedInts.Reader termsDictOffsets;
+
+                internal readonly int numIndexTerms;
+                internal readonly long termsStart;
+
+                public CoreFieldIndex(long indexStart, long termsStart, long packedIndexStart, long packedOffsetsStart,
+                    int numIndexTerms)
+                {
+
+                    this.termsStart = termsStart;
+                    termBytesStart = termBytes.Pointer;
+
+                    IndexInput clone = input.Clone();
+                    clone.Seek(indexStart);
+
+                    // -1 is passed to mean "don't load term index", but
+                    // if we are then later loaded it's overwritten with
+                    // a real value
+                    Debug.Assert(indexDivisor > 0);
+
+                    this.numIndexTerms = 1 + (numIndexTerms - 1)/indexDivisor;
+
+                    Debug.Assert(this.numIndexTerms > 0,
+                        "numIndexTerms=" + numIndexTerms + " indexDivisor=" + indexDivisor);
+
+                    if (indexDivisor == 1)
+                    {
+                        // Default (load all index terms) is fast -- slurp in the images from disk:
+
+                        try
+                        {
+                            long numTermBytes = packedIndexStart - indexStart;
+                            termBytes.Copy(clone, numTermBytes);
+
+                            // records offsets into main terms dict file
+                            termsDictOffsets = PackedInts.GetReader(clone);
+                            Debug.Assert(termsDictOffsets.Size() == numIndexTerms);
+
+                            // records offsets into byte[] term data
+                            termOffsets = PackedInts.GetReader(clone);
+                            Debug.Assert(termOffsets.Size() == 1 + numIndexTerms);
+                        }
+                        finally
+                        {
+                            clone.Dispose();
+                        }
+                    }
+                    else
+                    {
+                        // Get packed iterators
+                        var clone1 = input.Clone();
+                        var clone2 = input.Clone();
+
+                        try
+                        {
+                            // Subsample the index terms
+                            clone1.Seek(packedIndexStart);
+                            
+                            PackedInts.ReaderIterator termsDictOffsetsIter = PackedInts.GetReaderIterator(clone1,
+                                PackedInts.DEFAULT_BUFFER_SIZE);
+
+                            clone2.Seek(packedOffsetsStart);
+                            
+                            PackedInts.ReaderIterator termOffsetsIter = PackedInts.GetReaderIterator(clone2,
+                                PackedInts.DEFAULT_BUFFER_SIZE);
+
+                            // TODO: often we can get by w/ fewer bits per
+                            // value below, but this'd be more complex:
+                            // we'd have to try fewer bits and then grow
+                            // if we overflowed it.
+
+                            PackedInts.Mutable termsDictOffsetsM = PackedInts.GetMutable(this.numIndexTerms,
+                                termsDictOffsetsIter.BitsPerValue, PackedInts.DEFAULT);
+                            PackedInts.Mutable termOffsetsM = PackedInts.GetMutable(this.numIndexTerms + 1,
+                                termOffsetsIter.BitsPerValue, PackedInts.DEFAULT);
+
+                            termsDictOffsets = termsDictOffsetsM;
+                            termOffsets = termOffsetsM;
+
+                            int upto = 0;
+
+                            long termOffsetUpto = 0;
+
+                            while (upto < this.numIndexTerms)
+                            {
+                                // main file offset copies straight over
+                                termsDictOffsetsM.Set(upto, termsDictOffsetsIter.Next());
+
+                                termOffsetsM.Set(upto, termOffsetUpto);
+
+                                long termOffset = termOffsetsIter.Next();
+                                long nextTermOffset = termOffsetsIter.Next();
+                                int numTermBytes = (int) (nextTermOffset - termOffset);
+
+                                clone.Seek(indexStart + termOffset);
+                                
+                                Debug.Assert(indexStart + termOffset < clone.Length(),
+                                    String.Format("IndexStart: {0}, TermOffset: {1}, Len: {2}", indexStart, termOffset,
+                                        clone.Length()));
+                                
+                                Debug.Assert(indexStart + termOffset + numTermBytes < clone.Length());
+
+                                termBytes.Copy(clone, numTermBytes);
+                                termOffsetUpto += numTermBytes;
+
+                                upto++;
+                                if (upto == this.numIndexTerms)
+                                {
+                                    break;
+                                }
+
+                                // skip terms:
+                                termsDictOffsetsIter.Next();
+                                for (int i = 0; i < indexDivisor - 2; i++)
+                                {
+                                    termOffsetsIter.Next();
+                                    termsDictOffsetsIter.Next();
+                                }
+                            }
+                            termOffsetsM.Set(upto, termOffsetUpto);
+
+                        }
+                        finally
+                        {
+                            clone1.Dispose();
+                            clone2.Dispose();
+                            clone.Dispose();
+                        }
+                    }
+                }
+
+                /** Returns approximate RAM bytes used */
+                public long RamBytesUsed()
+                {
+                    return ((termOffsets != null) ? termOffsets.RamBytesUsed() : 0) +
+                           ((termsDictOffsets != null) ? termsDictOffsets.RamBytesUsed() : 0);
+                }
+            }
+        }
 
-            while(upto < this.numIndexTerms) {
-              // main file offset copies straight over
-              termsDictOffsetsM.set(upto, termsDictOffsetsIter.next());
+        public override FieldIndexEnum GetFieldEnum(FieldInfo fieldInfo)
+        {
+            FieldIndexData fieldData = fields[fieldInfo];
+            return fieldData.CoreIndex == null ? null : new IndexEnum(fieldData.CoreIndex);
+        }
 
-              termOffsetsM.set(upto, termOffsetUpto);
+        public override void Dispose()
+        {
+            if (input != null && !indexLoaded)
+                input.Dispose();
+        }
 
-              long termOffset = termOffsetsIter.next();
-              long nextTermOffset = termOffsetsIter.next();
-              final int numTermBytes = (int) (nextTermOffset - termOffset);
+        private void SeekDir(IndexInput input, long dirOffset)
+        {
+            if (version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM)
+            {
+                input.Seek(input.Length() - CodecUtil.FooterLength() - 8);
+                dirOffset = input.ReadLong();
 
-              clone.seek(indexStart + termOffset);
-              Debug.Assert( indexStart + termOffset < clone.length() : "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.length();
-              Debug.Assert( indexStart + termOffset + numTermBytes < clone.length();
+            }
+            else if (version >= FixedGapTermsIndexWriter.VERSION_APPEND_ONLY)
+            {
+                input.Seek(input.Length() - 8);
+                dirOffset = input.ReadLong();
+            }
 
-              termBytes.copy(clone, numTermBytes);
-              termOffsetUpto += numTermBytes;
+            input.Seek(dirOffset);
+        }
 
-              upto++;
-              if (upto == this.numIndexTerms) {
-                break;
-              }
+        public override long RamBytesUsed
+        {
+            get
+            {
+                var sizeInBytes = ((termBytes != null) ? termBytes.RamBytesUsed() : 0) +
+                                  ((termBytesReader != null) ? termBytesReader.RamBytesUsed() : 0);
 
-              // skip terms:
-              termsDictOffsetsIter.next();
-              for(int i=0;i<indexDivisor-2;i++) {
-                termOffsetsIter.next();
-                termsDictOffsetsIter.next();
-              }
+                return fields.Values.Aggregate(sizeInBytes,
+                    (current, entry) => current + entry.CoreIndex.RamBytesUsed());
             }
-            termOffsetsM.set(upto, termOffsetUpto);
-
-          } finally {
-            clone1.close();
-            clone2.close();
-            clone.close();
-          }
         }
-      }
-      
-      /** Returns approximate RAM bytes Used */
-      public long ramBytesUsed() {
-        return ((termOffsets!=null)? termOffsets.ramBytesUsed() : 0) +
-            ((termsDictOffsets!=null)? termsDictOffsets.ramBytesUsed() : 0);
-      }
-    }
-  }
-
-  @Override
-  public FieldIndexEnum getFieldEnum(FieldInfo fieldInfo) {
-    final FieldIndexData fieldData = fields.get(fieldInfo);
-    if (fieldData.coreIndex == null) {
-      return null;
-    } else {
-      return new IndexEnum(fieldData.coreIndex);
-    }
-  }
 
-  @Override
-  public void close()  {
-    if (in != null && !indexLoaded) {
-      in.close();
-    }
-  }
-
-  private void seekDir(IndexInput input, long dirOffset)  {
-    if (version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM) {
-      input.seek(input.length() - CodecUtil.footerLength() - 8);
-      dirOffset = input.readLong();
-    } else if (version >= FixedGapTermsIndexWriter.VERSION_APPEND_ONLY) {
-      input.seek(input.length() - 8);
-      dirOffset = input.readLong();
-    }
-    input.seek(dirOffset);
-  }
-  
-  @Override
-  public long ramBytesUsed() {
-    long sizeInBytes = ((termBytes!=null) ? termBytes.ramBytesUsed() : 0) +
-        ((termBytesReader!=null)? termBytesReader.ramBytesUsed() : 0);
-    for(FieldIndexData entry : fields.values()) {
-      sizeInBytes += entry.coreIndex.ramBytesUsed();
     }
-    return sizeInBytes;
-  }
-}
+}
\ No newline at end of file

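The IndexEnum.Seek hunk above is a floor-style binary search over the packed index-term offsets. As a minimal standalone sketch of the same lookup, with a flat byte[] pool and a long[] offsets table standing in for the PagedBytes and PackedInts structures (SeekSketch and its members are illustrative names, not part of the commit):

    using System;

    static class SeekSketch
    {
        // offsets has numTerms + 1 entries; offsets[numTerms] is the end
        // sentinel, so term i occupies pool[offsets[i] .. offsets[i+1]).
        public static int SeekFloor(byte[] pool, long[] offsets, int numTerms, byte[] target)
        {
            int lo = 0;
            int hi = numTerms - 1;
            while (hi >= lo)
            {
                int mid = (int)((uint)(lo + hi) >> 1); // overflow-safe midpoint (Java's >>> 1)
                int cmp = CompareSlice(pool, offsets, mid, target);
                if (cmp > 0)
                    hi = mid - 1;   // indexed term sorts after target
                else if (cmp < 0)
                    lo = mid + 1;   // indexed term sorts before target
                else
                    return mid;     // exact hit
            }
            return hi < 0 ? 0 : hi; // floor term, clamped to 0 like the original
        }

        private static int CompareSlice(byte[] pool, long[] offsets, int idx, byte[] target)
        {
            int start = (int)offsets[idx];
            int len = (int)(offsets[idx + 1] - offsets[idx]);
            int limit = Math.Min(len, target.Length);
            for (int i = 0; i < limit; i++)
            {
                // byte is unsigned in C#, which matches Lucene's byte ordering
                int diff = pool[start + i] - target[i];
                if (diff != 0)
                    return diff;
            }
            return len - target.Length;
        }
    }

The production code performs the same walk, but reads each candidate term through termBytesReader.FillSlice and maps the winning slot to a terms-dict pointer via termsDictOffsets, multiplying the slot by totalIndexInterval to recover the ord.
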
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b6b784fb/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
index ba0bbf9..39d9065 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
@@ -21,10 +21,10 @@ namespace Lucene.Net.Codecs.BlockTerms
     using System;
     using System.Collections.Generic;
     using System.Diagnostics;
-    using Lucene.Net.Index;
-    using Lucene.Net.Store;
-    using Lucene.Net.Util;
-    using Lucene.Net.Util.Packed;
+    using Index;
+    using Store;
+    using Util;
+    using Util.Packed;
 
     /// <summary>
     /// Selects every Nth term as an index term, and holds term
@@ -37,41 +37,40 @@ namespace Lucene.Net.Codecs.BlockTerms
     /// </summary>
     public class FixedGapTermsIndexWriter : TermsIndexWriterBase
     {
-        protected IndexOutput output;
+        protected IndexOutput Output;
 
         /** Extension of terms index file */
-        private static readonly String TERMS_INDEX_EXTENSION = "tii";
-        private static readonly String CODEC_NAME = "SIMPLE_STANDARD_TERMS_INDEX";
-        private static readonly int VERSION_START = 0;
-        private static readonly int VERSION_APPEND_ONLY = 1;
+        private const String TERMS_INDEX_EXTENSION = "tii";
+        public const String CODEC_NAME = "SIMPLE_STANDARD_TERMS_INDEX";
+        public const int VERSION_START = 0;
+        public const int VERSION_APPEND_ONLY = 1;
 
-        private static readonly int VERSION_CHECKSUM = 1000;
+        public const int VERSION_CHECKSUM = 1000;
 
         // 4.x "skipped" trunk's monotonic addressing: give any user a nice exception
-        private static readonly int VERSION_CURRENT = VERSION_CHECKSUM;
-        private readonly int termIndexInterval;
-        private readonly List<SimpleFieldWriter> fields = new List<SimpleFieldWriter>();
-        private readonly FieldInfos fieldInfos;  //@SuppressWarnings("unused") 
+        public const int VERSION_CURRENT = VERSION_CHECKSUM;
+        private readonly int _termIndexInterval;
+        private readonly List<SimpleFieldWriter> _fields = new List<SimpleFieldWriter>();
+
+        private readonly FieldInfos _fieldInfos;  //@SuppressWarnings("unused") 
 
         public FixedGapTermsIndexWriter(SegmentWriteState state)
         {
             String indexFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix,
                 TERMS_INDEX_EXTENSION);
-            termIndexInterval = state.TermIndexInterval;
-            output = state.Directory.CreateOutput(indexFileName, state.Context);
+            _termIndexInterval = state.TermIndexInterval;
+            Output = state.Directory.CreateOutput(indexFileName, state.Context);
             bool success = false;
             try
             {
-                fieldInfos = state.FieldInfos;
-                WriteHeader(output);
-                output.WriteInt(termIndexInterval);
+                _fieldInfos = state.FieldInfos;
+                WriteHeader(Output);
+                Output.WriteInt(_termIndexInterval);
                 success = true;
             }
             finally
             {
                 if (!success)
                 {
-                    IOUtils.CloseWhileHandlingException(output);
+                    IOUtils.CloseWhileHandlingException(Output);
                 }
             }
         }
@@ -83,9 +82,8 @@ namespace Lucene.Net.Codecs.BlockTerms
 
         public override FieldWriter AddField(FieldInfo field, long termsFilePointer)
         {
-            //System.output.println("FGW: addFfield=" + field.name);
-            SimpleFieldWriter writer = new SimpleFieldWriter(field, termsFilePointer);
-            fields.Add(writer);
+            var writer = new SimpleFieldWriter(field, termsFilePointer, this);
+            _fields.Add(writer);
             return writer;
         }
 
@@ -112,182 +110,179 @@ namespace Lucene.Net.Codecs.BlockTerms
             return Math.Min(1 + priorTerm.Length, indexedTerm.Length);
         }
 
-        public void Dispose()
+        public override void Dispose()
         {
-            if (output != null)
+            if (Output != null)
             {
                 bool success = false;
                 try
                 {
-                    long dirStart = output.FilePointer;
-                    int fieldCount = fields.Count;
+                    long dirStart = Output.FilePointer;
+                    int fieldCount = _fields.Count;
 
                     int nonNullFieldCount = 0;
                     for (int i = 0; i < fieldCount; i++)
                     {
-                        SimpleFieldWriter field = fields[i];
-                        if (field.numIndexTerms > 0)
+                        SimpleFieldWriter field = _fields[i];
+                        if (field.NumIndexTerms > 0)
                         {
                             nonNullFieldCount++;
                         }
                     }
 
-                    output.WriteVInt(nonNullFieldCount);
+                    Output.WriteVInt(nonNullFieldCount);
                     for (int i = 0; i < fieldCount; i++)
                     {
-                        SimpleFieldWriter field = fields[i];
-                        if (field.numIndexTerms > 0)
+                        SimpleFieldWriter field = _fields[i];
+                        if (field.NumIndexTerms > 0)
                         {
-                            output.WriteVInt(field.fieldInfo.Number);
-                            output.WriteVInt(field.numIndexTerms);
-                            output.WriteVLong(field.termsStart);
-                            output.WriteVLong(field.indexStart);
-                            output.WriteVLong(field.packedIndexStart);
-                            output.WriteVLong(field.packedOffsetsStart);
+                            Output.WriteVInt(field.FieldInfo.Number);
+                            Output.WriteVInt(field.NumIndexTerms);
+                            Output.WriteVLong(field.TermsStart);
+                            Output.WriteVLong(field.IndexStart);
+                            Output.WriteVLong(field.PackedIndexStart);
+                            Output.WriteVLong(field.PackedOffsetsStart);
                         }
                     }
                     WriteTrailer(dirStart);
-                    CodecUtil.WriteFooter(output);
+                    CodecUtil.WriteFooter(Output);
                     success = true;
                 }
                 finally
                 {
                     if (success)
                     {
-                        IOUtils.Close(output);
+                        IOUtils.Close(Output);
                     }
                     else
                     {
-                        IOUtils.CloseWhileHandlingException(output);
+                        IOUtils.CloseWhileHandlingException(Output);
                     }
-                    output = null;
+                    Output = null;
                 }
             }
         }
 
         private void WriteTrailer(long dirStart)
         {
-            output.WriteLong(dirStart);
+            Output.WriteLong(dirStart);
         }
 
 
         private class SimpleFieldWriter : FieldWriter
         {
-            public readonly FieldInfo fieldInfo;
-            public int numIndexTerms;
-            public readonly long indexStart;
-            public readonly long termsStart;
-            public long packedIndexStart;
-            public long packedOffsetsStart;
-            private long numTerms;
+            public readonly FieldInfo FieldInfo;
+            public int NumIndexTerms;
+            public readonly long IndexStart;
+            public readonly long TermsStart;
+            public long PackedIndexStart;
+            public long PackedOffsetsStart;
+            private long _numTerms;
 
             // TODO: we could conceivably make a PackedInts wrapper
             // that auto-grows... then we wouldn't force 6 bytes RAM
             // per index term:
-            private short[] termLengths;
-            private int[] termsPointerDeltas;
-            private long lastTermsPointer;
-            private long totTermLength;
+            private short[] _termLengths;
+            private int[] _termsPointerDeltas;
+            private long _lastTermsPointer;
+            private long _totTermLength;
+
+            private readonly BytesRef _lastTerm = new BytesRef();
 
-            private readonly BytesRef lastTerm = new BytesRef();
+            private readonly FixedGapTermsIndexWriter _fgtiw;
 
-            public SimpleFieldWriter(FieldInfo fieldInfo, long termsFilePointer)
+            public SimpleFieldWriter(FieldInfo fieldInfo, long termsFilePointer, FixedGapTermsIndexWriter fgtiw)
             {
-                this.fieldInfo = fieldInfo;
-                indexStart = output.FilePointer;
-                termsStart = lastTermsPointer = termsFilePointer;
-                termLengths = new short[0];
-                termsPointerDeltas = new int[0];
+                FieldInfo = fieldInfo;
+                IndexStart = fgtiw.Output.FilePointer;
+                TermsStart = _lastTermsPointer = termsFilePointer;
+                _termLengths = new short[0];
+                _termsPointerDeltas = new int[0];
+                _fgtiw = fgtiw;
             }
 
             public override bool CheckIndexTerm(BytesRef text, TermStats stats)
             {
                 // First term is first indexed term:
                 //System.output.println("FGW: checkIndexTerm text=" + text.utf8ToString());
-                if (0 == (numTerms++ % termIndexInterval))
-                {
+                if (0 == (_numTerms++ % _fgtiw._termIndexInterval))
                     return true;
-                }
-                else
-                {
-                    if (0 == numTerms % termIndexInterval)
-                    {
-                        // save last term just before next index term so we
-                        // can compute wasted suffix
-                        lastTerm.CopyBytes(text);
-                    }
-                    return false;
-                }
+
+                // save last term just before next index term so we
+                // can compute wasted suffix
+                if (0 == _numTerms % _fgtiw._termIndexInterval)
+                    _lastTerm.CopyBytes(text);
+                
+                return false;
             }
 
             public override void Add(BytesRef text, TermStats stats, long termsFilePointer)
             {
-                int indexedTermLength = IndexedTermPrefixLength(lastTerm, text);
-                //System.output.println("FGW: add text=" + text.utf8ToString() + " " + text + " fp=" + termsFilePointer);
-
+                int indexedTermLength = _fgtiw.IndexedTermPrefixLength(_lastTerm, text);
+                
                 // write only the min prefix that shows the diff
                 // against prior term
-                output.WriteBytes(text.Bytes, text.Offset, indexedTermLength);
+                _fgtiw.Output.WriteBytes(text.Bytes, text.Offset, indexedTermLength);
 
-                if (termLengths.Length == numIndexTerms)
+                if (_termLengths.Length == NumIndexTerms)
                 {
-                    termLengths = ArrayUtil.Grow(termLengths);
+                    _termLengths = ArrayUtil.Grow(_termLengths);
                 }
-                if (termsPointerDeltas.Length == numIndexTerms)
+                if (_termsPointerDeltas.Length == NumIndexTerms)
                 {
-                    termsPointerDeltas = ArrayUtil.Grow(termsPointerDeltas);
+                    _termsPointerDeltas = ArrayUtil.Grow(_termsPointerDeltas);
                 }
 
                 // save delta terms pointer
-                termsPointerDeltas[numIndexTerms] = (int)(termsFilePointer - lastTermsPointer);
-                lastTermsPointer = termsFilePointer;
+                _termsPointerDeltas[NumIndexTerms] = (int)(termsFilePointer - _lastTermsPointer);
+                _lastTermsPointer = termsFilePointer;
 
                 // save term length (in bytes)
-                Debug.Assert(indexedTermLength <= Short.MAX_VALUE);
-                termLengths[numIndexTerms] = (short)indexedTermLength;
-                totTermLength += indexedTermLength;
+                Debug.Assert(indexedTermLength <= short.MaxValue);
+                _termLengths[NumIndexTerms] = (short)indexedTermLength;
+                _totTermLength += indexedTermLength;
 
-                lastTerm.CopyBytes(text);
-                numIndexTerms++;
+                _lastTerm.CopyBytes(text);
+                NumIndexTerms++;
             }
 
             public override void Finish(long termsFilePointer)
             {
 
                 // write primary terms dict offsets
-                packedIndexStart = output.FilePointer;
+                PackedIndexStart = _fgtiw.Output.FilePointer;
 
-                PackedInts.Writer w = PackedInts.GetWriter(output, numIndexTerms,
+                PackedInts.Writer w = PackedInts.GetWriter(_fgtiw.Output, NumIndexTerms,
                     PackedInts.BitsRequired(termsFilePointer),
                     PackedInts.DEFAULT);
 
                 // relative to our indexStart
                 long upto = 0;
-                for (int i = 0; i < numIndexTerms; i++)
+                for (int i = 0; i < NumIndexTerms; i++)
                 {
-                    upto += termsPointerDeltas[i];
+                    upto += _termsPointerDeltas[i];
                     w.Add(upto);
                 }
                 w.Finish();
 
-                packedOffsetsStart = output.FilePointer;
+                PackedOffsetsStart = _fgtiw.Output.FilePointer;
 
                 // write offsets into the byte[] terms
-                w = PackedInts.GetWriter(output, 1 + numIndexTerms, PackedInts.BitsRequired(totTermLength),
+                w = PackedInts.GetWriter(_fgtiw.Output, 1 + NumIndexTerms, PackedInts.BitsRequired(_totTermLength),
                     PackedInts.DEFAULT);
                 upto = 0;
-                for (int i = 0; i < numIndexTerms; i++)
+                for (int i = 0; i < NumIndexTerms; i++)
                 {
                     w.Add(upto);
-                    upto += termLengths[i];
+                    upto += _termLengths[i];
                 }
                 w.Add(upto);
                 w.Finish();
 
                 // our referrer holds onto us, while other fields are
                 // being written, so don't tie up this RAM:
-                termLengths = null;
-                termsPointerDeltas = null;
+                _termLengths = null;
+                _termsPointerDeltas = null;
             }
         }
     }


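For context on the writer side: every termIndexInterval-th term becomes an index term, and SimpleFieldWriter stores only the shortest prefix that distinguishes it from the immediately preceding term, plus its terms-dict file pointer tracked as an int delta (which is what keeps the transient arrays near 6 bytes of RAM per index term, as the comment in SimpleFieldWriter notes). A rough standalone sketch of that encoding follows; FixedGapSketch and BuildIndex are illustrative names, while IndexedTermPrefixLength mirrors the fallback visible in the hunk above:

    using System;

    static class FixedGapSketch
    {
        // Shortest prefix of indexedTerm that differs from priorTerm:
        // index of the first differing byte plus one, capped by length.
        public static int IndexedTermPrefixLength(byte[] priorTerm, byte[] indexedTerm)
        {
            int limit = Math.Min(priorTerm.Length, indexedTerm.Length);
            for (int i = 0; i < limit; i++)
            {
                if (priorTerm[i] != indexedTerm[i])
                    return i + 1;
            }
            return Math.Min(1 + priorTerm.Length, indexedTerm.Length);
        }

        public static void BuildIndex(byte[][] terms, long[] filePointers, int interval)
        {
            byte[] prior = new byte[0];
            long lastPointer = 0;
            for (int i = 0; i < terms.Length; i++)
            {
                if (i % interval == 0)
                {
                    // The real writer snapshots only the term just before
                    // each index term; tracking every term is equivalent.
                    int prefixLen = IndexedTermPrefixLength(prior, terms[i]);
                    int pointerDelta = (int)(filePointers[i] - lastPointer);
                    lastPointer = filePointers[i];
                    Console.WriteLine("indexTerm={0} prefixLen={1} pointerDelta={2}",
                        i, prefixLen, pointerDelta);
                }
                prior = terms[i];
            }
        }
    }

At Finish time the deltas are reaccumulated into absolute pointers and emitted through PackedInts.GetWriter, sized with PackedInts.BitsRequired, so the on-disk form stores packed absolutes while the deltas only bound the writer's transient memory.
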
Mime
View raw message