lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [47/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory
Date Sun, 26 Feb 2017 23:37:35 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
new file mode 100644
index 0000000..c2bdadf
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
@@ -0,0 +1,221 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MatchNoBits = Lucene.Net.Util.Bits.MatchNoBits;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    // TODO: really this should be in BaseTestPF or somewhere else? useful test!
+    [TestFixture]
+    public class TestReuseDocsEnum : LuceneTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        [Test]
+        public virtual void TestReuseDocsEnumNoReuse()
+        {
+            Directory dir = NewDirectory();
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
+            int numdocs = AtLeast(20);
+            CreateRandomIndex(numdocs, writer, Random());
+            writer.Commit();
+
+            DirectoryReader open = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext ctx in open.Leaves)
+            {
+                AtomicReader indexReader = (AtomicReader)ctx.Reader;
+                Terms terms = indexReader.Terms("body");
+                TermsEnum iterator = terms.GetIterator(null);
+                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
+                MatchNoBits bits = new MatchNoBits(indexReader.MaxDoc);
+                while ((iterator.Next()) != null)
+                {
+                    DocsEnum docs = iterator.Docs(Random().NextBoolean() ? bits : new MatchNoBits(indexReader.MaxDoc), null, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+
+                Assert.AreEqual(terms.Count, enums.Count);
+            }
+            IOUtils.Close(writer, open, dir);
+        }
+
+        // tests for reuse only if bits are the same either null or the same instance
+        [Test]
+        public virtual void TestReuseDocsEnumSameBitsOrNull()
+        {
+            Directory dir = NewDirectory();
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
+            int numdocs = AtLeast(20);
+            CreateRandomIndex(numdocs, writer, Random());
+            writer.Commit();
+
+            DirectoryReader open = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext ctx in open.Leaves)
+            {
+                Terms terms = ((AtomicReader)ctx.Reader).Terms("body");
+                TermsEnum iterator = terms.GetIterator(null);
+                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
+                MatchNoBits bits = new MatchNoBits(open.MaxDoc);
+                DocsEnum docs = null;
+                while ((iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(bits, docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+
+                Assert.AreEqual(1, enums.Count);
+                enums.Clear();
+                iterator = terms.GetIterator(null);
+                docs = null;
+                while ((iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(new MatchNoBits(open.MaxDoc), docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(terms.Count, enums.Count);
+
+                enums.Clear();
+                iterator = terms.GetIterator(null);
+                docs = null;
+                while ((iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(null, docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(1, enums.Count);
+            }
+            IOUtils.Close(writer, open, dir);
+        }
+
+        // make sure we never reuse from another reader even if it is the same field & codec etc
+        [Test]
+        public virtual void TestReuseDocsEnumDifferentReader()
+        {
+            Directory dir = NewDirectory();
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetCodec(cp));
+            int numdocs = AtLeast(20);
+            CreateRandomIndex(numdocs, writer, Random());
+            writer.Commit();
+
+            DirectoryReader firstReader = DirectoryReader.Open(dir);
+            DirectoryReader secondReader = DirectoryReader.Open(dir);
+            IList<AtomicReaderContext> leaves = firstReader.Leaves;
+            IList<AtomicReaderContext> leaves2 = secondReader.Leaves;
+
+            foreach (AtomicReaderContext ctx in leaves)
+            {
+                Terms terms = ((AtomicReader)ctx.Reader).Terms("body");
+                TermsEnum iterator = terms.GetIterator(null);
+                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
+                MatchNoBits bits = new MatchNoBits(firstReader.MaxDoc);
+                iterator = terms.GetIterator(null);
+                DocsEnum docs = null;
+                BytesRef term = null;
+                while ((term = iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(null, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(terms.Count, enums.Count);
+
+                iterator = terms.GetIterator(null);
+                enums.Clear();
+                docs = null;
+                while ((term = iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(bits, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(terms.Count, enums.Count);
+            }
+            IOUtils.Close(writer, firstReader, secondReader, dir);
+        }
+
+        public virtual DocsEnum RandomDocsEnum(string field, BytesRef term, IList<AtomicReaderContext> readers, IBits bits)
+        {
+            if (Random().Next(10) == 0)
+            {
+                return null;
+            }
+            AtomicReader indexReader = (AtomicReader)readers[Random().Next(readers.Count)].Reader;
+            Terms terms = indexReader.Terms(field);
+            if (terms == null)
+            {
+                return null;
+            }
+            TermsEnum iterator = terms.GetIterator(null);
+            if (iterator.SeekExact(term))
+            {
+                return iterator.Docs(bits, null, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+            }
+            return null;
+        }
+
+        /// <summary>
+        /// populates a writer with random stuff. this must be fully reproducible with
+        /// the seed!
+        /// </summary>
+        public static void CreateRandomIndex(int numdocs, RandomIndexWriter writer, Random random)
+        {
+            LineFileDocs lineFileDocs = new LineFileDocs(random);
+
+            for (int i = 0; i < numdocs; i++)
+            {
+                writer.AddDocument(lineFileDocs.NextDoc());
+            }
+
+            lineFileDocs.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs
new file mode 100644
index 0000000..fb309a5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs
@@ -0,0 +1,103 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene41
+{
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BasePostingsFormatTestCase = Lucene.Net.Index.BasePostingsFormatTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests BlockPostingsFormat
+    /// </summary>
+    public class TestBlockPostingsFormat : BasePostingsFormatTestCase
+    {
+        private readonly Codec Codec_Renamed = TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat());
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec_Renamed;
+            }
+        }
+
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs
new file mode 100644
index 0000000..f0f471d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs
@@ -0,0 +1,166 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.Text;
+
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldType = FieldType;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests special cases of BlockPostingsFormat
+    /// </summary>
+    [TestFixture]
+    public class TestBlockPostingsFormat2 : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal RandomIndexWriter Iw;
+        internal IndexWriterConfig Iwc;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewFSDirectory(CreateTempDir("testDFBlockSize"));
+            Iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            Iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
+            Iw = new RandomIndexWriter(Random(), Dir, (IndexWriterConfig)Iwc.Clone());
+            Iw.RandomForceMerge = false; // we will ourselves
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Iw.Dispose();
+            TestUtil.CheckIndex(Dir); // for some extra coverage, checkIndex before we forceMerge
+            Iwc.SetOpenMode(OpenMode.APPEND);
+            IndexWriter iw = new IndexWriter(Dir, (IndexWriterConfig)Iwc.Clone());
+            iw.ForceMerge(1);
+            iw.Dispose();
+            Dir.Dispose(); // just force a checkindex for now
+            base.TearDown();
+        }
+
+        private Document NewDocument()
+        {
+            Document doc = new Document();
+            foreach (IndexOptions option in Enum.GetValues(typeof(IndexOptions)))
+            {
+                var ft = new FieldType(TextField.TYPE_NOT_STORED)
+                {
+                    StoreTermVectors = true,
+                    StoreTermVectorOffsets = true,
+                    StoreTermVectorPositions = true,
+                    StoreTermVectorPayloads = true,
+                    IndexOptions = option
+                };
+                // turn on tvs for a cross-check, since we rely upon checkindex in this test (for now)
+                doc.Add(new Field(option.ToString(), "", ft));
+            }
+            return doc;
+        }
+
+        /// <summary>
+        /// tests terms with df = blocksize </summary>
+        [Test]
+        public virtual void TestDFBlockSize()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    ((Field)f).SetStringValue(f.Name + " " + f.Name + "_2");
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// tests terms with df % blocksize = 0 </summary>
+        [Test]
+        public virtual void TestDFBlockSizeMultiple()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE * 16; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    ((Field)f).SetStringValue(f.Name + " " + f.Name + "_2");
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// tests terms with ttf = blocksize </summary>
+        [Test]
+        public virtual void TestTTFBlockSize()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE / 2; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    ((Field)f).SetStringValue(f.Name + " " + f.Name + " " + f.Name + "_2 " + f.Name + "_2");
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// tests terms with ttf % blocksize = 0 </summary>
+        [Test]
+        public virtual void TestTTFBlockSizeMultiple()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE / 2; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    string proto = (f.Name + " " + f.Name + " " + f.Name + " " + f.Name + " " + f.Name + "_2 " + f.Name + "_2 " + f.Name + "_2 " + f.Name + "_2");
+                    StringBuilder val = new StringBuilder();
+                    for (int j = 0; j < 16; j++)
+                    {
+                        val.Append(proto);
+                        val.Append(" ");
+                    }
+                    ((Field)f).SetStringValue(val.ToString());
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
new file mode 100644
index 0000000..3e6b0e4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
@@ -0,0 +1,571 @@
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CompiledAutomaton = Lucene.Net.Util.Automaton.CompiledAutomaton;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using FieldType = FieldType;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockFixedLengthPayloadFilter = Lucene.Net.Analysis.MockFixedLengthPayloadFilter;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using MockVariableLengthPayloadFilter = Lucene.Net.Analysis.MockVariableLengthPayloadFilter;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using SeekStatus = Lucene.Net.Index.TermsEnum.SeekStatus;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+    using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+
+    /// <summary>
+    /// Tests partial enumeration (only pulling a subset of the indexed data)
+    /// </summary>
+    [TestFixture]
+    public class TestBlockPostingsFormat3 : LuceneTestCase
+    {
+        internal static readonly int MAXDOC = Lucene41PostingsFormat.BLOCK_SIZE * 20;
+
+        // creates 8 fields with different options and does "duels" of fields against each other
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(50000)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void Test()
+        {
+            Directory dir = NewDirectory();
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
+            // TODO we could actually add more fields implemented with different PFs
+            // or, just put this test into the usual rotation?
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwc.Clone());
+            Document doc = new Document();
+            FieldType docsOnlyType = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn this on for a cross-check
+            docsOnlyType.StoreTermVectors = true;
+            docsOnlyType.IndexOptions = IndexOptions.DOCS_ONLY;
+
+            FieldType docsAndFreqsType = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn this on for a cross-check
+            docsAndFreqsType.StoreTermVectors = true;
+            docsAndFreqsType.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+
+            FieldType positionsType = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn these on for a cross-check
+            positionsType.StoreTermVectors = true;
+            positionsType.StoreTermVectorPositions = true;
+            positionsType.StoreTermVectorOffsets = true;
+            positionsType.StoreTermVectorPayloads = true;
+            FieldType offsetsType = new FieldType(positionsType);
+            offsetsType.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            Field field1 = new Field("field1docs", "", docsOnlyType);
+            Field field2 = new Field("field2freqs", "", docsAndFreqsType);
+            Field field3 = new Field("field3positions", "", positionsType);
+            Field field4 = new Field("field4offsets", "", offsetsType);
+            Field field5 = new Field("field5payloadsFixed", "", positionsType);
+            Field field6 = new Field("field6payloadsVariable", "", positionsType);
+            Field field7 = new Field("field7payloadsFixedOffsets", "", offsetsType);
+            Field field8 = new Field("field8payloadsVariableOffsets", "", offsetsType);
+            doc.Add(field1);
+            doc.Add(field2);
+            doc.Add(field3);
+            doc.Add(field4);
+            doc.Add(field5);
+            doc.Add(field6);
+            doc.Add(field7);
+            doc.Add(field8);
+            for (int i = 0; i < MAXDOC; i++)
+            {
+                string stringValue = Convert.ToString(i) + " verycommon " + English.IntToEnglish(i).Replace('-', ' ') + " " + TestUtil.RandomSimpleString(Random());
+                field1.SetStringValue(stringValue);
+                field2.SetStringValue(stringValue);
+                field3.SetStringValue(stringValue);
+                field4.SetStringValue(stringValue);
+                field5.SetStringValue(stringValue);
+                field6.SetStringValue(stringValue);
+                field7.SetStringValue(stringValue);
+                field8.SetStringValue(stringValue);
+                iw.AddDocument(doc);
+            }
+            iw.Dispose();
+            Verify(dir);
+            TestUtil.CheckIndex(dir); // for some extra coverage, checkIndex before we forceMerge
+            iwc.SetOpenMode(OpenMode.APPEND);
+            IndexWriter iw2 = new IndexWriter(dir, (IndexWriterConfig)iwc.Clone());
+            iw2.ForceMerge(1);
+            iw2.Dispose();
+            Verify(dir);
+            dir.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestBlockPostingsFormat3 OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestBlockPostingsFormat3 outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader);
+                if (fieldName.Contains("payloadsFixed"))
+                {
+                    TokenFilter filter = new MockFixedLengthPayloadFilter(new Random(0), tokenizer, 1);
+                    return new TokenStreamComponents(tokenizer, filter);
+                }
+                else if (fieldName.Contains("payloadsVariable"))
+                {
+                    TokenFilter filter = new MockVariableLengthPayloadFilter(new Random(0), tokenizer);
+                    return new TokenStreamComponents(tokenizer, filter);
+                }
+                else
+                {
+                    return new TokenStreamComponents(tokenizer);
+                }
+            }
+        }
+
+        /// <summary>
+        /// Opens the index in <paramref name="dir"/> and, for every leaf reader, duels
+        /// each listed pair of fields with <see cref="AssertTerms"/> (deep comparison),
+        /// asserting their terms and postings enumerate identically.
+        /// </summary>
+        private void Verify(Directory dir)
+        {
+            DirectoryReader ir = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext leaf in ir.Leaves)
+            {
+                AtomicReader leafReader = (AtomicReader)leaf.Reader;
+                // field names suggest each pair differs only in index options / payload style
+                AssertTerms(leafReader.Terms("field1docs"), leafReader.Terms("field2freqs"), true);
+                AssertTerms(leafReader.Terms("field3positions"), leafReader.Terms("field4offsets"), true);
+                AssertTerms(leafReader.Terms("field4offsets"), leafReader.Terms("field5payloadsFixed"), true);
+                AssertTerms(leafReader.Terms("field5payloadsFixed"), leafReader.Terms("field6payloadsVariable"), true);
+                AssertTerms(leafReader.Terms("field6payloadsVariable"), leafReader.Terms("field7payloadsFixedOffsets"), true);
+                AssertTerms(leafReader.Terms("field7payloadsFixedOffsets"), leafReader.Terms("field8payloadsVariableOffsets"), true);
+            }
+            ir.Dispose();
+        }
+
+        // following code is almost an exact dup of code from TestDuelingCodecs: sorry!
+
+        /// <summary>
+        /// Asserts that two <see cref="Terms"/> are equivalent: collection statistics,
+        /// full sequential enumeration, seeking behavior, and (when <paramref name="deep"/>)
+        /// several random regexp automaton intersections.
+        /// </summary>
+        public virtual void AssertTerms(Terms leftTerms, Terms rightTerms, bool deep)
+        {
+            // a field missing on one side must be missing on both
+            if (leftTerms == null || rightTerms == null)
+            {
+                Assert.IsNull(leftTerms);
+                Assert.IsNull(rightTerms);
+                return;
+            }
+            AssertTermsStatistics(leftTerms, rightTerms);
+
+            // NOTE: we don't assert hasOffsets/hasPositions/hasPayloads because they are allowed to be different
+
+            TermsEnum leftTermsEnum = leftTerms.GetIterator(null);
+            TermsEnum rightTermsEnum = rightTerms.GetIterator(null);
+            AssertTermsEnum(leftTermsEnum, rightTermsEnum, true);
+
+            AssertTermsSeeking(leftTerms, rightTerms);
+
+            if (deep)
+            {
+                int numIntersections = AtLeast(3);
+                for (int i = 0; i < numIntersections; i++)
+                {
+                    string re = AutomatonTestUtil.RandomRegexp(Random());
+                    CompiledAutomaton automaton = new CompiledAutomaton((new RegExp(re, RegExp.NONE)).ToAutomaton());
+                    if (automaton.Type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL)
+                    {
+                        // TODO: test start term too
+                        TermsEnum leftIntersection = leftTerms.Intersect(automaton, null);
+                        TermsEnum rightIntersection = rightTerms.Intersect(automaton, null);
+                        // only rarely recurse deeply into the intersected enums
+                        AssertTermsEnum(leftIntersection, rightIntersection, Rarely());
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Collects a random sample of seek targets from the left terms (exact terms,
+        /// truncated prefixes, and copies placed at a non-zero offset), then asserts
+        /// that SeekExact/SeekCeil behave identically on both sides for each target.
+        /// </summary>
+        private void AssertTermsSeeking(Terms leftTerms, Terms rightTerms)
+        {
+            TermsEnum leftEnum = null;
+            TermsEnum rightEnum = null;
+
+            // just an upper bound
+            int numTests = AtLeast(20);
+            Random random = Random();
+
+            // collect this number of terms from the left side
+            HashSet<BytesRef> tests = new HashSet<BytesRef>();
+            int numPasses = 0;
+            while (numPasses < 10 && tests.Count < numTests)
+            {
+                leftEnum = leftTerms.GetIterator(leftEnum);
+                BytesRef term = null;
+                while ((term = leftEnum.Next()) != null)
+                {
+                    int code = random.Next(10);
+                    if (code == 0)
+                    {
+                        // the term
+                        tests.Add(BytesRef.DeepCopyOf(term));
+                    }
+                    else if (code == 1)
+                    {
+                        // truncated subsequence of term
+                        term = BytesRef.DeepCopyOf(term);
+                        if (term.Length > 0)
+                        {
+                            // truncate it
+                            term.Length = random.Next(term.Length);
+                        }
+                        // LUCENENET fix: actually add the truncated term to the test set.
+                        // Without this the branch was a no-op (the copy was discarded) and
+                        // truncated prefixes were never used as seek targets.
+                        tests.Add(term);
+                    }
+                    else if (code == 2)
+                    {
+                        // term, but ensure a non-zero offset
+                        var newbytes = new byte[term.Length + 5];
+                        Array.Copy(term.Bytes, term.Offset, newbytes, 5, term.Length);
+                        tests.Add(new BytesRef(newbytes, 5, term.Length));
+                    }
+                }
+                numPasses++;
+            }
+
+            List<BytesRef> shuffledTests = new List<BytesRef>(tests);
+            Collections.Shuffle(shuffledTests);
+
+            foreach (BytesRef b in shuffledTests)
+            {
+                leftEnum = leftTerms.GetIterator(leftEnum);
+                rightEnum = rightTerms.GetIterator(rightEnum);
+
+                // each seek is issued twice on purpose — presumably to verify the result
+                // is stable when the enum is already positioned (TODO confirm vs upstream)
+                Assert.AreEqual(leftEnum.SeekExact(b), rightEnum.SeekExact(b));
+                Assert.AreEqual(leftEnum.SeekExact(b), rightEnum.SeekExact(b));
+
+                SeekStatus leftStatus;
+                SeekStatus rightStatus;
+
+                leftStatus = leftEnum.SeekCeil(b);
+                rightStatus = rightEnum.SeekCeil(b);
+                Assert.AreEqual(leftStatus, rightStatus);
+                if (leftStatus != SeekStatus.END)
+                {
+                    Assert.AreEqual(leftEnum.Term, rightEnum.Term);
+                }
+
+                leftStatus = leftEnum.SeekCeil(b);
+                rightStatus = rightEnum.SeekCeil(b);
+                Assert.AreEqual(leftStatus, rightStatus);
+                if (leftStatus != SeekStatus.END)
+                {
+                    Assert.AreEqual(leftEnum.Term, rightEnum.Term);
+                }
+            }
+        }
+
+        /// <summary>
+        /// checks collection-level statistics on Terms; a statistic reported as -1
+        /// (not tracked by that implementation) is skipped rather than compared
+        /// </summary>
+        public virtual void AssertTermsStatistics(Terms leftTerms, Terms rightTerms)
+        {
+            // both sides must sort terms the same way for the duel to be meaningful
+            Debug.Assert(leftTerms.Comparer == rightTerms.Comparer);
+            if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1)
+            {
+                Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount);
+            }
+            if (leftTerms.SumDocFreq != -1 && rightTerms.SumDocFreq != -1)
+            {
+                Assert.AreEqual(leftTerms.SumDocFreq, rightTerms.SumDocFreq);
+            }
+            if (leftTerms.SumTotalTermFreq != -1 && rightTerms.SumTotalTermFreq != -1)
+            {
+                Assert.AreEqual(leftTerms.SumTotalTermFreq, rightTerms.SumTotalTermFreq);
+            }
+            if (leftTerms.Count != -1 && rightTerms.Count != -1)
+            {
+                Assert.AreEqual(leftTerms.Count, rightTerms.Count);
+            }
+        }
+
+        /// <summary>
+        /// checks the terms enum sequentially
+        /// if deep is false, it does a 'shallow' test that doesnt go down to the docsenums
+        /// </summary>
+        public virtual void AssertTermsEnum(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum, bool deep)
+        {
+            BytesRef term;
+            // random live-docs filter so the filtered-enumeration path is exercised too
+            IBits randomBits = new RandomBits(MAXDOC, Random().NextDouble(), Random());
+            DocsAndPositionsEnum leftPositions = null;
+            DocsAndPositionsEnum rightPositions = null;
+            DocsEnum leftDocs = null;
+            DocsEnum rightDocs = null;
+
+            while ((term = leftTermsEnum.Next()) != null)
+            {
+                Assert.AreEqual(term, rightTermsEnum.Next());
+                AssertTermStats(leftTermsEnum, rightTermsEnum);
+                if (deep)
+                {
+                    // every flag combination is duelled twice: once unfiltered (null bits)
+                    // and once through randomBits; the enums are passed back in for reuse
+                    // with payloads + off
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions));
+                    // with payloads only
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+
+                    // with offsets only
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+
+                    // with positions only
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsEnum.FLAG_NONE));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsEnum.FLAG_NONE));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsEnum.FLAG_NONE));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsEnum.FLAG_NONE));
+
+                    // with freqs:
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(null, leftDocs), rightDocs = rightTermsEnum.Docs(null, rightDocs));
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(randomBits, leftDocs), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs));
+
+                    // w/o freqs:
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(null, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(null, rightDocs, DocsEnum.FLAG_NONE));
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(randomBits, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs, DocsEnum.FLAG_NONE));
+
+                    // with freqs:
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(null, leftDocs), rightDocs = rightTermsEnum.Docs(null, rightDocs));
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(randomBits, leftDocs), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs));
+
+                    // w/o freqs:
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(null, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(null, rightDocs, DocsEnum.FLAG_NONE));
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(randomBits, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs, DocsEnum.FLAG_NONE));
+                }
+            }
+            // left side is exhausted; the right side must be exhausted too
+            Assert.IsNull(rightTermsEnum.Next());
+        }
+
+        /// <summary>
+        /// checks term-level statistics
+        /// </summary>
+        public virtual void AssertTermStats(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum)
+        {
+            Assert.AreEqual(leftTermsEnum.DocFreq, rightTermsEnum.DocFreq);
+            // TotalTermFreq of -1 is not tracked by that side; only compare when both track it
+            if (leftTermsEnum.TotalTermFreq != -1 && rightTermsEnum.TotalTermFreq != -1)
+            {
+                Assert.AreEqual(leftTermsEnum.TotalTermFreq, rightTermsEnum.TotalTermFreq);
+            }
+        }
+
+        /// <summary>
+        /// checks docs + freqs + positions + payloads, sequentially
+        /// </summary>
+        public virtual void AssertDocsAndPositionsEnum(DocsAndPositionsEnum leftDocs, DocsAndPositionsEnum rightDocs)
+        {
+            // either both enums exist or both are null
+            if (leftDocs == null || rightDocs == null)
+            {
+                Assert.IsNull(leftDocs);
+                Assert.IsNull(rightDocs);
+                return;
+            }
+            // both enums must start unpositioned
+            Assert.AreEqual(-1, leftDocs.DocID);
+            Assert.AreEqual(-1, rightDocs.DocID);
+            int docid;
+            while ((docid = leftDocs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(docid, rightDocs.NextDoc());
+                int freq = leftDocs.Freq;
+                Assert.AreEqual(freq, rightDocs.Freq);
+                for (int i = 0; i < freq; i++)
+                {
+                    Assert.AreEqual(leftDocs.NextPosition(), rightDocs.NextPosition());
+                    // we don't assert offsets/payloads, they are allowed to be different
+                }
+            }
+            // right side must be exhausted at the same point
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, rightDocs.NextDoc());
+        }
+
+        /// <summary>
+        /// checks docs + freqs, sequentially
+        /// </summary>
+        public virtual void AssertDocsEnum(DocsEnum leftDocs, DocsEnum rightDocs)
+        {
+            // either both enums exist or both are null (consistent with
+            // AssertDocsAndPositionsEnum; previously a non-null left with a null right
+            // crashed with a NullReferenceException instead of a clean assertion failure)
+            if (leftDocs == null || rightDocs == null)
+            {
+                Assert.IsNull(leftDocs);
+                Assert.IsNull(rightDocs);
+                return;
+            }
+            // both enums must start unpositioned
+            Assert.AreEqual(-1, leftDocs.DocID);
+            Assert.AreEqual(-1, rightDocs.DocID);
+            int docid;
+            while ((docid = leftDocs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(docid, rightDocs.NextDoc());
+                // we don't assert freqs, they are allowed to be different
+            }
+            // right side must be exhausted at the same point
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, rightDocs.NextDoc());
+        }
+
+        /// <summary>
+        /// checks advancing docs
+        /// </summary>
+        public virtual void AssertDocsSkipping(int docFreq, DocsEnum leftDocs, DocsEnum rightDocs)
+        {
+            // either both enums exist or both are null (consistent with
+            // AssertPositionsSkipping; previously a non-null left with a null right
+            // crashed with a NullReferenceException instead of a clean assertion failure)
+            if (leftDocs == null || rightDocs == null)
+            {
+                Assert.IsNull(leftDocs);
+                Assert.IsNull(rightDocs);
+                return;
+            }
+            int docid = -1;
+            // expected average distance between successive docs for this term
+            int averageGap = MAXDOC / (1 + docFreq);
+            int skipInterval = 16;
+
+            while (true)
+            {
+                if (Random().NextBoolean())
+                {
+                    // nextDoc()
+                    docid = leftDocs.NextDoc();
+                    Assert.AreEqual(docid, rightDocs.NextDoc());
+                }
+                else
+                {
+                    // advance(): jump at least a skip interval ahead plus a random
+                    // fraction of the average gap, to exercise the skip structures
+                    int skip = docid + (int)Math.Ceiling(Math.Abs(skipInterval + Random().NextDouble() * averageGap));
+                    docid = leftDocs.Advance(skip);
+                    Assert.AreEqual(docid, rightDocs.Advance(skip));
+                }
+
+                if (docid == DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    return;
+                }
+                // we don't assert freqs, they are allowed to be different
+            }
+        }
+
+        /// <summary>
+        /// checks advancing docs + positions
+        /// </summary>
+        public virtual void AssertPositionsSkipping(int docFreq, DocsAndPositionsEnum leftDocs, DocsAndPositionsEnum rightDocs)
+        {
+            // either both enums exist or both are null
+            if (leftDocs == null || rightDocs == null)
+            {
+                Assert.IsNull(leftDocs);
+                Assert.IsNull(rightDocs);
+                return;
+            }
+
+            int docid = -1;
+            // expected average distance between successive docs for this term
+            int averageGap = MAXDOC / (1 + docFreq);
+            int skipInterval = 16;
+
+            while (true)
+            {
+                if (Random().NextBoolean())
+                {
+                    // nextDoc()
+                    docid = leftDocs.NextDoc();
+                    Assert.AreEqual(docid, rightDocs.NextDoc());
+                }
+                else
+                {
+                    // advance(): jump at least a skip interval ahead plus a random
+                    // fraction of the average gap, to exercise the skip structures
+                    int skip = docid + (int)Math.Ceiling(Math.Abs(skipInterval + Random().NextDouble() * averageGap));
+                    docid = leftDocs.Advance(skip);
+                    Assert.AreEqual(docid, rightDocs.Advance(skip));
+                }
+
+                if (docid == DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    return;
+                }
+                int freq = leftDocs.Freq;
+                Assert.AreEqual(freq, rightDocs.Freq);
+                for (int i = 0; i < freq; i++)
+                {
+                    Assert.AreEqual(leftDocs.NextPosition(), rightDocs.NextPosition());
+                    // we don't compare the payloads, its allowed that one is empty etc
+                }
+            }
+        }
+
+        /// <summary>
+        /// An <see cref="IBits"/> over a fixed bitset where each bit is set
+        /// independently with probability <c>pctLive</c>.
+        /// </summary>
+        new private class RandomBits : IBits
+        {
+            internal FixedBitSet Bits;
+
+            internal RandomBits(int maxDoc, double pctLive, Random random)
+            {
+                Bits = new FixedBitSet(maxDoc);
+                int doc = 0;
+                while (doc < maxDoc)
+                {
+                    bool live = random.NextDouble() <= pctLive;
+                    if (live)
+                    {
+                        Bits.Set(doc);
+                    }
+                    doc++;
+                }
+            }
+
+            public bool Get(int index)
+            {
+                bool isSet = Bits.Get(index);
+                return isSet;
+            }
+
+            public int Length
+            {
+                get { return Bits.Length; }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs
new file mode 100644
index 0000000..2f6a7bc
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs
@@ -0,0 +1,97 @@
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+    [TestFixture]
+    public class TestForUtil : LuceneTestCase
+    {
+        /// <summary>
+        /// Round-trips random blocks of packed integers through <c>ForUtil</c>:
+        /// encodes them into a file in a <see cref="RAMDirectory"/>, then decodes
+        /// (or randomly skips) each block and verifies the restored values and the
+        /// final file pointer.
+        /// </summary>
+        [Test]
+        public virtual void TestEncodeDecode()
+        {
+            int iterations = RandomInts.NextIntBetween(Random(), 1, 1000);
+            float acceptableOverheadRatio = (float)Random().NextDouble();
+            // extra MAX_DATA_SIZE tail so the last WriteBlock can safely over-read
+            int[] values = new int[(iterations - 1) * Lucene41PostingsFormat.BLOCK_SIZE + ForUtil.MAX_DATA_SIZE];
+            for (int i = 0; i < iterations; ++i)
+            {
+                int bpv = Random().Next(32);
+                if (bpv == 0)
+                {
+                    // all-equal block: exercises the special single-value encoding path
+                    int value = RandomInts.NextIntBetween(Random(), 0, int.MaxValue);
+                    for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
+                    {
+                        values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = value;
+                    }
+                }
+                else
+                {
+                    // random values that fit in bpv bits
+                    for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
+                    {
+                        values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = RandomInts.NextIntBetween(Random(), 0, (int)PackedInt32s.MaxValue(bpv));
+                    }
+                }
+            }
+
+            Directory d = new RAMDirectory();
+            long endPointer;
+
+            {
+                // encode
+                IndexOutput @out = d.CreateOutput("test.bin", IOContext.DEFAULT);
+                ForUtil forUtil = new ForUtil(acceptableOverheadRatio, @out);
+
+                for (int i = 0; i < iterations; ++i)
+                {
+                    forUtil.WriteBlock(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, values.Length), new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], @out);
+                }
+                endPointer = @out.FilePointer;
+                @out.Dispose();
+            }
+
+            {
+                // decode
+                IndexInput @in = d.OpenInput("test.bin", IOContext.READ_ONCE);
+                ForUtil forUtil = new ForUtil(@in);
+                for (int i = 0; i < iterations; ++i)
+                {
+                    if (Random().NextBoolean())
+                    {
+                        // randomly skip instead of reading, to exercise SkipBlock
+                        forUtil.SkipBlock(@in);
+                        continue;
+                    }
+                    int[] restored = new int[Lucene41.ForUtil.MAX_DATA_SIZE];
+                    forUtil.ReadBlock(@in, new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], restored);
+                    Assert.AreEqual(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, (i + 1) * Lucene41PostingsFormat.BLOCK_SIZE), Arrays.CopyOf(restored, Lucene41PostingsFormat.BLOCK_SIZE));
+                }
+                // skipping and reading must land on the exact end of the data
+                Assert.AreEqual(endPointer, @in.FilePointer);
+                @in.Dispose();
+            }
+
+            // LUCENENET: release the directory's resources (was previously leaked)
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
new file mode 100644
index 0000000..0cbb28b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
@@ -0,0 +1,146 @@
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Attributes;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BaseStoredFieldsFormatTestCase = Lucene.Net.Index.BaseStoredFieldsFormatTestCase;
+
+    /// <summary>
+    /// Tests the Lucene 4.1 stored fields format by running the shared
+    /// <see cref="BaseStoredFieldsFormatTestCase"/> suite against <c>Lucene41RWCodec</c>.
+    /// </summary>
+    public class TestLucene41StoredFieldsFormat : BaseStoredFieldsFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        // codec under test; a fresh instance is returned on each access
+        protected override Codec Codec
+        {
+            get
+            {
+                return new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+
+        #region BaseStoredFieldsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestRandomStoredFields()
+        {
+            base.TestRandomStoredFields();
+        }
+
+        [Test]
+        // LUCENE-1727: make sure doc fields are stored in order
+        public override void TestStoredFieldsOrder()
+        {
+            base.TestStoredFieldsOrder();
+        }
+
+        [Test]
+        // LUCENE-1219
+        public override void TestBinaryFieldOffsetLength()
+        {
+            base.TestBinaryFieldOffsetLength();
+        }
+
+        [Test]
+        public override void TestNumericField()
+        {
+            base.TestNumericField();
+        }
+
+        [Test]
+        public override void TestIndexedBit()
+        {
+            base.TestIndexedBit();
+        }
+
+        [Test]
+        public override void TestReadSkip()
+        {
+            base.TestReadSkip();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestEmptyDocs()
+        {
+            base.TestEmptyDocs();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestConcurrentReads()
+        {
+            base.TestConcurrentReads();
+        }
+
+        [Test]
+        public override void TestWriteReadMerge()
+        {
+            base.TestWriteReadMerge();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestBigDocuments()
+        {
+            base.TestBigDocuments();
+        }
+
+        [Test]
+        public override void TestBulkMergeWithDeletes()
+        {
+            base.TestBulkMergeWithDeletes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
new file mode 100644
index 0000000..f9c47ce
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
@@ -0,0 +1,581 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene42
+{
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BaseCompressingDocValuesFormatTestCase = Lucene.Net.Index.BaseCompressingDocValuesFormatTestCase;
+
+    /// <summary>
+    /// Tests Lucene42DocValuesFormat via the <see cref="Lucene42RWCodec"/>.
+    /// </summary>
+    public class TestLucene42DocValuesFormat : BaseCompressingDocValuesFormatTestCase
+    {
+        // Codec under test; created once per fixture in BeforeClass().
+        // (LUCENENET: renamed from the machine-converted "Codec_Renamed" to the
+        // conventional private-field name.)
+        private Codec _codec;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+            _codec = new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+        }
+
+        /// <summary>
+        /// The codec exercised by the inherited doc-values tests.
+        /// </summary>
+        protected override Codec Codec
+        {
+            get { return _codec; }
+        }
+
+        /// <summary>
+        /// The 4.2 format under test does not accept huge binary values
+        /// (see <see cref="TestHugeBinaryValues"/> / <see cref="TestHugeBinaryValueLimit"/>).
+        /// </summary>
+        protected internal override bool CodecAcceptsHugeBinaryValues(string field)
+        {
+            return false;
+        }
+
+        #region BaseCompressingDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestUniqueValuesCompression() => base.TestUniqueValuesCompression();
+
+        [Test]
+        public override void TestDateCompression() => base.TestDateCompression();
+
+        [Test]
+        public override void TestSingleBigValueCompression() => base.TestSingleBigValueCompression();
+
+        #endregion
+
+        #region BaseDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestOneNumber() => base.TestOneNumber();
+
+        [Test]
+        public override void TestOneFloat() => base.TestOneFloat();
+
+        [Test]
+        public override void TestTwoNumbers() => base.TestTwoNumbers();
+
+        [Test]
+        public override void TestTwoBinaryValues() => base.TestTwoBinaryValues();
+
+        [Test]
+        public override void TestTwoFieldsMixed() => base.TestTwoFieldsMixed();
+
+        [Test]
+        public override void TestThreeFieldsMixed() => base.TestThreeFieldsMixed();
+
+        [Test]
+        public override void TestThreeFieldsMixed2() => base.TestThreeFieldsMixed2();
+
+        [Test]
+        public override void TestTwoDocumentsNumeric() => base.TestTwoDocumentsNumeric();
+
+        [Test]
+        public override void TestTwoDocumentsMerged() => base.TestTwoDocumentsMerged();
+
+        [Test]
+        public override void TestBigNumericRange() => base.TestBigNumericRange();
+
+        [Test]
+        public override void TestBigNumericRange2() => base.TestBigNumericRange2();
+
+        [Test]
+        public override void TestBytes() => base.TestBytes();
+
+        [Test]
+        public override void TestBytesTwoDocumentsMerged() => base.TestBytesTwoDocumentsMerged();
+
+        [Test]
+        public override void TestSortedBytes() => base.TestSortedBytes();
+
+        [Test]
+        public override void TestSortedBytesTwoDocuments() => base.TestSortedBytesTwoDocuments();
+
+        [Test]
+        public override void TestSortedBytesThreeDocuments() => base.TestSortedBytesThreeDocuments();
+
+        [Test]
+        public override void TestSortedBytesTwoDocumentsMerged() => base.TestSortedBytesTwoDocumentsMerged();
+
+        [Test]
+        public override void TestSortedMergeAwayAllValues() => base.TestSortedMergeAwayAllValues();
+
+        [Test]
+        public override void TestBytesWithNewline() => base.TestBytesWithNewline();
+
+        [Test]
+        public override void TestMissingSortedBytes() => base.TestMissingSortedBytes();
+
+        [Test]
+        public override void TestSortedTermsEnum() => base.TestSortedTermsEnum();
+
+        [Test]
+        public override void TestEmptySortedBytes() => base.TestEmptySortedBytes();
+
+        [Test]
+        public override void TestEmptyBytes() => base.TestEmptyBytes();
+
+        [Test]
+        public override void TestVeryLargeButLegalBytes() => base.TestVeryLargeButLegalBytes();
+
+        [Test]
+        public override void TestVeryLargeButLegalSortedBytes() => base.TestVeryLargeButLegalSortedBytes();
+
+        [Test]
+        public override void TestCodecUsesOwnBytes() => base.TestCodecUsesOwnBytes();
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytes() => base.TestCodecUsesOwnSortedBytes();
+
+        [Test]
+        public override void TestCodecUsesOwnBytesEachTime() => base.TestCodecUsesOwnBytesEachTime();
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytesEachTime() => base.TestCodecUsesOwnSortedBytesEachTime();
+
+        /*
+         * Simple test case to show how to use the API
+         */
+        [Test]
+        public override void TestDocValuesSimple() => base.TestDocValuesSimple();
+
+        [Test]
+        public override void TestRandomSortedBytes() => base.TestRandomSortedBytes();
+
+        [Test]
+        public override void TestBooleanNumericsVsStoredFields() => base.TestBooleanNumericsVsStoredFields();
+
+        [Test]
+        public override void TestByteNumericsVsStoredFields() => base.TestByteNumericsVsStoredFields();
+
+        [Test]
+        public override void TestByteMissingVsFieldCache() => base.TestByteMissingVsFieldCache();
+
+        [Test]
+        public override void TestShortNumericsVsStoredFields() => base.TestShortNumericsVsStoredFields();
+
+        [Test]
+        public override void TestShortMissingVsFieldCache() => base.TestShortMissingVsFieldCache();
+
+        [Test]
+        public override void TestIntNumericsVsStoredFields() => base.TestIntNumericsVsStoredFields();
+
+        [Test]
+        public override void TestIntMissingVsFieldCache() => base.TestIntMissingVsFieldCache();
+
+        [Test]
+        public override void TestLongNumericsVsStoredFields() => base.TestLongNumericsVsStoredFields();
+
+        [Test]
+        public override void TestLongMissingVsFieldCache() => base.TestLongMissingVsFieldCache();
+
+        [Test]
+        public override void TestBinaryFixedLengthVsStoredFields() => base.TestBinaryFixedLengthVsStoredFields();
+
+        [Test]
+        public override void TestBinaryVariableLengthVsStoredFields() => base.TestBinaryVariableLengthVsStoredFields();
+
+        [Test]
+        public override void TestSortedFixedLengthVsStoredFields() => base.TestSortedFixedLengthVsStoredFields();
+
+        [Test]
+        public override void TestSortedFixedLengthVsFieldCache() => base.TestSortedFixedLengthVsFieldCache();
+
+        [Test]
+        public override void TestSortedVariableLengthVsFieldCache() => base.TestSortedVariableLengthVsFieldCache();
+
+        [Test]
+        public override void TestSortedVariableLengthVsStoredFields() => base.TestSortedVariableLengthVsStoredFields();
+
+        [Test]
+        public override void TestSortedSetOneValue() => base.TestSortedSetOneValue();
+
+        [Test]
+        public override void TestSortedSetTwoFields() => base.TestSortedSetTwoFields();
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsMerged() => base.TestSortedSetTwoDocumentsMerged();
+
+        [Test]
+        public override void TestSortedSetTwoValues() => base.TestSortedSetTwoValues();
+
+        [Test]
+        public override void TestSortedSetTwoValuesUnordered() => base.TestSortedSetTwoValuesUnordered();
+
+        [Test]
+        public override void TestSortedSetThreeValuesTwoDocs() => base.TestSortedSetThreeValuesTwoDocs();
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissing() => base.TestSortedSetTwoDocumentsLastMissing();
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissingMerge() => base.TestSortedSetTwoDocumentsLastMissingMerge();
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissing() => base.TestSortedSetTwoDocumentsFirstMissing();
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissingMerge() => base.TestSortedSetTwoDocumentsFirstMissingMerge();
+
+        [Test]
+        public override void TestSortedSetMergeAwayAllValues() => base.TestSortedSetMergeAwayAllValues();
+
+        [Test]
+        public override void TestSortedSetTermsEnum() => base.TestSortedSetTermsEnum();
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsStoredFields() => base.TestSortedSetFixedLengthVsStoredFields();
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsStoredFields() => base.TestSortedSetVariableLengthVsStoredFields();
+
+        [Test]
+        public override void TestSortedSetFixedLengthSingleValuedVsStoredFields() => base.TestSortedSetFixedLengthSingleValuedVsStoredFields();
+
+        [Test]
+        public override void TestSortedSetVariableLengthSingleValuedVsStoredFields() => base.TestSortedSetVariableLengthSingleValuedVsStoredFields();
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsUninvertedField() => base.TestSortedSetFixedLengthVsUninvertedField();
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsUninvertedField() => base.TestSortedSetVariableLengthVsUninvertedField();
+
+        [Test]
+        public override void TestGCDCompression() => base.TestGCDCompression();
+
+        [Test]
+        public override void TestZeros() => base.TestZeros();
+
+        [Test]
+        public override void TestZeroOrMin() => base.TestZeroOrMin();
+
+        [Test]
+        public override void TestTwoNumbersOneMissing() => base.TestTwoNumbersOneMissing();
+
+        [Test]
+        public override void TestTwoNumbersOneMissingWithMerging() => base.TestTwoNumbersOneMissingWithMerging();
+
+        [Test]
+        public override void TestThreeNumbersOneMissingWithMerging() => base.TestThreeNumbersOneMissingWithMerging();
+
+        [Test]
+        public override void TestTwoBytesOneMissing() => base.TestTwoBytesOneMissing();
+
+        [Test]
+        public override void TestTwoBytesOneMissingWithMerging() => base.TestTwoBytesOneMissingWithMerging();
+
+        [Test]
+        public override void TestThreeBytesOneMissingWithMerging() => base.TestThreeBytesOneMissingWithMerging();
+
+        // LUCENE-4853
+        [Test]
+        public override void TestHugeBinaryValues() => base.TestHugeBinaryValues();
+
+        // TODO: get this out of here and into the deprecated codecs (4.0, 4.2)
+        [Test]
+        public override void TestHugeBinaryValueLimit() => base.TestHugeBinaryValueLimit();
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (binary/numeric/sorted, no missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads() => base.TestThreads();
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (all types + missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads2() => base.TestThreads2();
+
+        // LUCENE-5218
+        [Test]
+        public override void TestEmptyBinaryValueOnPageSizes() => base.TestEmptyBinaryValueOnPageSizes();
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability() => base.TestMergeStability();
+
+        #endregion
+    }
+}
\ No newline at end of file


Mime
View raw message