lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [05/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory
Date Sun, 26 Feb 2017 23:36:53 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
new file mode 100644
index 0000000..63f77d9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
@@ -0,0 +1,613 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.IO;
+
+namespace Lucene.Net.Search
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// A basic 'positive' Unit test class for the FieldCacheRangeFilter class.
+    ///
+    /// <para/>
+    /// NOTE: at the moment, this class only tests for 'positive' results,
+    /// it does not verify the results to ensure there are no 'false positives',
+    /// nor does it adequately test 'negative' results.  It also does not test
+    /// that garbage in results in an Exception.
+    /// </summary>
+    [TestFixture]
+    public class TestFieldCacheRangeFilter : BaseTestRangeFilter
+    {
+        /// <summary>
+        /// LUCENENET specific. Ensure we have an infostream attached to the default FieldCache
+        /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream() 
+        /// method (which polluted the state of these tests), but we need to make the tests self-contained 
+        /// so they can be run correctly regardless of order. Not setting the InfoStream skips an execution
+        /// path within these tests, so we should do it to make sure we test all of the code.
+        /// </summary>
+        public override void SetUp()
+        {
+            base.SetUp();
+            FieldCache.DEFAULT.InfoStream = new StringWriter();
+        }
+
+        /// <summary>
+        /// LUCENENET specific. See <see cref="SetUp()"/>. Dispose our InfoStream and set it to null
+        /// to avoid polluting the state of other tests.
+        /// </summary>
+        public override void TearDown()
+        {
+            FieldCache.DEFAULT.InfoStream.Dispose();
+            FieldCache.DEFAULT.InfoStream = null;
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestRangeFilterId()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int medId = ((MaxId - MinId) / 2);
+
+            string minIP = Pad(MinId);
+            string maxIP = Pad(MaxId);
+            string medIP = Pad(medId);
+
+            int numDocs = reader.NumDocs;
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test id, bounded on both ends
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, maxIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, medIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, maxIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, maxIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, maxIP, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, medIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, minIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, medIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, maxIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, minIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, minIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, maxIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, medIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterRand()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            string minRP = Pad(SignedIndexDir.MinR);
+            string maxRP = Pad(SignedIndexDir.MaxR);
+
+            int numDocs = reader.NumDocs;
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test extremes, bounded on both ends
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but biggest");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but smallest");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but extremes");
+
+            // unbounded
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "smallest and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, maxRP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "biggest and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not smallest, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, maxRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not biggest, but down");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, minRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, maxRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, minRP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, minRP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, maxRP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+        }
+
+        // byte-ranges cannot be tested, because all ranges are too big for bytes, need an extra range for that
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterShorts()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            int medId = ((MaxId - MinId) / 2);
+            short? minIdO = Convert.ToInt16((short)MinId);
+            short? maxIdO = Convert.ToInt16((short)MaxId);
+            short? medIdO = Convert.ToInt16((short)medId);
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+#pragma warning disable 612, 618
+            // test id, bounded on both ends
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, medIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, minIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, medIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, minIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+            // special cases
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", Convert.ToInt16(short.MaxValue), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, Convert.ToInt16(short.MinValue), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "inverse range");
+#pragma warning restore 612, 618
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterInts()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            int medId = ((MaxId - MinId) / 2);
+            int? minIdO = Convert.ToInt32(MinId);
+            int? maxIdO = Convert.ToInt32(MaxId);
+            int? medIdO = Convert.ToInt32(medId);
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test id, bounded on both ends
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, medIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, minIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, medIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, minIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+            // special cases
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", Convert.ToInt32(int.MaxValue), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, Convert.ToInt32(int.MinValue), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "inverse range");
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterLongs()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            int medId = ((MaxId - MinId) / 2);
+            long? minIdO = Convert.ToInt64(MinId);
+            long? maxIdO = Convert.ToInt64(MaxId);
+            long? medIdO = Convert.ToInt64(medId);
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test id, bounded on both ends
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, medIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, minIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, medIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, minIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+            // special cases
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", Convert.ToInt64(long.MaxValue), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, Convert.ToInt64(long.MinValue), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "inverse range");
+        }
+
+        // float and double tests are a bit minimalistic, but its complicated, because missing precision
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterFloats()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            float? minIdO = Convert.ToSingle(MinId + .5f);
+            float? medIdO = Convert.ToSingle((float)minIdO + ((MaxId - MinId)) / 2.0f);
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs / 2, result.Length, "find all");
+            int count = 0;
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", null, medIdO, F, T), numDocs).ScoreDocs;
+            count += result.Length;
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", medIdO, null, F, F), numDocs).ScoreDocs;
+            count += result.Length;
+            Assert.AreEqual(numDocs, count, "sum of two concenatted ranges");
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", Convert.ToSingle(float.PositiveInfinity), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", null, Convert.ToSingle(float.NegativeInfinity), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterDoubles()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            double? minIdO = Convert.ToDouble(MinId + .5);
+            double? medIdO = Convert.ToDouble((float)minIdO + ((MaxId - MinId)) / 2.0);
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs / 2, result.Length, "find all");
+            int count = 0;
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, medIdO, F, T), numDocs).ScoreDocs;
+            count += result.Length;
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", medIdO, null, F, F), numDocs).ScoreDocs;
+            count += result.Length;
+            Assert.AreEqual(numDocs, count, "sum of two concenatted ranges");
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", Convert.ToDouble(double.PositiveInfinity), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, Convert.ToDouble(double.NegativeInfinity), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+        }
+
+        // test using a sparse index (with deleted docs).
+        [Test]
+        public virtual void TestSparseIndex()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            for (int d = -20; d <= 20; d++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", Convert.ToString(d), Field.Store.NO));
+                doc.Add(NewStringField("body", "body", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+
+            writer.ForceMerge(1);
+            writer.DeleteDocuments(new Term("id", "0"));
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher search = NewSearcher(reader);
+            Assert.IsTrue(reader.HasDeletions);
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+#pragma warning disable 612, 618
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)20, T, T), 100).ScoreDocs;
+            Assert.AreEqual(40, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)0, (sbyte?)20, T, T), 100).ScoreDocs;
+            Assert.AreEqual(20, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)0, T, T), 100).ScoreDocs;
+            Assert.AreEqual(20, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)10, (sbyte?)20, T, T), 100).ScoreDocs;
+            Assert.AreEqual(11, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)-10, T, T), 100).ScoreDocs;
+            Assert.AreEqual(11, result.Length, "find all");
+#pragma warning restore 612, 618
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+
+        #region SorterTestBase
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
        [Test]
        public override void TestPad()
        {
            // Re-declared here only so NUnit discovers the inherited test in this
            // fixture's context (see the SorterTestBase region note above).
            base.TestPad();
        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
new file mode 100644
index 0000000..ee87c43
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
@@ -0,0 +1,86 @@
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Attributes;
+    using NUnit.Framework;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests the FieldCacheRewriteMethod with random regular expressions
+    /// </summary>
+    [TestFixture]
+    public class TestFieldCacheRewriteMethod : TestRegexpRandom2
+    {
+        /// <summary>
+        /// Test fieldcache rewrite against filter rewrite </summary>
+        protected internal override void AssertSame(string regexp)
+        {
+            RegexpQuery fieldCache = new RegexpQuery(new Term(FieldName, regexp), RegExp.NONE);
+            fieldCache.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
+
+            RegexpQuery filter = new RegexpQuery(new Term(FieldName, regexp), RegExp.NONE);
+            filter.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+
+            TopDocs fieldCacheDocs = Searcher1.Search(fieldCache, 25);
+            TopDocs filterDocs = Searcher2.Search(filter, 25);
+
+            CheckHits.CheckEqual(fieldCache, fieldCacheDocs.ScoreDocs, filterDocs.ScoreDocs);
+        }
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            RegexpQuery a1 = new RegexpQuery(new Term(FieldName, "[aA]"), RegExp.NONE);
+            RegexpQuery a2 = new RegexpQuery(new Term(FieldName, "[aA]"), RegExp.NONE);
+            RegexpQuery b = new RegexpQuery(new Term(FieldName, "[bB]"), RegExp.NONE);
+            Assert.AreEqual(a1, a2);
+            Assert.IsFalse(a1.Equals(b));
+
+            a1.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
+            a2.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
+            b.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
+            Assert.AreEqual(a1, a2);
+            Assert.IsFalse(a1.Equals(b));
+            QueryUtils.Check(a1);
+        }
+
+
+
+        #region TestRegexpRandom2
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        /// <summary>
+        /// test a bunch of random regular expressions </summary>
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(60000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestRegexps()
+        {
+            base.TestRegexps();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
new file mode 100644
index 0000000..a7d231f
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
@@ -0,0 +1,80 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    /// <summary>
+    /// A basic unit test for FieldCacheTermsFilter
+    /// </summary>
+    /// <seealso cref="Lucene.Net.Search.FieldCacheTermsFilter"/>
+    [TestFixture]
+    public class TestFieldCacheTermsFilter : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestMissingTerms()
+        {
+            string fieldName = "field1";
+            Directory rd = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), rd, Similarity, TimeZone);
+            for (int i = 0; i < 100; i++)
+            {
+                Document doc = new Document();
+                int term = i * 10; //terms are units of 10;
+                doc.Add(NewStringField(fieldName, "" + term, Field.Store.YES));
+                w.AddDocument(doc);
+            }
+            IndexReader reader = w.Reader;
+            w.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+            int numDocs = reader.NumDocs;
+            ScoreDoc[] results;
+            MatchAllDocsQuery q = new MatchAllDocsQuery();
+
+            List<string> terms = new List<string>();
+            terms.Add("5");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
+            Assert.AreEqual(0, results.Length, "Must match nothing");
+
+            terms = new List<string>();
+            terms.Add("10");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
+            Assert.AreEqual(1, results.Length, "Must match 1");
+
+            terms = new List<string>();
+            terms.Add("10");
+            terms.Add("20");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
+            Assert.AreEqual(2, results.Length, "Must match 2");
+
+            reader.Dispose();
+            rd.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
new file mode 100644
index 0000000..4930c24
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
@@ -0,0 +1,127 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    ///
+    [TestFixture]
+    public class TestFieldValueFilter : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestFieldValueFilterNoValue()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int docs = AtLeast(10);
+            int[] docStates = BuildIndex(writer, docs);
+            int numDocsNoValue = 0;
+            for (int i = 0; i < docStates.Length; i++)
+            {
+                if (docStates[i] == 0)
+                {
+                    numDocsNoValue++;
+                }
+            }
+
+            IndexReader reader = DirectoryReader.Open(directory);
+            IndexSearcher searcher = NewSearcher(reader);
+            TopDocs search = searcher.Search(new TermQuery(new Term("all", "test")), new FieldValueFilter("some", true), docs);
+            Assert.AreEqual(search.TotalHits, numDocsNoValue);
+
+            ScoreDoc[] scoreDocs = search.ScoreDocs;
+            foreach (ScoreDoc scoreDoc in scoreDocs)
+            {
+                Assert.IsNull(reader.Document(scoreDoc.Doc).Get("some"));
+            }
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFieldValueFilter_Mem()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int docs = AtLeast(10);
+            int[] docStates = BuildIndex(writer, docs);
+            int numDocsWithValue = 0;
+            for (int i = 0; i < docStates.Length; i++)
+            {
+                if (docStates[i] == 1)
+                {
+                    numDocsWithValue++;
+                }
+            }
+            IndexReader reader = DirectoryReader.Open(directory);
+            IndexSearcher searcher = NewSearcher(reader);
+            TopDocs search = searcher.Search(new TermQuery(new Term("all", "test")), new FieldValueFilter("some"), docs);
+            Assert.AreEqual(search.TotalHits, numDocsWithValue);
+
+            ScoreDoc[] scoreDocs = search.ScoreDocs;
+            foreach (ScoreDoc scoreDoc in scoreDocs)
+            {
+                Assert.AreEqual("value", reader.Document(scoreDoc.Doc).Get("some"));
+            }
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        private int[] BuildIndex(RandomIndexWriter writer, int docs)
+        {
+            int[] docStates = new int[docs];
+            for (int i = 0; i < docs; i++)
+            {
+                Document doc = new Document();
+                if (Random().NextBoolean())
+                {
+                    docStates[i] = 1;
+                    doc.Add(NewTextField("some", "value", Field.Store.YES));
+                }
+                doc.Add(NewTextField("all", "test", Field.Store.NO));
+                doc.Add(NewTextField("id", "" + i, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+            int numDeletes = Random().Next(docs);
+            for (int i = 0; i < numDeletes; i++)
+            {
+                int docID = Random().Next(docs);
+                writer.DeleteDocuments(new Term("id", "" + docID));
+                docStates[docID] = 2;
+            }
+            writer.Dispose();
+            return docStates;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
new file mode 100644
index 0000000..9847b9a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
@@ -0,0 +1,719 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using System;
+using System.Collections;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.Reflection;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FilterStrategy = Lucene.Net.Search.FilteredQuery.FilterStrategy;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// FilteredQuery unit tests (ported from Lucene's JUnit tests).
+    ///
+    /// <para>Created: Apr 21, 2004 1:21:46 PM</para>
+    ///
+    ///
+    /// @since   1.4
+    /// </summary>
+    [TestFixture]
+    public class TestFilteredQuery : LuceneTestCase
+    {
+        private IndexSearcher Searcher;
+        private IndexReader Reader;
+        private Directory Directory;
+        private Query Query;
+        private Filter Filter;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "one two three four five", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "b", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "one two three four", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "d", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "one two three y", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "a", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "one two x", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "c", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            // tests here require single segment (eg try seed
+            // 8239472272678419952L), because SingleDocTestFilter(x)
+            // blindly accepts that docID in any sub-segment
+            writer.ForceMerge(1);
+
+            Reader = writer.Reader;
+            writer.Dispose();
+
+            Searcher = NewSearcher(Reader);
+
+            Query = new TermQuery(new Term("field", "three"));
+            Filter = NewStaticFilterB();
+        }
+
+        // must be static for serialization tests
+        private static Filter NewStaticFilterB()
+        {
+            return new FilterAnonymousInnerClassHelper();
+        }
+
+        private class FilterAnonymousInnerClassHelper : Filter
+        {
+            public FilterAnonymousInnerClassHelper()
+            {
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                if (acceptDocs == null)
+                {
+                    acceptDocs = new Bits.MatchAllBits(5);
+                }
+                BitArray bitset = new BitArray(5);
+                if (acceptDocs.Get(1))
+                {
+                    bitset.SafeSet(1, true);
+                }
+                if (acceptDocs.Get(3))
+                {
+                    bitset.SafeSet(3, true);
+                }
+                return new DocIdBitSet(bitset);
+            }
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestFilteredQuery_Mem()
+        {
+            // force the filter to be executed as bits
+            TFilteredQuery(true);
+            // force the filter to be executed as iterator
+            TFilteredQuery(false);
+        }
+
+        private void TFilteredQuery(bool useRandomAccess)
+        {
+            Query filteredquery = new FilteredQuery(Query, Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            ScoreDoc[] hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(1, hits[0].Doc);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            hits = Searcher.Search(filteredquery, null, 1000, new Sort(new SortField("sorter", SortFieldType.STRING))).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(1, hits[0].Doc);
+
+            filteredquery = new FilteredQuery(new TermQuery(new Term("field", "one")), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            filteredquery = new FilteredQuery(new MatchAllDocsQuery(), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            filteredquery = new FilteredQuery(new TermQuery(new Term("field", "x")), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(3, hits[0].Doc);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            filteredquery = new FilteredQuery(new TermQuery(new Term("field", "y")), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            // test boost
+            Filter f = NewStaticFilterA();
+
+            float boost = 2.5f;
+            BooleanQuery bq1 = new BooleanQuery();
+            TermQuery tq = new TermQuery(new Term("field", "one"));
+            tq.Boost = boost;
+            bq1.Add(tq, Occur.MUST);
+            bq1.Add(new TermQuery(new Term("field", "five")), Occur.MUST);
+
+            BooleanQuery bq2 = new BooleanQuery();
+            tq = new TermQuery(new Term("field", "one"));
+            filteredquery = new FilteredQuery(tq, f, RandomFilterStrategy(Random(), useRandomAccess));
+            filteredquery.Boost = boost;
+            bq2.Add(filteredquery, Occur.MUST);
+            bq2.Add(new TermQuery(new Term("field", "five")), Occur.MUST);
+            AssertScoreEquals(bq1, bq2);
+
+            Assert.AreEqual(boost, filteredquery.Boost, 0);
+            Assert.AreEqual(1.0f, tq.Boost, 0); // the boost value of the underlying query shouldn't have changed
+        }
+
+        // must be static for serialization tests
+        private static Filter NewStaticFilterA()
+        {
+            return new FilterAnonymousInnerClassHelper2();
+        }
+
+        private class FilterAnonymousInnerClassHelper2 : Filter
+        {
+            public FilterAnonymousInnerClassHelper2()
+            {
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                Assert.IsNull(acceptDocs, "acceptDocs should be null, as we have an index without deletions");
+                BitArray bitset = new BitArray(5, true);
+                return new DocIdBitSet(bitset);
+            }
+        }
+
+        /// <summary>
+        /// Tests whether the scores of the two queries are the same.
+        /// </summary>
+        public virtual void AssertScoreEquals(Query q1, Query q2)
+        {
+            ScoreDoc[] hits1 = Searcher.Search(q1, null, 1000).ScoreDocs;
+            ScoreDoc[] hits2 = Searcher.Search(q2, null, 1000).ScoreDocs;
+
+            Assert.AreEqual(hits1.Length, hits2.Length);
+
+            for (int i = 0; i < hits1.Length; i++)
+            {
+                Assert.AreEqual(hits1[i].Score, hits2[i].Score, 0.000001f);
+            }
+        }
+
+        /// <summary>
+        /// this tests FilteredQuery's rewrite correctness
+        /// </summary>
+        [Test]
+        public virtual void TestRangeQuery()
+        {
+            // force the filter to be executed as bits
+            TRangeQuery(true);
+            TRangeQuery(false);
+        }
+
+        private void TRangeQuery(bool useRandomAccess)
+        {
+            TermRangeQuery rq = TermRangeQuery.NewStringRange("sorter", "b", "d", true, true);
+
+            Query filteredquery = new FilteredQuery(rq, Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            ScoreDoc[] hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestBooleanMUST()
+        {
+            // force the filter to be executed as bits
+            TBooleanMUST(true);
+            // force the filter to be executed as iterator
+            TBooleanMUST(false);
+        }
+
+        private void TBooleanMUST(bool useRandomAccess)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(0), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.MUST);
+            query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.MUST);
+            ScoreDoc[] hits = Searcher.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestBooleanSHOULD()
+        {
+            // force the filter to be executed as bits
+            TBooleanSHOULD(true);
+            // force the filter to be executed as iterator
+            TBooleanSHOULD(false);
+        }
+
+        private void TBooleanSHOULD(bool useRandomAccess)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(0), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.SHOULD);
+            query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.SHOULD);
+            ScoreDoc[] hits = Searcher.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        // Make sure BooleanQuery, which does out-of-order
+        // scoring, inside FilteredQuery, works
+        [Test]
+        public virtual void TestBoolean2()
+        {
+            // force the filter to be executed as bits
+            TBoolean2(true);
+            // force the filter to be executed as iterator
+            TBoolean2(false);
+        }
+
+        private void TBoolean2(bool useRandomAccess)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(bq, new SingleDocTestFilter(0), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(new TermQuery(new Term("field", "one")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("field", "two")), Occur.SHOULD);
+            ScoreDoc[] hits = Searcher.Search(query, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestChainedFilters()
+        {
+            // force the filter to be executed as bits
+            TChainedFilters(true);
+            // force the filter to be executed as iterator
+            TChainedFilters(false);
+        }
+
+        private void TChainedFilters(bool useRandomAccess)
+        {
+            Query query = new FilteredQuery(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "three")))), RandomFilterStrategy(Random(), useRandomAccess)), new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "four")))), RandomFilterStrategy(Random(), useRandomAccess));
+            ScoreDoc[] hits = Searcher.Search(query, 10).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+
+            // one more:
+            query = new FilteredQuery(query, new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "five")))), RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(query, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestEqualsHashcode()
+        {
+            // some tests before, if the used queries and filters work:
+            Assert.AreEqual(new PrefixFilter(new Term("field", "o")), new PrefixFilter(new Term("field", "o")));
+            Assert.IsFalse((new PrefixFilter(new Term("field", "a"))).Equals(new PrefixFilter(new Term("field", "o"))));
+            QueryUtils.CheckHashEquals(new TermQuery(new Term("field", "one")));
+            QueryUtils.CheckUnequal(new TermQuery(new Term("field", "one")), new TermQuery(new Term("field", "two"))
+           );
+            // now test FilteredQuery equals/hashcode:
+            QueryUtils.CheckHashEquals(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o"))));
+            QueryUtils.CheckUnequal(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o"))), new FilteredQuery(new TermQuery(new Term("field", "two")), new PrefixFilter(new Term("field", "o")))
+           );
+            QueryUtils.CheckUnequal(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "a"))), new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")))
+           );
+        }
+
+        [Test]
+        public virtual void TestInvalidArguments()
+        {
+            try
+            {
+                new FilteredQuery(null, null);
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+            try
+            {
+                new FilteredQuery(new TermQuery(new Term("field", "one")), null);
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+            try
+            {
+                new FilteredQuery(null, new PrefixFilter(new Term("field", "o")));
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+        }
+
+        private FilterStrategy RandomFilterStrategy()
+        {
+            return RandomFilterStrategy(Random(), true);
+        }
+
+        /// <summary>
+        /// Asserts that rewriting <paramref name="fq"/> yields a query assignable to
+        /// <paramref name="clazz"/>, that boosts are preserved through the rewrite
+        /// (kept separate when the result is still a FilteredQuery, multiplied
+        /// together when it has been collapsed into the inner query), and that the
+        /// original query object is not mutated by the rewrite.
+        /// </summary>
+        /// <param name="fq">the FilteredQuery to rewrite (its boosts are overwritten here)</param>
+        /// <param name="clazz">expected runtime type of the rewritten query</param>
+        private void AssertRewrite(FilteredQuery fq, Type clazz)
+        {
+            // assign crazy boost to FQ
+            float boost = (float)Random().NextDouble() * 100.0f;
+            fq.Boost = boost;
+
+            // assign crazy boost to inner
+            float innerBoost = (float)Random().NextDouble() * 100.0f;
+            fq.Query.Boost = innerBoost;
+
+            // check the class and boosts of rewritten query
+            Query rewritten = Searcher.Rewrite(fq);
+            Assert.IsTrue(clazz.IsInstanceOfType(rewritten), "is not instance of " + clazz.Name);
+            if (rewritten is FilteredQuery)
+            {
+                // still wrapped: outer and inner boosts must survive independently,
+                // and the strategy must be carried over
+                Assert.AreEqual(boost, rewritten.Boost, 1E-5f);
+                Assert.AreEqual(innerBoost, ((FilteredQuery)rewritten).Query.Boost, 1E-5f);
+                Assert.AreEqual(fq.Strategy, ((FilteredQuery)rewritten).Strategy);
+            }
+            else
+            {
+                // collapsed: the two boosts are combined multiplicatively
+                Assert.AreEqual(boost * innerBoost, rewritten.Boost, 1E-5f);
+            }
+
+            // check that the original query was not modified
+            Assert.AreEqual(boost, fq.Boost, 1E-5f);
+            Assert.AreEqual(innerBoost, fq.Query.Boost, 1E-5f);
+        }
+
+        /// <summary>
+        /// Both a TermQuery-backed and a PrefixQuery-backed FilteredQuery are
+        /// expected to rewrite to a FilteredQuery (boosts checked in AssertRewrite).
+        /// </summary>
+        [Test]
+        public virtual void TestRewrite()
+        {
+            AssertRewrite(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")), RandomFilterStrategy()), typeof(FilteredQuery));
+            AssertRewrite(new FilteredQuery(new PrefixQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")), RandomFilterStrategy()), typeof(FilteredQuery));
+        }
+
+        /// <summary>
+        /// The strategy passed to the FilteredQuery constructor must be exposed
+        /// unchanged (same instance) through the Strategy property.
+        /// </summary>
+        [Test]
+        public virtual void TestGetFilterStrategy()
+        {
+            FilterStrategy randomFilterStrategy = RandomFilterStrategy();
+            FilteredQuery filteredQuery = new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")), randomFilterStrategy);
+            Assert.AreSame(randomFilterStrategy, filteredQuery.Strategy);
+        }
+
+        /// <summary>
+        /// Returns a filter strategy for tests: when <paramref name="useRandomAccess"/>
+        /// is true, a strategy that always uses random access (bits); otherwise a
+        /// strategy chosen at random via TestUtil.
+        /// </summary>
+        private static FilteredQuery.FilterStrategy RandomFilterStrategy(Random random, bool useRandomAccess)
+        {
+            if (useRandomAccess)
+            {
+                return new RandomAccessFilterStrategyAnonymousInnerClassHelper();
+            }
+            return TestUtil.RandomFilterStrategy(random);
+        }
+
+        /// <summary>
+        /// RandomAccessFilterStrategy variant that unconditionally opts into random
+        /// access, regardless of the first matching filter doc.
+        /// </summary>
+        private class RandomAccessFilterStrategyAnonymousInnerClassHelper : FilteredQuery.RandomAccessFilterStrategy
+        {
+            public RandomAccessFilterStrategyAnonymousInnerClassHelper()
+            {
+            }
+
+            // Always prefer random access so tests exercise the bits-based path.
+            protected override bool UseRandomAccess(IBits bits, int firstFilterDoc)
+            {
+                return true;
+            }
+        }
+
+        /*
+         * Test if the QueryFirst strategy calls the bits only if the document has
+         * been matched by the query and not otherwise
+         */
+
+        /// <summary>
+        /// Indexes docs with a random field value 0..4 and searches for "0" with
+        /// QUERY_FIRST_FILTER_STRATEGY; the filter below asserts it is only
+        /// consulted for docs the query already matched, and the hit count must
+        /// equal the number of docs indexed with value 0.
+        /// </summary>
+        [Test]
+        public virtual void TestQueryFirstFilterStrategy()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int numDocs = AtLeast(50);
+            int totalDocsWithZero = 0;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int num = Random().Next(5);
+                if (num == 0)
+                {
+                    totalDocsWithZero++;
+                }
+                doc.Add(NewTextField("field", "" + num, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousInnerClassHelper3(this, reader), FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
+
+            TopDocs search = searcher.Search(query, 10);
+            Assert.AreEqual(totalDocsWithZero, search.TotalHits);
+            // NOTE(review): writer was already disposed above; passing it to
+            // IOUtils.Close again relies on Dispose being idempotent — verify.
+            IOUtils.Close(reader, writer, directory);
+        }
+
+        /// <summary>
+        /// Test filter matching exactly the docs containing term field:0. Its
+        /// returned DocIdSet randomly exposes (or hides) a Bits view so both the
+        /// bits path and the iterator path of QUERY_FIRST_FILTER_STRATEGY get
+        /// exercised; the Bits.Get implementation asserts it is never asked about
+        /// a non-matching doc.
+        /// </summary>
+        private class FilterAnonymousInnerClassHelper3 : Filter
+        {
+            private readonly TestFilteredQuery OuterInstance;
+
+            private IndexReader Reader;
+
+            public FilterAnonymousInnerClassHelper3(TestFilteredQuery outerInstance, IndexReader reader)
+            {
+                this.OuterInstance = outerInstance;
+                this.Reader = reader;
+            }
+
+            /// <summary>
+            /// Builds a bit set of all docs in this segment that contain field:0,
+            /// or returns null when the segment has no such docs.
+            /// </summary>
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                // 10% chance to report a null Bits view, forcing the iterator path
+                bool nullBitset = Random().Next(10) == 5;
+                AtomicReader reader = context.AtomicReader;
+                DocsEnum termDocsEnum = reader.TermDocsEnum(new Term("field", "0"));
+                if (termDocsEnum == null)
+                {
+                    return null; // no docs -- return null
+                }
+                BitArray bitSet = new BitArray(reader.MaxDoc);
+                int d;
+                while ((d = termDocsEnum.NextDoc()) != DocsEnum.NO_MORE_DOCS)
+                {
+                    bitSet.SafeSet(d, true);
+                }
+                return new DocIdSetAnonymousInnerClassHelper(this, nullBitset, reader, bitSet);
+            }
+
+            /// <summary>
+            /// DocIdSet whose Bits view is suppressed when nullBitset is true and
+            /// whose iterator asserts it is only used in that (null-bits) case.
+            /// </summary>
+            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper3 OuterInstance;
+
+                private bool NullBitset;
+                private AtomicReader Reader;
+                private BitArray BitSet;
+
+                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper3 outerInstance, bool nullBitset, AtomicReader reader, BitArray bitSet)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.NullBitset = nullBitset;
+                    this.Reader = reader;
+                    this.BitSet = bitSet;
+                }
+
+                public override IBits Bits
+                {
+                    get
+                    {
+                        if (NullBitset)
+                        {
+                            return null;
+                        }
+                        return new BitsAnonymousInnerClassHelper(this);
+                    }
+                }
+
+                /// <summary>
+                /// Bits view that fails the test if queried for a doc the filter
+                /// does not match (i.e. the query did not pre-filter it).
+                /// </summary>
+                private class BitsAnonymousInnerClassHelper : IBits
+                {
+                    private readonly DocIdSetAnonymousInnerClassHelper OuterInstance;
+
+                    public BitsAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                    {
+                        this.OuterInstance = outerInstance;
+                    }
+
+                    public bool Get(int index)
+                    {
+                        Assert.IsTrue(OuterInstance.BitSet.SafeGet(index), "filter was called for a non-matching doc");
+                        return OuterInstance.BitSet.SafeGet(index);
+                    }
+
+                    public int Length
+                    {
+                        get { return OuterInstance.BitSet.Length; }
+                    }
+                }
+
+                /// <summary>
+                /// Only legal when the Bits view was suppressed; otherwise the
+                /// strategy should have used the bits instead of iterating.
+                /// </summary>
+                public override DocIdSetIterator GetIterator()
+                {
+                    Assert.IsTrue(NullBitset, "iterator should not be called if bitset is present");
+                    return Reader.TermDocsEnum(new Term("field", "0"));
+                }
+            }
+        }
+
+        /*
+         * Test if the leapfrog strategy works correctly in terms
+         * of advancing / next the right thing first
+         */
+
+        /// <summary>
+        /// Indexes docs with a random field value 0..9 and searches for "0"; the
+        /// filter below asserts the leapfrog ordering (query-first vs filter-first)
+        /// by tracking NextDoc/Advance calls on the filter's iterator. Hit count
+        /// must equal the number of docs indexed with value 0.
+        /// </summary>
+        [Test]
+        public virtual void TestLeapFrogStrategy()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int numDocs = AtLeast(50);
+            int totalDocsWithZero = 0;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int num = Random().Next(10);
+                if (num == 0)
+                {
+                    totalDocsWithZero++;
+                }
+                doc.Add(NewTextField("field", "" + num, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+            bool queryFirst = Random().NextBoolean();
+            IndexSearcher searcher = NewSearcher(reader);
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousInnerClassHelper4(this, queryFirst), queryFirst ? FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY : Random()
+                  .NextBoolean() ? FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY : FilteredQuery.LEAP_FROG_FILTER_FIRST_STRATEGY); // if filterFirst, we can use random here since bits are null
+
+            TopDocs search = searcher.Search(query, 10);
+            Assert.AreEqual(totalDocsWithZero, search.TotalHits);
+            // NOTE(review): writer was already disposed above; passing it to
+            // IOUtils.Close again relies on Dispose being idempotent — verify.
+            IOUtils.Close(reader, writer, directory);
+        }
+
+        /// <summary>
+        /// Test filter for the leapfrog test: returns a DocIdSet with no Bits view
+        /// (so a leapfrog strategy must be used) whose iterator wraps the term docs
+        /// for field:0 and asserts which of NextDoc/Advance is invoked first,
+        /// depending on whether the query or the filter is expected to lead.
+        /// </summary>
+        private class FilterAnonymousInnerClassHelper4 : Filter
+        {
+            private readonly TestFilteredQuery OuterInstance;
+
+            private bool QueryFirst;
+
+            public FilterAnonymousInnerClassHelper4(TestFilteredQuery outerInstance, bool queryFirst)
+            {
+                this.OuterInstance = outerInstance;
+                this.QueryFirst = queryFirst;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return new DocIdSetAnonymousInnerClassHelper2(this, context);
+            }
+
+            /// <summary>
+            /// DocIdSet that exposes null Bits (forcing iterator-based leapfrog)
+            /// and the call-order-asserting iterator below.
+            /// </summary>
+            private class DocIdSetAnonymousInnerClassHelper2 : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper4 OuterInstance;
+
+                private AtomicReaderContext Context;
+
+                public DocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper4 outerInstance, AtomicReaderContext context)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.Context = context;
+                }
+
+                public override IBits Bits
+                {
+                    get { return null; }
+                }
+
+                public override DocIdSetIterator GetIterator()
+                {
+                    DocsEnum termDocsEnum = ((AtomicReader)Context.Reader).TermDocsEnum(new Term("field", "0"));
+                    if (termDocsEnum == null)
+                    {
+                        return null;
+                    }
+                    return new DocIdSetIteratorAnonymousInnerClassHelper(this, termDocsEnum);
+                }
+
+                /// <summary>
+                /// Delegating iterator that records whether NextDoc/Advance have
+                /// been called and asserts the first call matches the expected
+                /// leader: queryFirst => filter is advanced (not nexted) first;
+                /// filterFirst => filter is nexted (not advanced) first.
+                /// </summary>
+                private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+                {
+                    private readonly DocIdSetAnonymousInnerClassHelper2 OuterInstance;
+
+                    private DocsEnum TermDocsEnum;
+
+                    public DocIdSetIteratorAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper2 outerInstance, DocsEnum termDocsEnum)
+                    {
+                        this.OuterInstance = outerInstance;
+                        this.TermDocsEnum = termDocsEnum;
+                    }
+
+                    internal bool nextCalled;
+                    internal bool advanceCalled;
+
+                    // NOTE(review): in C#, ^ binds tighter than ||, so this reads
+                    // nextCalled || (advanceCalled ^ !QueryFirst) — presumably the
+                    // intended grouping, mirroring the upstream Java test; verify.
+                    public override int NextDoc()
+                    {
+                        Assert.IsTrue(nextCalled || advanceCalled ^ !OuterInstance.OuterInstance.QueryFirst, "queryFirst: " + OuterInstance.OuterInstance.QueryFirst + " advanced: " + advanceCalled + " next: " + nextCalled);
+                        nextCalled = true;
+                        return TermDocsEnum.NextDoc();
+                    }
+
+                    public override int DocID
+                    {
+                        get { return TermDocsEnum.DocID; }
+                    }
+
+                    // Same precedence note as NextDoc: advanceCalled || (nextCalled ^ QueryFirst).
+                    public override int Advance(int target)
+                    {
+                        Assert.IsTrue(advanceCalled || nextCalled ^ OuterInstance.OuterInstance.QueryFirst, "queryFirst: " + OuterInstance.OuterInstance.QueryFirst + " advanced: " + advanceCalled + " next: " + nextCalled);
+                        advanceCalled = true;
+                        return TermDocsEnum.Advance(target);
+                    }
+
+                    public override long GetCost()
+                    {
+                        return TermDocsEnum.GetCost();
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs b/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs
new file mode 100644
index 0000000..88dada2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs
@@ -0,0 +1,112 @@
+using System;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests filtered search across segment boundaries: a filter expressed in
+    /// global doc ids must be translated per-segment via the context's DocBase.
+    /// </summary>
+    [TestFixture]
+    public class TestFilteredSearch : LuceneTestCase
+    {
+        private const string FIELD = "category";
+
+        /// <summary>
+        /// Runs the same filtered search twice: once force-merged to a single
+        /// segment, once over multiple segments (maxBufferedDocs=10 over 60 docs).
+        /// </summary>
+        [Test]
+        public virtual void TestFilteredSearch_Mem()
+        {
+            bool enforceSingleSegment = true;
+            Directory directory = NewDirectory();
+            // global doc ids 1 and 36; only 36 also matches the query below
+            int[] filterBits = new int[] { 1, 36 };
+            SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits);
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            SearchFiltered(writer, directory, filter, enforceSingleSegment);
+            // run the test on more than one segment
+            enforceSingleSegment = false;
+            writer.Dispose();
+            writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy()));
+            // we index 60 docs - this will create 6 segments
+            SearchFiltered(writer, directory, filter, enforceSingleSegment);
+            writer.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Indexes 60 docs (category = "0".."59"), optionally force-merges, then
+        /// searches category:36 through the given filter and expects exactly one
+        /// hit. Disposes the writer; the caller still owns the directory.
+        /// </summary>
+        public virtual void SearchFiltered(IndexWriter writer, Directory directory, Filter filter, bool fullMerge)
+        {
+            for (int i = 0; i < 60; i++) //Simple docs
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField(FIELD, Convert.ToString(i), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            if (fullMerge)
+            {
+                writer.ForceMerge(1);
+            }
+            writer.Dispose();
+
+            BooleanQuery booleanQuery = new BooleanQuery();
+            booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), Occur.SHOULD);
+
+            IndexReader reader = DirectoryReader.Open(directory);
+            IndexSearcher indexSearcher = NewSearcher(reader);
+            ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "Number of matched documents");
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Filter over a fixed list of GLOBAL doc ids; per segment it keeps only
+        /// the ids falling inside [docBase, docBase + maxDoc) and rebases them to
+        /// segment-local ids. Asserts acceptDocs is null (index has no deletions).
+        /// </summary>
+        public sealed class SimpleDocIdSetFilter : Filter
+        {
+            internal readonly int[] Docs;
+
+            public SimpleDocIdSetFilter(int[] docs)
+            {
+                this.Docs = docs;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                Assert.IsNull(acceptDocs, "acceptDocs should be null, as we have an index without deletions");
+                FixedBitSet set = new FixedBitSet(context.Reader.MaxDoc);
+                int docBase = context.DocBase;
+                int limit = docBase + context.Reader.MaxDoc;
+                for (int index = 0; index < Docs.Length; index++)
+                {
+                    int docId = Docs[index];
+                    if (docId >= docBase && docId < limit)
+                    {
+                        // rebase global id to segment-local id
+                        set.Set(docId - docBase);
+                    }
+                }
+                // null signals "no docs in this segment" to the search framework
+                return set.Cardinality() == 0 ? null : set;
+            }
+        }
+    }
\ No newline at end of file


Mime
View raw message