lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dougs...@apache.org
Subject svn commit: r798995 [31/35] - in /incubator/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/QueryParser/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net...
Date Wed, 29 Jul 2009 18:04:24 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestConstantScoreRangeQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestConstantScoreRangeQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestConstantScoreRangeQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestConstantScoreRangeQuery.cs Wed Jul 29 18:04:12 2009
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -31,343 +32,550 @@
 namespace Lucene.Net.Search
 {
 
-	[TestFixture]
-	public class TestConstantScoreRangeQuery : BaseTestRangeFilter
-	{
-		private class AnonymousClassHitCollector : HitCollector
-		{
-			public AnonymousClassHitCollector(TestConstantScoreRangeQuery enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			private void  InitBlock(TestConstantScoreRangeQuery enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private TestConstantScoreRangeQuery enclosingInstance;
-			public TestConstantScoreRangeQuery Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			public override void  Collect(int doc, float score)
-			{
-				Enclosing_Instance.AssertEquals("score for doc " + doc + " was not correct", 1.0f, score);
-			}
-		}
-		
-		/// <summary>threshold for comparing floats </summary>
-		public const float SCORE_COMP_THRESH = 1e-6f;
-		
-		public TestConstantScoreRangeQuery(System.String name) : base(name)
-		{
-		}
-		public TestConstantScoreRangeQuery() : base()
-		{
-		}
-		
-		internal Directory small;
-		
-		internal virtual void  AssertEquals(System.String m, float e, float a)
-		{
-			Assert.AreEqual(e, a, m, SCORE_COMP_THRESH);
-		}
-		
-		static public void  AssertEquals(System.String m, int e, int a)
-		{
-			Assert.AreEqual(e, a, m);
-		}
-		
-		[SetUp]
-		public override void SetUp()
-		{
-			base.SetUp();	
-			System.String[] data = new System.String[]{"A 1 2 3 4 5 6", "Z       4 5 6", null, "B   2   4 5 6", "Y     3   5 6", null, "C     3     6", "X       4 5 6"};
-			
-			small = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(small, new WhitespaceAnalyzer(), true);
-			
-			for (int i = 0; i < data.Length; i++)
-			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-				doc.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id",String.valueOf(i)));
-				doc.Add(new Field("all", "all", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("all","all"));
-				if (null != data[i])
-				{
-					doc.Add(new Field("data", data[i], Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("data",data[i]));
-				}
-				writer.AddDocument(doc);
-			}
-			
-			writer.Optimize();
-			writer.Close();
-		}
-		
-		
-		
-		/// <summary>macro for readability </summary>
-		public static Query Csrq(System.String f, System.String l, System.String h, bool il, bool ih)
-		{
-			return new ConstantScoreRangeQuery(f, l, h, il, ih);
-		}
-		
-		[Test]
-		public virtual void  TestBasics()
-		{
-			QueryUtils.Check(Csrq("data", "1", "6", T, T));
-			QueryUtils.Check(Csrq("data", "A", "Z", T, T));
-			QueryUtils.CheckUnequal(Csrq("data", "1", "6", T, T), Csrq("data", "A", "Z", T, T));
-		}
-		
-		[Test]
-		public virtual void  TestEqualScores()
-		{
-			// NOTE: uses index build in *this* SetUp
-			
-			IndexReader reader = IndexReader.Open(small);
-			IndexSearcher search = new IndexSearcher(reader);
-			
-			Hits result;
-			
-			// some hits match more terms then others, score should be the same
-			
-			result = search.Search(Csrq("data", "1", "6", T, T));
-			int numHits = result.Length();
-			Assert.AreEqual(6, numHits, "wrong number of results");
-			float score = result.Score(0);
-			for (int i = 1; i < numHits; i++)
-			{
-				AssertEquals("score for " + i + " was not the same", score, result.Score(i));
-			}
-		}
-		
-		[Test]
-		public virtual void  TestBoost()
-		{
-			// NOTE: uses index build in *this* SetUp
-			
-			IndexReader reader = IndexReader.Open(small);
-			IndexSearcher search = new IndexSearcher(reader);
-			
-			// test for correct application of query normalization
-			// must use a non score normalizing method for this.
-			Query q = Csrq("data", "1", "6", T, T);
-			q.SetBoost(100);
-			search.Search(q, null, new AnonymousClassHitCollector(this));
-			
-			
-			//
-			// Ensure that boosting works to score one clause of a query higher
-			// than another.
-			//
-			Query q1 = Csrq("data", "A", "A", T, T); // matches document #0
-			q1.SetBoost(.1f);
-			Query q2 = Csrq("data", "Z", "Z", T, T); // matches document #1
-			BooleanQuery bq = new BooleanQuery(true);
-			bq.Add(q1, BooleanClause.Occur.SHOULD);
-			bq.Add(q2, BooleanClause.Occur.SHOULD);
-			
-			Hits hits = search.Search(bq);
-			Assert.AreEqual(1, hits.Id(0));
-			Assert.AreEqual(0, hits.Id(1));
-			Assert.IsTrue(hits.Score(0) > hits.Score(1));
-			
-			q1 = Csrq("data", "A", "A", T, T); // matches document #0
-			q1.SetBoost(10f);
-			q2 = Csrq("data", "Z", "Z", T, T); // matches document #1
-			bq = new BooleanQuery(true);
-			bq.Add(q1, BooleanClause.Occur.SHOULD);
-			bq.Add(q2, BooleanClause.Occur.SHOULD);
-			
-			hits = search.Search(bq);
-			Assert.AreEqual(0, hits.Id(0));
-			Assert.AreEqual(1, hits.Id(1));
-			Assert.IsTrue(hits.Score(0) > hits.Score(1));
-		}
-		
-		[Test]
-		public virtual void  TestBooleanOrderUnAffected()
-		{
-			// NOTE: uses index build in *this* SetUp
-			
-			IndexReader reader = IndexReader.Open(small);
-			IndexSearcher search = new IndexSearcher(reader);
-			
-			// first do a regular RangeQuery which uses term expansion so
-			// docs with more terms in range get higher scores
-			
-			Query rq = new RangeQuery(new Term("data", "1"), new Term("data", "4"), T);
-			
-			Hits expected = search.Search(rq);
-			int numHits = expected.Length();
-			
-			// now do a boolean where which also contains a
-			// ConstantScoreRangeQuery and make sure hte order is the same
-			
-			BooleanQuery q = new BooleanQuery();
-			q.Add(rq, BooleanClause.Occur.MUST); //T, F);
-			q.Add(Csrq("data", "1", "6", T, T), BooleanClause.Occur.MUST); //T, F);
-			
-			Hits actual = search.Search(q);
-			
-			Assert.AreEqual(numHits, actual.Length(), "wrong numebr of hits");
-			for (int i = 0; i < numHits; i++)
-			{
-				Assert.AreEqual(expected.Id(i), actual.Id(i), "mismatch in docid for hit#" + i);
-			}
-		}
-		
-		
-		
-		
-		[Test]
-		public virtual void  TestRangeQueryId()
-		{
-			// NOTE: uses index build in *super* SetUp
-			
-			IndexReader reader = IndexReader.Open(index);
-			IndexSearcher search = new IndexSearcher(reader);
-			
-			int medId = ((maxId - minId) / 2);
-			
-			System.String minIP = Pad(minId);
-			System.String maxIP = Pad(maxId);
-			System.String medIP = Pad(medId);
-			
-			int numDocs = reader.NumDocs();
-			
-			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
-			
-			Hits result;
-			
-			// test id, bounded on both ends
-			
-			result = search.Search(Csrq("id", minIP, maxIP, T, T));
-			Assert.AreEqual(numDocs, result.Length(), "find all");
-			
-			result = search.Search(Csrq("id", minIP, maxIP, T, F));
-			Assert.AreEqual(numDocs - 1, result.Length(), "all but last");
-			
-			result = search.Search(Csrq("id", minIP, maxIP, F, T));
-			Assert.AreEqual(numDocs - 1, result.Length(), "all but first");
-			
-			result = search.Search(Csrq("id", minIP, maxIP, F, F));
-			Assert.AreEqual(numDocs - 2, result.Length(), "all but ends");
-			
-			result = search.Search(Csrq("id", medIP, maxIP, T, T));
-			Assert.AreEqual(1 + maxId - medId, result.Length(), "med and up");
-			
-			result = search.Search(Csrq("id", minIP, medIP, T, T));
-			Assert.AreEqual(1 + medId - minId, result.Length(), "up to med");
-			
-			// unbounded id
-			
-			result = search.Search(Csrq("id", minIP, null, T, F));
-			Assert.AreEqual(numDocs, result.Length(), "min and up");
-			
-			result = search.Search(Csrq("id", null, maxIP, F, T));
-			Assert.AreEqual(numDocs, result.Length(), "max and down");
-			
-			result = search.Search(Csrq("id", minIP, null, F, F));
-			Assert.AreEqual(numDocs - 1, result.Length(), "not min, but up");
-			
-			result = search.Search(Csrq("id", null, maxIP, F, F));
-			Assert.AreEqual(numDocs - 1, result.Length(), "not max, but down");
-			
-			result = search.Search(Csrq("id", medIP, maxIP, T, F));
-			Assert.AreEqual(maxId - medId, result.Length(), "med and up, not max");
-			
-			result = search.Search(Csrq("id", minIP, medIP, F, T));
-			Assert.AreEqual(medId - minId, result.Length(), "not min, up to med");
-			
-			// very small sets
-			
-			result = search.Search(Csrq("id", minIP, minIP, F, F));
-			Assert.AreEqual(0, result.Length(), "min,min,F,F");
-			result = search.Search(Csrq("id", medIP, medIP, F, F));
-			Assert.AreEqual(0, result.Length(), "med,med,F,F");
-			result = search.Search(Csrq("id", maxIP, maxIP, F, F));
-			Assert.AreEqual(0, result.Length(), "max,max,F,F");
-			
-			result = search.Search(Csrq("id", minIP, minIP, T, T));
-			Assert.AreEqual(1, result.Length(), "min,min,T,T");
-			result = search.Search(Csrq("id", null, minIP, F, T));
-			Assert.AreEqual(1, result.Length(), "nul,min,F,T");
-			
-			result = search.Search(Csrq("id", maxIP, maxIP, T, T));
-			Assert.AreEqual(1, result.Length(), "max,max,T,T");
-			result = search.Search(Csrq("id", maxIP, null, T, F));
-			Assert.AreEqual(1, result.Length(), "max,nul,T,T");
-			
-			result = search.Search(Csrq("id", medIP, medIP, T, T));
-			Assert.AreEqual(1, result.Length(), "med,med,T,T");
-		}
-		
-		[Test]
-		public virtual void  TestRangeQueryRand()
-		{
-			// NOTE: uses index build in *super* SetUp
-			
-			IndexReader reader = IndexReader.Open(index);
-			IndexSearcher search = new IndexSearcher(reader);
-			
-			System.String minRP = Pad(minR);
-			System.String maxRP = Pad(maxR);
-			
-			int numDocs = reader.NumDocs();
-			
-			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
-			
-			Hits result;
-			Query q = new TermQuery(new Term("body", "body"));
-			
-			// test extremes, bounded on both ends
-			
-			result = search.Search(Csrq("rand", minRP, maxRP, T, T));
-			Assert.AreEqual(numDocs, result.Length(), "find all");
-			
-			result = search.Search(Csrq("rand", minRP, maxRP, T, F));
-			Assert.AreEqual(numDocs - 1, result.Length(), "all but biggest");
-			
-			result = search.Search(Csrq("rand", minRP, maxRP, F, T));
-			Assert.AreEqual(numDocs - 1, result.Length(), "all but smallest");
-			
-			result = search.Search(Csrq("rand", minRP, maxRP, F, F));
-			Assert.AreEqual(numDocs - 2, result.Length(), "all but extremes");
-			
-			// unbounded
-			
-			result = search.Search(Csrq("rand", minRP, null, T, F));
-			Assert.AreEqual(numDocs, result.Length(), "smallest and up");
-			
-			result = search.Search(Csrq("rand", null, maxRP, F, T));
-			Assert.AreEqual(numDocs, result.Length(), "biggest and down");
-			
-			result = search.Search(Csrq("rand", minRP, null, F, F));
-			Assert.AreEqual(numDocs - 1, result.Length(), "not smallest, but up");
-			
-			result = search.Search(Csrq("rand", null, maxRP, F, F));
-			Assert.AreEqual(numDocs - 1, result.Length(), "not biggest, but down");
-			
-			// very small sets
-			
-			result = search.Search(Csrq("rand", minRP, minRP, F, F));
-			Assert.AreEqual(0, result.Length(), "min,min,F,F");
-			result = search.Search(Csrq("rand", maxRP, maxRP, F, F));
-			Assert.AreEqual(0, result.Length(), "max,max,F,F");
-			
-			result = search.Search(Csrq("rand", minRP, minRP, T, T));
-			Assert.AreEqual(1, result.Length(), "min,min,T,T");
-			result = search.Search(Csrq("rand", null, minRP, F, T));
-			Assert.AreEqual(1, result.Length(), "nul,min,F,T");
-			
-			result = search.Search(Csrq("rand", maxRP, maxRP, T, T));
-			Assert.AreEqual(1, result.Length(), "max,max,T,T");
-			result = search.Search(Csrq("rand", maxRP, null, T, F));
-			Assert.AreEqual(1, result.Length(), "max,nul,T,T");
-		}
-	}
+    [TestFixture]
+    public class TestConstantScoreRangeQuery : BaseTestRangeFilter
+    {
+        private class AnonymousClassHitCollector : HitCollector
+        {
+            public AnonymousClassHitCollector(TestConstantScoreRangeQuery enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+            }
+            private void InitBlock(TestConstantScoreRangeQuery enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private TestConstantScoreRangeQuery enclosingInstance;
+            public TestConstantScoreRangeQuery Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+
+            }
+            public override void Collect(int doc, float score)
+            {
+                Enclosing_Instance.AssertEquals("score for doc " + doc + " was not correct", 1.0f, score);
+            }
+        }
+
+        /// <summary>threshold for comparing floats </summary>
+        public const float SCORE_COMP_THRESH = 1e-6f;
+
+        public TestConstantScoreRangeQuery(string name)
+            : base(name)
+        {
+        }
+        public TestConstantScoreRangeQuery()
+            : base()
+        {
+        }
+
+        internal Directory small;
+
+        internal virtual void AssertEquals(string m, float e, float a)
+        {
+            Assert.AreEqual(e, a, m, SCORE_COMP_THRESH);
+        }
+
+        static public void AssertEquals(string m, int e, int a)
+        {
+            Assert.AreEqual(e, a, m);
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            string[] data = new string[] { "A 1 2 3 4 5 6", "Z       4 5 6", null, "B   2   4 5 6", "Y     3   5 6", null, "C     3     6", "X       4 5 6" };
+
+            small = new RAMDirectory();
+            IndexWriter writer = new IndexWriter(small, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+
+            for (int i = 0; i < data.Length; i++)
+            {
+                Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+                doc.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id",string.valueOf(i)));
+                doc.Add(new Field("all", "all", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("all","all"));
+                if (null != data[i])
+                {
+                    doc.Add(new Field("data", data[i], Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("data",data[i]));
+                }
+                writer.AddDocument(doc);
+            }
+
+            writer.Optimize();
+            writer.Close();
+        }
+
+
+
+        /// <summary>macro for readability </summary>
+        public static Query Csrq(string f, string l, string h, bool il, bool ih)
+        {
+            return new ConstantScoreRangeQuery(f, l, h, il, ih);
+        }
+
+        /// <summary>macro for readability </summary>
+        public static Query Csrq(string f, string l, string h, bool il, bool ih, System.Globalization.CompareInfo c)
+        {
+            return new ConstantScoreRangeQuery(f, l, h, il, ih, c);
+        }
+
+        [Test]
+        public virtual void TestBasics()
+        {
+            QueryUtils.Check(Csrq("data", "1", "6", T, T));
+            QueryUtils.Check(Csrq("data", "A", "Z", T, T));
+            QueryUtils.CheckUnequal(Csrq("data", "1", "6", T, T), Csrq("data", "A", "Z", T, T));
+        }
+
+        [Test]
+        public void TestBasicsCollating()
+        {
+            System.Globalization.CompareInfo c = System.Globalization.CultureInfo.GetCultureInfo("en-us").CompareInfo;
+            QueryUtils.Check(Csrq("data", "1", "6", T, T, c));
+            QueryUtils.Check(Csrq("data", "A", "Z", T, T, c));
+            QueryUtils.CheckUnequal(Csrq("data", "1", "6", T, T, c), Csrq("data", "A", "Z", T, T, c));
+        }
+
+        [Test]
+        public virtual void TestEqualScores()
+        {
+            // NOTE: uses index build in *this* SetUp
+
+            IndexReader reader = IndexReader.Open(small);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            ScoreDoc[] result;
+
+            // some hits match more terms then others, score should be the same
+
+            result = search.Search(Csrq("data", "1", "6", T, T), null, 1000).scoreDocs;
+            int numHits = result.Length;
+            Assert.AreEqual(6, numHits, "wrong number of results");
+            float score = result[0].score;
+            for (int i = 1; i < numHits; i++)
+            {
+                AssertEquals("score for " + i + " was not the same", score, result[i].score);
+            }
+        }
+
+        [Test]
+        public virtual void TestBoost()
+        {
+            // NOTE: uses index build in *this* SetUp
+
+            IndexReader reader = IndexReader.Open(small);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            // test for correct application of query normalization
+            // must use a non score normalizing method for this.
+            Query q = Csrq("data", "1", "6", T, T);
+            q.SetBoost(100);
+            search.Search(q, null, new AnonymousClassHitCollector(this));
+
+
+            //
+            // Ensure that boosting works to score one clause of a query higher
+            // than another.
+            //
+            Query q1 = Csrq("data", "A", "A", T, T); // matches document #0
+            q1.SetBoost(.1f);
+            Query q2 = Csrq("data", "Z", "Z", T, T); // matches document #1
+            BooleanQuery bq = new BooleanQuery(true);
+            bq.Add(q1, BooleanClause.Occur.SHOULD);
+            bq.Add(q2, BooleanClause.Occur.SHOULD);
+
+            ScoreDoc[] hits = search.Search(bq, null, 1000).scoreDocs;
+            Assert.AreEqual(1, hits[0].doc);
+            Assert.AreEqual(0, hits[1].doc);
+            Assert.IsTrue(hits[0].score > hits[1].score);
+
+            q1 = Csrq("data", "A", "A", T, T); // matches document #0
+            q1.SetBoost(10f);
+            q2 = Csrq("data", "Z", "Z", T, T); // matches document #1
+            bq = new BooleanQuery(true);
+            bq.Add(q1, BooleanClause.Occur.SHOULD);
+            bq.Add(q2, BooleanClause.Occur.SHOULD);
+
+            hits = search.Search(bq, null, 1000).scoreDocs;
+            Assert.AreEqual(0, hits[0].doc);
+            Assert.AreEqual(1, hits[1].doc);
+            Assert.IsTrue(hits[0].score > hits[1].score);
+        }
+
+        [Test]
+        public virtual void TestBooleanOrderUnAffected()
+        {
+            // NOTE: uses index build in *this* SetUp
+
+            IndexReader reader = IndexReader.Open(small);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            // first do a regular RangeQuery which uses term expansion so
+            // docs with more terms in range get higher scores
+
+            Query rq = new RangeQuery(new Term("data", "1"), new Term("data", "4"), T);
+
+            ScoreDoc[] expected = search.Search(rq, null, 1000).scoreDocs;
+            int numHits = expected.Length;
+
+            // now do a bool where which also contains a
+            // ConstantScoreRangeQuery and make sure hte order is the same
+
+            BooleanQuery q = new BooleanQuery();
+            q.Add(rq, BooleanClause.Occur.MUST); //T, F);
+            q.Add(Csrq("data", "1", "6", T, T), BooleanClause.Occur.MUST); //T, F);
+
+            ScoreDoc[] actual = search.Search(q, null, 1000).scoreDocs;
+
+            Assert.AreEqual(numHits, actual.Length, "wrong number of hits");
+            for (int i = 0; i < numHits; i++)
+            {
+                Assert.AreEqual(expected[i].doc, actual[i].doc, "mismatch in docid for hit#" + i);
+            }
+        }
+
+        [Test]
+        public void TestRangeQueryId()
+        {
+            // NOTE: uses index build in *super* setUp
+
+            IndexReader reader = IndexReader.Open(signedIndex.index);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            int medId = ((maxId - minId) / 2);
+
+            string minIP = Pad(minId);
+            string maxIP = Pad(maxId);
+            string medIP = Pad(medId);
+
+            int numDocs = reader.NumDocs();
+
+            Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+
+            ScoreDoc[] result;
+
+            // test id, bounded on both ends
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+
+            result = search.Search(Csrq("id", minIP, medIP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(Csrq("id", minIP, null, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(Csrq("id", null, maxIP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(Csrq("id", minIP, null, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(Csrq("id", null, maxIP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(Csrq("id", minIP, medIP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(Csrq("id", minIP, minIP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(Csrq("id", medIP, medIP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(Csrq("id", maxIP, maxIP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(Csrq("id", minIP, minIP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(Csrq("id", null, minIP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(Csrq("id", maxIP, maxIP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(Csrq("id", maxIP, null, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(Csrq("id", medIP, medIP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+        }
+
+
+        [Test]
+        public void TestRangeQueryIdCollating()
+        {
+            // NOTE: uses index build in *super* setUp
+
+            IndexReader reader = IndexReader.Open(signedIndex.index);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            int medId = ((maxId - minId) / 2);
+
+            string minIP = Pad(minId);
+            string maxIP = Pad(maxId);
+            string medIP = Pad(medId);
+
+            int numDocs = reader.NumDocs();
+
+            Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+
+            ScoreDoc[] result;
+
+            System.Globalization.CompareInfo c = System.Globalization.CultureInfo.GetCultureInfo("en-us").CompareInfo;
+
+            // test id, bounded on both ends
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+
+            result = search.Search(Csrq("id", minIP, medIP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(Csrq("id", minIP, null, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(Csrq("id", null, maxIP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(Csrq("id", minIP, null, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(Csrq("id", null, maxIP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(Csrq("id", minIP, medIP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(Csrq("id", minIP, minIP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F,c");
+            result = search.Search(Csrq("id", medIP, medIP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F,c");
+            result = search.Search(Csrq("id", maxIP, maxIP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F,c");
+
+            result = search.Search(Csrq("id", minIP, minIP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T,c");
+            result = search.Search(Csrq("id", null, minIP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T,c");
+
+            result = search.Search(Csrq("id", maxIP, maxIP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T,c");
+            result = search.Search(Csrq("id", maxIP, null, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T,c");
+
+            result = search.Search(Csrq("id", medIP, medIP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T,c");
+        }
+
+
+        [Test]
+        public void TestRangeQueryRand()
+        {
+            // NOTE: uses index build in *super* setUp
+
+            IndexReader reader = IndexReader.Open(signedIndex.index);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            string minRP = Pad(signedIndex.minR);
+            string maxRP = Pad(signedIndex.maxR);
+
+            int numDocs = reader.NumDocs();
+
+            Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+
+            ScoreDoc[] result;
+
+            // test extremes, bounded on both ends
+
+            result = search.Search(Csrq("rand", minRP, maxRP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(Csrq("rand", minRP, maxRP, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but biggest");
+
+            result = search.Search(Csrq("rand", minRP, maxRP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but smallest");
+
+            result = search.Search(Csrq("rand", minRP, maxRP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but extremes");
+
+            // unbounded
+
+            result = search.Search(Csrq("rand", minRP, null, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "smallest and up");
+
+            result = search.Search(Csrq("rand", null, maxRP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "biggest and down");
+
+            result = search.Search(Csrq("rand", minRP, null, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not smallest, but up");
+
+            result = search.Search(Csrq("rand", null, maxRP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not biggest, but down");
+
+            // very small sets
+
+            result = search.Search(Csrq("rand", minRP, minRP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(Csrq("rand", maxRP, maxRP, F, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(Csrq("rand", minRP, minRP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(Csrq("rand", null, minRP, F, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(Csrq("rand", maxRP, maxRP, T, T), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(Csrq("rand", maxRP, null, T, F), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+        }
+
+        [Test]
+        public void TestRangeQueryRandCollating()
+        {
+            // NOTE: uses index built in *super* setUp
+
+            // using the unsigned index because collation seems to ignore hyphens
+            IndexReader reader = IndexReader.Open(unsignedIndex.index);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            string minRP = Pad(unsignedIndex.minR);
+            string maxRP = Pad(unsignedIndex.maxR);
+
+            int numDocs = reader.NumDocs();
+
+            Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+
+            ScoreDoc[] result;
+
+            System.Globalization.CompareInfo c = System.Globalization.CultureInfo.GetCultureInfo("en-us").CompareInfo;
+
+            // test extremes, bounded on both ends
+
+            result = search.Search(Csrq("rand", minRP, maxRP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(Csrq("rand", minRP, maxRP, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but biggest");
+
+            result = search.Search(Csrq("rand", minRP, maxRP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but smallest");
+
+            result = search.Search(Csrq("rand", minRP, maxRP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but extremes");
+
+            // unbounded
+
+            result = search.Search(Csrq("rand", minRP, null, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "smallest and up");
+
+            result = search.Search(Csrq("rand", null, maxRP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "biggest and down");
+
+            result = search.Search(Csrq("rand", minRP, null, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not smallest, but up");
+
+            result = search.Search(Csrq("rand", null, maxRP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not biggest, but down");
+
+            // very small sets
+
+            result = search.Search(Csrq("rand", minRP, minRP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F,c");
+            result = search.Search(Csrq("rand", maxRP, maxRP, F, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F,c");
+
+            result = search.Search(Csrq("rand", minRP, minRP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T,c");
+            result = search.Search(Csrq("rand", null, minRP, F, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T,c");
+
+            result = search.Search(Csrq("rand", maxRP, maxRP, T, T, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T,c");
+            result = search.Search(Csrq("rand", maxRP, null, T, F, c), null, numDocs).scoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T,c");
+        }
+
+        [Test]
+        public void TestFarsi()
+        {
+
+            /* build an index */
+            RAMDirectory farsiIndex = new RAMDirectory();
+            IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(), T,
+                                                 IndexWriter.MaxFieldLength.LIMITED);
+            Document doc = new Document();
+            doc.Add(new Field("content", "\u0633\u0627\u0628",
+                              Field.Store.YES, Field.Index.NOT_ANALYZED));
+            doc.Add(new Field("body", "body",
+                              Field.Store.YES, Field.Index.NOT_ANALYZED));
+            writer.AddDocument(doc);
+
+            writer.Optimize();
+            writer.Close();
+
+            IndexReader reader = IndexReader.Open(farsiIndex);
+            IndexSearcher search = new IndexSearcher(reader);
+
+            // (Comment ported from the Java Lucene original:) Neither Java 1.4.2 nor
+            // 1.5.0 has Farsi Locale collation available in RuleBasedCollator, but the
+            // Arabic Locale seems to order the Farsi characters properly.
+            System.Globalization.CompareInfo c = System.Globalization.CultureInfo.GetCultureInfo("ar").CompareInfo;
+
+            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
+            // orders the U+0698 character before the U+0633 character, so the single
+            // index Term below should NOT be returned by a ConstantScoreRangeQuery
+            // with a Farsi Collator (or an Arabic one for the case when Farsi is 
+            // not supported).
+            ScoreDoc[] result = search.Search(Csrq("content", "\u062F", "\u0698", T, T, c), null, 1000).scoreDocs;
+            Assert.AreEqual(0, result.Length, "The index Term should not be included.");
+
+            result = search.Search(Csrq("content", "\u0633", "\u0638", T, T, c), null, 1000).scoreDocs;
+            Assert.AreEqual(1, result.Length, "The index Term should be included.");
+            search.Close();
+        }
+    }
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestCustomSearcherSort.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs Wed Jul 29 18:04:12 2009
@@ -35,9 +35,6 @@
 	/// <summary> Unit test for sorting code.
 	/// 
 	/// </summary>
-	/// <author>   Martin Seitz (T-Systems)
-	/// </author>
-	
 	[Serializable]
 	[TestFixture]
 	public class TestCustomSearcherSort
@@ -49,7 +46,7 @@
 			get
 			{
 				RAMDirectory indexStore = new RAMDirectory();
-				IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(), true);
+				IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 				RandomGen random = new RandomGen(this);
 				for (int i = 0; i < INDEX_SIZE; ++i)
 				{
@@ -58,15 +55,15 @@
 					if ((i % 5) != 0)
 					{
 						// some documents must not have an entry in the first sort field
-						doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+						doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.NOT_ANALYZED));
 					}
 					if ((i % 7) == 0)
 					{
 						// some documents to match the query (see below) 
-						doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.TOKENIZED));
+						doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.ANALYZED));
 					}
 					// every document has a defined 'mandant' field
-					doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.UN_TOKENIZED));
+					doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.NOT_ANALYZED));
 					writer.AddDocument(doc);
 				}
 				writer.Optimize();
@@ -135,24 +132,24 @@
 		private void  MatchHits(Searcher searcher, Sort sort)
 		{
 			// make a query without sorting first
-			Hits hitsByRank = searcher.Search(query);
+			ScoreDoc[] hitsByRank = searcher.Search(query, null, 1000).scoreDocs;
 			CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates
 			System.Collections.IDictionary resultMap = new System.Collections.SortedList();
 			// store hits in TreeMap - TreeMap does not allow duplicates; existing entries are silently overwritten
-			for (int hitid = 0; hitid < hitsByRank.Length(); ++hitid)
+			for (int hitid = 0; hitid < hitsByRank.Length; ++hitid)
 			{
-				resultMap[(System.Int32) hitsByRank.Id(hitid)] = (System.Int32) hitid; // Value: Hits-Objekt Index
+				resultMap[(System.Int32) hitsByRank[hitid].doc] = (System.Int32) hitid; // value: index into hitsByRank
 			}
 			
 			// now make a query using the sort criteria
-			Hits resultSort = searcher.Search(query, sort);
+			ScoreDoc[] resultSort = searcher.Search(query, null, 1000, sort).scoreDocs;
 			CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates
 			
 			System.String lf = SupportClass.AppSettings.Get("line.separator", "\n");
 			// besides the sorting both sets of hits must be identical
-			for (int hitid = 0; hitid < resultSort.Length(); ++hitid)
+			for (int hitid = 0; hitid < resultSort.Length; ++hitid)
 			{
-				System.Int32 idHitDate = (System.Int32) resultSort.Id(hitid); // document ID from sorted search
+				System.Int32 idHitDate = (System.Int32) resultSort[hitid].doc; // document ID from sorted search
 				if (!resultMap.Contains(idHitDate))
 				{
 					Log("ID " + idHitDate + " not found. Possibliy a duplicate.");
@@ -176,44 +173,31 @@
 		/// <summary> Check the hits for duplicates.</summary>
 		/// <param name="hits">
 		/// </param>
-		private void  CheckHits(Hits hits, System.String prefix)
+		private void  CheckHits(ScoreDoc[] hits, System.String prefix)
 		{
 			if (hits != null)
 			{
 				System.Collections.IDictionary idMap = new System.Collections.SortedList();
-				for (int docnum = 0; docnum < hits.Length(); ++docnum)
+				for (int docnum = 0; docnum < hits.Length; ++docnum)
 				{
 					System.Int32 luceneId;
-					try
-					{
-						luceneId = (System.Int32) hits.Id(docnum);
-						if (idMap.Contains(luceneId))
-						{
-							System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
-							message.Append("Duplicate key for hit index = ");
-							message.Append(docnum);
-							message.Append(", previous index = ");
-							message.Append(((System.Int32) idMap[luceneId]).ToString());
-							message.Append(", Lucene ID = ");
-							message.Append(luceneId);
-							Log(message.ToString());
-						}
-						else
-						{
-							idMap[luceneId] = (System.Int32) docnum;
-						}
-					}
-					catch (System.IO.IOException ioe)
-					{
-						System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
-						message.Append("Error occurred for hit index = ");
-						message.Append(docnum);
-						message.Append(" (");
-						message.Append(ioe.Message);
-						message.Append(")");
-						Log(message.ToString());
-					}
-				}
+                    luceneId = (System.Int32)hits[docnum].doc;
+                    if (idMap.Contains(luceneId))
+                    {
+                        System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
+                        message.Append("Duplicate key for hit index = ");
+                        message.Append(docnum);
+                        message.Append(", previous index = ");
+                        message.Append(((System.Int32)idMap[luceneId]).ToString());
+                        message.Append(", Lucene ID = ");
+                        message.Append(luceneId);
+                        Log(message.ToString());
+                    }
+                    else
+                    {
+                        idMap[luceneId] = (System.Int32)docnum;
+                    }
+                }
 			}
 		}
 		

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs Wed Jul 29 18:04:12 2009
@@ -47,14 +47,14 @@
 		{
 			// create an index
 			RAMDirectory indexStore = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			long now = (long) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			// add time that is in the past
-			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 1000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.UN_TOKENIZED));
-			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 1000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Optimize();
 			writer.Close();
@@ -74,28 +74,28 @@
 			// search for something that does exists
 			Query query2 = new TermQuery(new Term("body", "sunny"));
 			
-			Hits result;
+			ScoreDoc[] result;
 			
 			// ensure that queries return expected results without DateFilter first
-			result = searcher.Search(query1);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query1, null, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 			
-			result = searcher.Search(query2);
-			Assert.AreEqual(1, result.Length());
+			result = searcher.Search(query2, null, 1000).scoreDocs;
+			Assert.AreEqual(1, result.Length);
 			
 			
 			// run queries with DateFilter
-			result = searcher.Search(query1, df1);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query1, df1, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 			
-			result = searcher.Search(query1, df2);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query1, df2, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 			
-			result = searcher.Search(query2, df1);
-			Assert.AreEqual(1, result.Length());
+			result = searcher.Search(query2, df1, 1000).scoreDocs;
+			Assert.AreEqual(1, result.Length);
 			
-			result = searcher.Search(query2, df2);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query2, df2, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 		}
 		
 		/// <summary> </summary>
@@ -104,14 +104,14 @@
 		{
 			// create an index
 			RAMDirectory indexStore = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			long now = (long) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			// add time that is in the future
-			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 888888, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.UN_TOKENIZED));
-			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 888888, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Optimize();
 			writer.Close();
@@ -131,28 +131,28 @@
 			// search for something that does exists
 			Query query2 = new TermQuery(new Term("body", "sunny"));
 			
-			Hits result;
+			ScoreDoc[] result;
 			
 			// ensure that queries return expected results without DateFilter first
-			result = searcher.Search(query1);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query1, null, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 			
-			result = searcher.Search(query2);
-			Assert.AreEqual(1, result.Length());
+			result = searcher.Search(query2, null, 1000).scoreDocs;
+			Assert.AreEqual(1, result.Length);
 			
 			
 			// run queries with DateFilter
-			result = searcher.Search(query1, df1);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query1, df1, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 			
-			result = searcher.Search(query1, df2);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query1, df2, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 			
-			result = searcher.Search(query2, df1);
-			Assert.AreEqual(1, result.Length());
+			result = searcher.Search(query2, df1, 1000).scoreDocs;
+			Assert.AreEqual(1, result.Length);
 			
-			result = searcher.Search(query2, df2);
-			Assert.AreEqual(0, result.Length());
+			result = searcher.Search(query2, df2, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length);
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateSort.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs Wed Jul 29 18:04:12 2009
@@ -48,7 +48,7 @@
 		{
 			// Create an index writer.
 			directory = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 
 			// oldest doc:
 			// Add the first document.  text = "Document 1"  dateTime = Oct 10 03:25:22 EDT 2007
@@ -94,10 +94,10 @@
 			
 			// Execute the search and process the search results.
 			System.String[] actualOrder = new System.String[5];
-			Hits hits = searcher.Search(query, sort);
-			for (int i = 0; i < hits.Length(); i++)
+			ScoreDoc[] hits = searcher.Search(query, null, 1000, sort).scoreDocs;
+			for (int i = 0; i < hits.Length; i++)
 			{
-				Document document = hits.Doc(i);
+				Document document = searcher.Doc(hits[i].doc);
 				System.String text = document.Get(TEXT_FIELD);
 				actualOrder[i] = text;
 			}
@@ -122,12 +122,12 @@
 			Document document = new Document();
 			
 			// Add the text field.
-			Field textField = new Field(TEXT_FIELD, text, Field.Store.YES, Field.Index.TOKENIZED);
+			Field textField = new Field(TEXT_FIELD, text, Field.Store.YES, Field.Index.ANALYZED);
 			document.Add(textField);
 			
 			// Add the date/time field.
 			System.String dateTimeString = DateTools.TimeToString(time, DateTools.Resolution.SECOND);
-			Field dateTimeField = new Field(DATE_TIME_FIELD, dateTimeString, Field.Store.YES, Field.Index.UN_TOKENIZED);
+			Field dateTimeField = new Field(DATE_TIME_FIELD, dateTimeString, Field.Store.YES, Field.Index.NOT_ANALYZED);
 			document.Add(dateTimeField);
 			
 			return document;

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDisjunctionMaxQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs Wed Jul 29 18:04:12 2009
@@ -96,7 +96,7 @@
 			base.SetUp();
 			
 			index = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(index, new WhitespaceAnalyzer(), true);
+            IndexWriter writer = new IndexWriter(index, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetSimilarity(sim);
 			
 			// hed is the most important field, dek is secondary
@@ -104,38 +104,38 @@
 			// d1 is an "ok" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
-				d1.Add(new Field("id", "d1", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d1"));
-				d1.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-				d1.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "elephant"));
+				d1.Add(new Field("id", "d1", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d1"));
+				d1.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
+				d1.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("dek", "elephant"));
 				writer.AddDocument(d1);
 			}
 			
 			// d2 is a "good" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
-				d2.Add(new Field("id", "d2", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d2"));
-				d2.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-				d2.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "albino"));
-				d2.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "elephant"));
+				d2.Add(new Field("id", "d2", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d2"));
+				d2.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
+				d2.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("dek", "albino"));
+				d2.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("dek", "elephant"));
 				writer.AddDocument(d2);
 			}
 			
 			// d3 is a "better" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
-				d3.Add(new Field("id", "d3", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d3"));
-				d3.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "albino"));
-				d3.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
+				d3.Add(new Field("id", "d3", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d3"));
+				d3.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "albino"));
+				d3.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
 				writer.AddDocument(d3);
 			}
 			
 			// d4 is the "best" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
-				d4.Add(new Field("id", "d4", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d4"));
-				d4.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "albino"));
-				d4.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-				d4.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "albino"));
+				d4.Add(new Field("id", "d4", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d4"));
+				d4.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "albino"));
+				d4.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
+				d4.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("dek", "albino"));
 				writer.AddDocument(d4);
 			}
 			
@@ -190,21 +190,21 @@
 			q.Add(Tq("hed", "elephant"));
 			QueryUtils.Check(q, s);
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
-				Assert.AreEqual(4, h.Length(), "all docs should match " + q.ToString());
+				Assert.AreEqual(4, h.Length, "all docs should match " + q.ToString());
 				
-				float score = h.Score(0);
-				for (int i = 1; i < h.Length(); i++)
+				float score = h[0].score;
+				for (int i = 1; i < h.Length; i++)
 				{
-					Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+					Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
 				}
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testSimpleEqualScores1", h);
+				PrintHits("testSimpleEqualScores1", h, s);
 				throw e;
 			}
 		}
@@ -219,20 +219,20 @@
 			QueryUtils.Check(q, s);
 
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
-				Assert.AreEqual(3, h.Length(), "3 docs should match " + q.ToString());
-				float score = h.Score(0);
-				for (int i = 1; i < h.Length(); i++)
+				Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
+				float score = h[0].score;
+				for (int i = 1; i < h.Length; i++)
 				{
-					Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+					Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
 				}
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testSimpleEqualScores2", h);
+				PrintHits("testSimpleEqualScores2", h, s);
 				throw e;
 			}
 		}
@@ -249,20 +249,20 @@
 			QueryUtils.Check(q, s);
 
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
-				Assert.AreEqual(4, h.Length(), "all docs should match " + q.ToString());
-				float score = h.Score(0);
-				for (int i = 1; i < h.Length(); i++)
+				Assert.AreEqual(4, h.Length, "all docs should match " + q.ToString());
+				float score = h[0].score;
+				for (int i = 1; i < h.Length; i++)
 				{
-					Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+                    Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
 				}
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testSimpleEqualScores3", h);
+				PrintHits("testSimpleEqualScores3", h, s);
 				throw e;
 			}
 		}
@@ -277,21 +277,21 @@
 			QueryUtils.Check(q, s);
 			
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
-				Assert.AreEqual(3, h.Length(), "3 docs should match " + q.ToString());
-				Assert.AreEqual("d2", h.Doc(0).Get("id"), "wrong first");
-				float score0 = h.Score(0);
-				float score1 = h.Score(1);
-				float score2 = h.Score(2);
+				Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
+				Assert.AreEqual("d2", s.Doc(h[0].doc).Get("id"), "wrong first");
+				float score0 = h[0].score;
+				float score1 = h[1].score;
+				float score2 = h[2].score;
 				Assert.IsTrue(score0 > score1, "d2 does not have better score then others: " + score0 + " >? " + score1);
 				Assert.AreEqual(score1, score2, SCORE_COMP_THRESH, "d4 and d1 don't have equal scores");
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testSimpleTiebreaker", h);
+				PrintHits("testSimpleTiebreaker", h, s);
 				throw e;
 			}
 		}
@@ -318,20 +318,20 @@
 			
 			QueryUtils.Check(q, s);
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
-				Assert.AreEqual(3, h.Length(), "3 docs should match " + q.ToString());
-				float score = h.Score(0);
-				for (int i = 1; i < h.Length(); i++)
+				Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
+				float score = h[0].score;
+				for (int i = 1; i < h.Length; i++)
 				{
-					Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+					Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
 				}
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testBooleanRequiredEqualScores1", h);
+				PrintHits("testBooleanRequiredEqualScores1", h, s);
 				throw e;
 			}
 		}
@@ -356,24 +356,24 @@
 			QueryUtils.Check(q, s);
 			
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
-				Assert.AreEqual(4, h.Length(), "4 docs should match " + q.ToString());
-				float score = h.Score(0);
-				for (int i = 1; i < h.Length() - 1; i++)
+				Assert.AreEqual(4, h.Length, "4 docs should match " + q.ToString());
+				float score = h[0].score;
+				for (int i = 1; i < h.Length - 1; i++)
 				{
 					/* note: -1 */
-					Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+					Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
 				}
-				Assert.AreEqual("d1", h.Doc(h.Length() - 1).Get("id"), "wrong last");
-				float score1 = h.Score(h.Length() - 1);
+				Assert.AreEqual("d1", s.Doc(h[h.Length - 1].doc).Get("id"), "wrong last");
+				float score1 = h[h.Length - 1].score;
 				Assert.IsTrue(score > score1, "d1 does not have worse score then others: " + score + " >? " + score1);
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testBooleanOptionalNoTiebreaker", h);
+				PrintHits("testBooleanOptionalNoTiebreaker", h, s);
 				throw e;
 			}
 		}
@@ -398,22 +398,22 @@
 			QueryUtils.Check(q, s);
 			
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
 				
-				Assert.AreEqual(4, h.Length(), "4 docs should match " + q.ToString());
+				Assert.AreEqual(4, h.Length, "4 docs should match " + q.ToString());
 				
-				float score0 = h.Score(0);
-				float score1 = h.Score(1);
-				float score2 = h.Score(2);
-				float score3 = h.Score(3);
-				
-				System.String doc0 = h.Doc(0).Get("id");
-				System.String doc1 = h.Doc(1).Get("id");
-				System.String doc2 = h.Doc(2).Get("id");
-				System.String doc3 = h.Doc(3).Get("id");
+				float score0 = h[0].score;
+				float score1 = h[1].score;
+				float score2 = h[2].score;
+				float score3 = h[3].score;
+				
+				System.String doc0 = s.Doc(h[0].doc).Get("id");
+				System.String doc1 = s.Doc(h[1].doc).Get("id");
+				System.String doc2 = s.Doc(h[2].doc).Get("id");
+				System.String doc3 = s.Doc(h[3].doc).Get("id");
 				
 				Assert.IsTrue(doc0.Equals("d2") || doc0.Equals("d4"), "doc0 should be d2 or d4: " + doc0);
 				Assert.IsTrue(doc1.Equals("d2") || doc1.Equals("d4"), "doc1 should be d2 or d4: " + doc0);
@@ -426,7 +426,7 @@
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testBooleanOptionalWithTiebreaker", h);
+                PrintHits("testBooleanOptionalWithTiebreaker", h, s);
 				throw e;
 			}
 		}
@@ -451,22 +451,22 @@
 			QueryUtils.Check(q, s);
 			
 			
-			Hits h = s.Search(q);
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
 			try
 			{
 				
-				Assert.AreEqual(4, h.Length(), "4 docs should match " + q.ToString());
+				Assert.AreEqual(4, h.Length, "4 docs should match " + q.ToString());
 				
-				float score0 = h.Score(0);
-				float score1 = h.Score(1);
-				float score2 = h.Score(2);
-				float score3 = h.Score(3);
-				
-				System.String doc0 = h.Doc(0).Get("id");
-				System.String doc1 = h.Doc(1).Get("id");
-				System.String doc2 = h.Doc(2).Get("id");
-				System.String doc3 = h.Doc(3).Get("id");
+				float score0 = h[0].score;
+				float score1 = h[1].score;
+				float score2 = h[2].score;
+				float score3 = h[3].score;
+				
+				System.String doc0 = s.Doc(h[0].doc).Get("id");
+				System.String doc1 = s.Doc(h[1].doc).Get("id");
+				System.String doc2 = s.Doc(h[2].doc).Get("id");
+				System.String doc3 = s.Doc(h[3].doc).Get("id");
 				
 				Assert.AreEqual("d4", doc0, "doc0 should be d4: ");
 				Assert.AreEqual("d3", doc1, "doc1 should be d3: ");
@@ -479,17 +479,11 @@
 			}
 			catch (System.ApplicationException e)
 			{
-				PrintHits("testBooleanOptionalWithTiebreakerAndBoost", h);
+                PrintHits("testBooleanOptionalWithTiebreakerAndBoost", h, s);
 				throw e;
 			}
 		}
 		
-		
-		
-		
-		
-		
-		
 		/// <summary>macro </summary>
 		protected internal virtual Query Tq(System.String f, System.String t)
 		{
@@ -503,16 +497,15 @@
 			return q;
 		}
 		
-		
-		protected internal virtual void  PrintHits(System.String test, Hits h)
+		protected internal virtual void  PrintHits(System.String test, ScoreDoc[] h, Searcher s)
 		{
 			
 			System.Console.Error.WriteLine("------- " + test + " -------");
 			
-			for (int i = 0; i < h.Length(); i++)
+			for (int i = 0; i < h.Length; i++)
 			{
-				Lucene.Net.Documents.Document d = h.Doc(i);
-				float score = h.Score(i);
+				Lucene.Net.Documents.Document d = s.Doc(h[i].doc);
+				float score = h[i].score;
 				System.Console.Error.WriteLine("#" + i + ": {0.000000000}" + score + " - " + d.Get("id"));
 			}
 		}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDocBoost.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs Wed Jul 29 18:04:12 2009
@@ -69,10 +69,10 @@
 		public virtual void  TestDocBoost_Renamed_Method()
 		{
 			RAMDirectory store = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
-			Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
+			Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
+			Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
 			f2.SetBoost(2.0f);
 			
 			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestExplanations.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs Wed Jul 29 18:04:12 2009
@@ -30,6 +30,7 @@
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Lucene.Net.Search.Spans;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 
 namespace Lucene.Net.Search
 {
@@ -70,11 +71,11 @@
 		{
 			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			for (int i = 0; i < docFields.Length; i++)
 			{
 				Document doc = new Document();
-				doc.Add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.TOKENIZED));
+				doc.Add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
 			writer.Close();
@@ -133,15 +134,25 @@
 			{
 				this.docs = docs;
 			}
-			public override System.Collections.BitArray Bits(IndexReader r)
-			{
-				System.Collections.BitArray b = new System.Collections.BitArray((r.MaxDoc() % 64 == 0?r.MaxDoc() / 64:r.MaxDoc() / 64 + 1) * 64);
-				for (int i = 0; i < docs.Length; i++)
-				{
-					b.Set(docs[i], true);
-				}
-				return b;
-			}
+            public override DocIdSet GetDocIdSet(IndexReader r)
+            {
+                System.Collections.BitArray b = new System.Collections.BitArray((r.MaxDoc() % 64 == 0 ? r.MaxDoc() / 64 : r.MaxDoc() / 64 + 1) * 64);
+                for (int i = 0; i < docs.Length; i++)
+                {
+                    b.Set(docs[i], true);
+                }
+                return new DocIdBitSet(b);
+            }
+            [System.Obsolete()]
+            public override System.Collections.BitArray Bits(IndexReader r)
+            {
+                System.Collections.BitArray b = new System.Collections.BitArray((r.MaxDoc() % 64 == 0 ? r.MaxDoc() / 64 : r.MaxDoc() / 64 + 1) * 64);
+                for (int i = 0; i < docs.Length; i++)
+                {
+                    b.Set(docs[i], true);
+                }
+                return b;
+            }
 		}
 		
 		/// <summary>helper for generating MultiPhraseQueries </summary>

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestExtendedFieldCache.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestExtendedFieldCache.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestExtendedFieldCache.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestExtendedFieldCache.cs Wed Jul 29 18:04:12 2009
@@ -42,16 +42,16 @@
 		{
 			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			long theLong = System.Int64.MaxValue;
 			double theDouble = System.Double.MaxValue;
 			for (int i = 0; i < NUM_DOCS; i++)
 			{
 				Document doc = new Document();
-				doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.UN_TOKENIZED));
-				//doc.Add(new Field("theDouble", System.Convert.ToString(theDouble--), Field.Store.NO, Field.Index.UN_TOKENIZED));
-				doc.Add(new Field("theDouble", (theDouble--).ToString("R"), Field.Store.NO, Field.Index.UN_TOKENIZED));
-				doc.Add(new Field("text", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+				doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				//doc.Add(new Field("theDouble", System.Convert.ToString(theDouble--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("theDouble", (theDouble--).ToString("R"), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("text", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
 			writer.Close();

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs Wed Jul 29 18:04:12 2009
@@ -28,6 +28,7 @@
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Occur = Lucene.Net.Search.BooleanClause.Occur;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 
 namespace Lucene.Net.Search
 {
@@ -48,9 +49,17 @@
 		[Serializable]
 		private class AnonymousClassFilter : Filter
 		{
-			public override System.Collections.BitArray Bits(IndexReader reader)
+            public override DocIdSet GetDocIdSet(IndexReader reader)
+            {
+                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
+                bitset.Set(1, true);
+                bitset.Set(3, true);
+                return new DocIdBitSet(bitset);
+            }
+            [System.Obsolete()]
+            public override System.Collections.BitArray Bits(IndexReader reader)
 			{
-				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64);
+                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
 				bitset.Set(1, true);
 				bitset.Set(3, true);
 				return bitset;
@@ -59,9 +68,19 @@
 		[Serializable]
 		private class AnonymousClassFilter1 : Filter
 		{
-			public override System.Collections.BitArray Bits(IndexReader reader)
+            public override DocIdSet GetDocIdSet(IndexReader reader)
+            {
+                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
+                for (int i = 0; i < 5; i++)
+                {
+                    bitset.Set(i, true);
+                }
+                return new DocIdBitSet(bitset);
+            }
+            [System.Obsolete()]
+            public override System.Collections.BitArray Bits(IndexReader reader)
 			{
-				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64);
+                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
 				for (int i = 0; i < 5; i++)
 				{
 					bitset.Set(i, true);
@@ -79,26 +98,26 @@
 		public override void SetUp()
 		{
 			directory = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("sorter", "b", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("sorter", "b", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
 			doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("field", "one two three four", Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("sorter", "d", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("field", "one two three four", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("sorter", "d", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
 			doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("field", "one two three y", Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("sorter", "a", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("field", "one two three y", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("sorter", "a", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
 			doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("field", "one two x", Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("sorter", "c", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("field", "one two x", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("sorter", "c", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
 			writer.Optimize();
@@ -126,29 +145,29 @@
 		public virtual void  TestFilteredQuery_Renamed_Method()
 		{
 			Query filteredquery = new FilteredQuery(query, filter);
-			Hits hits = searcher.Search(filteredquery);
-			Assert.AreEqual(1, hits.Length());
-			Assert.AreEqual(1, hits.Id(0));
+			ScoreDoc[] hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
+			Assert.AreEqual(1, hits[0].doc);
 			QueryUtils.Check(filteredquery, searcher);
 			
-			hits = searcher.Search(filteredquery, new Sort("sorter"));
-			Assert.AreEqual(1, hits.Length());
-			Assert.AreEqual(1, hits.Id(0));
+			hits = searcher.Search(filteredquery, null, 1000, new Sort("sorter")).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
+			Assert.AreEqual(1, hits[0].doc);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "one")), filter);
-			hits = searcher.Search(filteredquery);
-			Assert.AreEqual(2, hits.Length());
+			hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
+			Assert.AreEqual(2, hits.Length);
 			QueryUtils.Check(filteredquery, searcher);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "x")), filter);
-			hits = searcher.Search(filteredquery);
-			Assert.AreEqual(1, hits.Length());
-			Assert.AreEqual(3, hits.Id(0));
+			hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
+			Assert.AreEqual(3, hits[0].doc);
 			QueryUtils.Check(filteredquery, searcher);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "y")), filter);
-			hits = searcher.Search(filteredquery);
-			Assert.AreEqual(0, hits.Length());
+			hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length);
 			QueryUtils.Check(filteredquery, searcher);
 			
 			// test boost
@@ -182,14 +201,14 @@
 		/// <summary> Tests whether the scores of the two queries are the same.</summary>
 		public virtual void  AssertScoreEquals(Query q1, Query q2)
 		{
-			Hits hits1 = searcher.Search(q1);
-			Hits hits2 = searcher.Search(q2);
+			ScoreDoc[] hits1 = searcher.Search(q1, null, 1000).scoreDocs;
+			ScoreDoc[] hits2 = searcher.Search(q2, null, 1000).scoreDocs;
 			
-			Assert.AreEqual(hits1.Length(), hits2.Length());
+			Assert.AreEqual(hits1.Length, hits2.Length);
 			
-			for (int i = 0; i < hits1.Length(); i++)
+			for (int i = 0; i < hits1.Length; i++)
 			{
-				Assert.AreEqual(hits1.Score(i), hits2.Score(i), 0.0000001f);
+				Assert.AreEqual(hits1[i].score, hits2[i].score, 0.0000001f);
 			}
 		}
 		
@@ -200,8 +219,8 @@
 			RangeQuery rq = new RangeQuery(new Term("sorter", "b"), new Term("sorter", "d"), true);
 			
 			Query filteredquery = new FilteredQuery(rq, filter);
-			Hits hits = searcher.Search(filteredquery);
-			Assert.AreEqual(2, hits.Length());
+			ScoreDoc[] hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
+			Assert.AreEqual(2, hits.Length);
 			QueryUtils.Check(filteredquery, searcher);
 		}
 
@@ -209,12 +228,12 @@
 		public virtual void  TestBoolean()
 		{
 			BooleanQuery bq = new BooleanQuery();
-			Query query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(0));
+			Query query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.Search.SingleDocTestFilter(0));
 			bq.Add(query, BooleanClause.Occur.MUST);
-			query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(1));
+			query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.Search.SingleDocTestFilter(1));
 			bq.Add(query, BooleanClause.Occur.MUST);
-			Hits hits = searcher.Search(bq);
-			Assert.AreEqual(0, hits.Length());
+			ScoreDoc[] hits = searcher.Search(bq, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length);
 			QueryUtils.Check(query, searcher);
 		}
 	}

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredSearch.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using OpenBitSet = Lucene.Net.Util.OpenBitSet;
+
+namespace Lucene.Net.Search
+{
+    /**
+     *
+     */
+    [TestFixture]
+    public class TestFilteredSearch
+    {
+        private const string FIELD = "category";
+
+        [Test]
+        public void TestFilteredSearch_Renamed()
+        {
+            RAMDirectory directory = new RAMDirectory();
+            int[] filterBits = { 1, 36 };
+            Filter filter = new SimpleDocIdSetFilter(filterBits);
+
+            try
+            {
+                IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+                for (int i = 0; i < 60; i++)
+                {//Simple docs
+                    Document doc = new Document();
+                    doc.Add(new Field(FIELD, "" + i, Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    writer.AddDocument(doc);
+                }
+                writer.Close();
+
+                BooleanQuery booleanQuery = new BooleanQuery();
+                booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), BooleanClause.Occur.SHOULD);
+
+
+                IndexSearcher indexSearcher = new IndexSearcher(directory);
+                ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).scoreDocs;
+                Assert.AreEqual(1, hits.Length, "Number of matched documents");
+            }
+            catch (System.IO.IOException e)
+            {
+                Assert.Fail(e.Message);
+            }
+        }
+
+        public class SimpleDocIdSetFilter : Filter
+        {
+            private OpenBitSet bits;
+
+            public SimpleDocIdSetFilter(int[] docs)
+            {
+                bits = new OpenBitSet();
+                for (int i = 0; i < docs.Length; i++)
+                {
+                    bits.Set(docs[i]);
+                }
+            }
+
+            public override DocIdSet GetDocIdSet(IndexReader reader)
+            {
+                return bits;
+            }
+
+            [System.Obsolete()]
+            public override System.Collections.BitArray Bits(IndexReader reader)
+            {
+                return null;
+            }
+        }
+    }
+}



Mime
View raw message