lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r677059 [16/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/...
Date Tue, 15 Jul 2008 21:44:10 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhraseQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
-using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Lucene.Net.Analysis;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -37,7 +38,7 @@
 	/// <author>  Erik Hatcher
 	/// </author>
 	[TestFixture]
-    public class TestPhraseQuery
+	public class TestPhraseQuery : LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer : Analyzer
 		{
@@ -59,7 +60,7 @@
 				
 			}
 
-            public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
 				return new WhitespaceTokenizer(reader);
 			}
@@ -69,14 +70,18 @@
 				return 100;
 			}
 		}
-
-        private IndexSearcher searcher;
+		
+		/// <summary>threshold for comparing floats </summary>
+		public const float SCORE_COMP_THRESH = 1e-6f;
+		
+		private IndexSearcher searcher;
 		private PhraseQuery query;
 		private RAMDirectory directory;
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
+			base.SetUp();
 			directory = new RAMDirectory();
 			Analyzer analyzer = new AnonymousClassAnalyzer(this);
 			IndexWriter writer = new IndexWriter(directory, analyzer, true);
@@ -86,6 +91,15 @@
 			doc.Add(new Field("repeated", "this is a repeated field - first part", Field.Store.YES, Field.Index.TOKENIZED));
 			Fieldable repeatedField = new Field("repeated", "second part of a repeated field", Field.Store.YES, Field.Index.TOKENIZED);
 			doc.Add(repeatedField);
+			doc.Add(new Field("palindrome", "one two three two one", Field.Store.YES, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
+			
+			doc = new Document();
+			doc.Add(new Field("nonexist", "phrase exist notexist exist found", Field.Store.YES, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
+			
+			doc = new Document();
+			doc.Add(new Field("nonexist", "phrase exist notexist exist found", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
 			
 			writer.Optimize();
@@ -96,44 +110,45 @@
 		}
 		
 		[TearDown]
-        public virtual void  TearDown()
+		public override void TearDown()
 		{
+			base.TearDown();
 			searcher.Close();
 			directory.Close();
 		}
 		
 		[Test]
-        public virtual void  TestNotCloseEnough()
+		public virtual void  TestNotCloseEnough()
 		{
 			query.SetSlop(2);
 			query.Add(new Term("field", "one"));
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length());
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		[Test]
-        public virtual void  TestBarelyCloseEnough()
+		public virtual void  TestBarelyCloseEnough()
 		{
 			query.SetSlop(3);
 			query.Add(new Term("field", "one"));
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length());
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		/// <summary> Ensures slop of 0 works for exact matches, but not reversed</summary>
 		[Test]
-        public virtual void  TestExact()
+		public virtual void  TestExact()
 		{
 			// slop is zero by default
 			query.Add(new Term("field", "four"));
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "exact match");
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			query = new PhraseQuery();
@@ -141,11 +156,11 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "reverse not exact");
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		[Test]
-        public virtual void  TestSlop1()
+		public virtual void  TestSlop1()
 		{
 			// Ensures slop of 1 works with terms in order.
 			query.SetSlop(1);
@@ -153,7 +168,7 @@
 			query.Add(new Term("field", "two"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "in order");
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			// Ensures slop of 1 does not work for phrases out of order;
@@ -164,19 +179,19 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "reversed, slop not 2 or more");
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		/// <summary> As long as slop is at least 2, terms can be reversed</summary>
 		[Test]
-        public virtual void  TestOrderDoesntMatter()
+		public virtual void  TestOrderDoesntMatter()
 		{
 			query.SetSlop(2); // must be at least two for reverse order match
 			query.Add(new Term("field", "two"));
 			query.Add(new Term("field", "one"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			query = new PhraseQuery();
@@ -185,14 +200,14 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "not sloppy enough");
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		/// <summary> slop is the total number of positional moves allowed
 		/// to line up a phrase
 		/// </summary>
 		[Test]
-        public virtual void  TestMulipleTerms()
+		public virtual void  TestMulipleTerms()
 		{
 			query.SetSlop(2);
 			query.Add(new Term("field", "one"));
@@ -200,7 +215,7 @@
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "two total moves");
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			query = new PhraseQuery();
@@ -210,17 +225,17 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "slop of 5 not close enough");
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			query.SetSlop(6);
 			hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "slop of 6 just right");
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		[Test]
-        public virtual void  TestPhraseQueryWithStopAnalyzer()
+		public virtual void  TestPhraseQueryWithStopAnalyzer()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			StopAnalyzer stopAnalyzer = new StopAnalyzer();
@@ -238,7 +253,7 @@
 			query.Add(new Term("field", "words"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length());
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			// currently StopAnalyzer does not leave "holes", so this matches.
@@ -247,14 +262,14 @@
 			query.Add(new Term("field", "here"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length());
-            QueryUtils.Check(query, searcher);
+			QueryUtils.Check(query, searcher);
 
 			
 			searcher.Close();
 		}
 		
 		[Test]
-        public virtual void  TestPhraseQueryInConjunctionScorer()
+		public virtual void  TestPhraseQueryInConjunctionScorer()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
@@ -278,7 +293,7 @@
 			phraseQuery.Add(new Term("source", "info"));
 			Hits hits = searcher.Search(phraseQuery);
 			Assert.AreEqual(2, hits.Length());
-            QueryUtils.Check(phraseQuery, searcher);
+			QueryUtils.Check(phraseQuery, searcher);
 
 			
 			TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
@@ -287,7 +302,7 @@
 			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
 			hits = searcher.Search(booleanQuery);
 			Assert.AreEqual(1, hits.Length());
-            QueryUtils.Check(termQuery, searcher);
+			QueryUtils.Check(termQuery, searcher);
 
 			
 			searcher.Close();
@@ -332,7 +347,7 @@
 			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
 			hits = searcher.Search(booleanQuery);
 			Assert.AreEqual(2, hits.Length());
-            QueryUtils.Check(booleanQuery, searcher);
+			QueryUtils.Check(booleanQuery, searcher);
 
 			
 			searcher.Close();
@@ -340,7 +355,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestSlopScoring()
+		public virtual void  TestSlopScoring()
 		{
 			Directory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
@@ -375,21 +390,200 @@
 			Assert.AreEqual(1, hits.Id(1));
 			Assert.AreEqual(0.31, hits.Score(2), 0.01);
 			Assert.AreEqual(2, hits.Id(2));
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
 		[Test]
-        public virtual void  TestWrappedPhrase()
+		public virtual void  TestWrappedPhrase()
 		{
 			query.Add(new Term("repeated", "first"));
 			query.Add(new Term("repeated", "part"));
 			query.Add(new Term("repeated", "second"));
 			query.Add(new Term("repeated", "part"));
+			query.SetSlop(100);
+			
+			Hits hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "slop of 100 just right");
+			QueryUtils.Check(query, searcher);
+			
 			query.SetSlop(99);
 			
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length(), "slop of 99 not enough");
+			QueryUtils.Check(query, searcher);
+		}
+		
+		// work on two docs like this: "phrase exist notexist exist found"
+		[Test]
+		public virtual void  TestNonExistingPhrase()
+		{
+			// phrase without repetitions that exists in 2 docs
+			query.Add(new Term("nonexist", "phrase"));
+			query.Add(new Term("nonexist", "notexist"));
+			query.Add(new Term("nonexist", "found"));
+			query.SetSlop(2); // would be found this way
+			
 			Hits hits = searcher.Search(query);
-			Assert.AreEqual(0, hits.Length());
-            QueryUtils.Check(query, searcher);
-        }
+			Assert.AreEqual(2, hits.Length(), "phrase without repetitions exists in 2 docs");
+			QueryUtils.Check(query, searcher);
+			
+			// phrase with repetitions that exists in 2 docs
+			query = new PhraseQuery();
+			query.Add(new Term("nonexist", "phrase"));
+			query.Add(new Term("nonexist", "exist"));
+			query.Add(new Term("nonexist", "exist"));
+			query.SetSlop(1); // would be found 
+			
+			hits = searcher.Search(query);
+			Assert.AreEqual(2, hits.Length(), "phrase with repetitions exists in two docs");
+			QueryUtils.Check(query, searcher);
+			
+			// phrase I with repetitions that does not exist in any doc
+			query = new PhraseQuery();
+			query.Add(new Term("nonexist", "phrase"));
+			query.Add(new Term("nonexist", "notexist"));
+			query.Add(new Term("nonexist", "phrase"));
+			query.SetSlop(1000); // would not be found no matter how high the slop is
+			
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length(), "nonexisting phrase with repetitions does not exist in any doc");
+			QueryUtils.Check(query, searcher);
+			
+			// phrase II with repetitions that does not exist in any doc
+			query = new PhraseQuery();
+			query.Add(new Term("nonexist", "phrase"));
+			query.Add(new Term("nonexist", "exist"));
+			query.Add(new Term("nonexist", "exist"));
+			query.Add(new Term("nonexist", "exist"));
+			query.SetSlop(1000); // would not be found no matter how high the slop is
+			
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length(), "nonexisting phrase with repetitions does not exist in any doc");
+			QueryUtils.Check(query, searcher);
+		}
+		
+		/// <summary> Working on a 2 fields like this:
+		/// Field("field", "one two three four five")
+		/// Field("palindrome", "one two three two one")
+		/// Phrase of size 2 occurring twice, once in order and once in reverse, 
+		/// because doc is a palindrome, is counted twice. 
+		/// Also, in this case order in query does not matter. 
+		/// Also, when an exact match is found, both the sloppy scorer and the exact scorer score the same.   
+		/// </summary>
+		public virtual void  TestPalyndrome2()
+		{
+			
+			// search on non-palindrome, find phrase with no slop, using exact phrase scorer
+			query.SetSlop(0); // to use exact phrase scorer
+			query.Add(new Term("field", "two"));
+			query.Add(new Term("field", "three"));
+			Hits hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "phrase found with exact phrase scorer");
+			float score0 = hits.Score(0);
+			//System.out.println("(exact) field: two three: "+score0);
+			QueryUtils.Check(query, searcher);
+			
+			// search on non-palindrome, find phrase with slop 2, though no slop required here.
+			query.SetSlop(2); // to use sloppy scorer 
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+			float score1 = hits.Score(0);
+			//System.out.println("(sloppy) field: two three: "+score1);
+			Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "exact scorer and sloppy scorer score the same when slop does not matter");
+			QueryUtils.Check(query, searcher);
+			
+			// search ordered in palindrome, find it twice
+			query = new PhraseQuery();
+			query.SetSlop(2); // must be at least two for both ordered and reversed to match
+			query.Add(new Term("palindrome", "two"));
+			query.Add(new Term("palindrome", "three"));
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+			float score2 = hits.Score(0);
+			//System.out.println("palindrome: two three: "+score2);
+			QueryUtils.Check(query, searcher);
+			
+			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
+			//assertTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
+			
+			// search reversed in palindrome, find it twice
+			query = new PhraseQuery();
+			query.SetSlop(2); // must be at least two for both ordered and reversed to match
+			query.Add(new Term("palindrome", "three"));
+			query.Add(new Term("palindrome", "two"));
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+			float score3 = hits.Score(0);
+			//System.out.println("palindrome: three two: "+score3);
+			QueryUtils.Check(query, searcher);
+			
+			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
+			//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
+			//assertEquals("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
+		}
+		
+		/// <summary> Working on a 2 fields like this:
+		/// Field("field", "one two three four five")
+		/// Field("palindrome", "one two three two one")
+		/// Phrase of size 3 occurring twice, once in order and once in reverse, 
+		/// because doc is a palindrome, is counted twice. 
+		/// Also, in this case order in query does not matter. 
+		/// Also, when an exact match is found, both the sloppy scorer and the exact scorer score the same.   
+		/// </summary>
+		[Test]
+		public virtual void  TestPalyndrome3()
+		{
+			
+			// search on non-palindrome, find phrase with no slop, using exact phrase scorer
+			query.SetSlop(0); // to use exact phrase scorer
+			query.Add(new Term("field", "one"));
+			query.Add(new Term("field", "two"));
+			query.Add(new Term("field", "three"));
+			Hits hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "phrase found with exact phrase scorer");
+			float score0 = hits.Score(0);
+			//System.out.println("(exact) field: one two three: "+score0);
+			QueryUtils.Check(query, searcher);
+			
+			// search on non-palindrome, find phrase with slop 4, though no slop required here.
+			query.SetSlop(4); // to use sloppy scorer 
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+			float score1 = hits.Score(0);
+			//System.out.println("(sloppy) field: one two three: "+score1);
+			Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "exact scorer and sloppy scorer score the same when slop does not matter");
+			QueryUtils.Check(query, searcher);
+			
+			// search ordered in palindrome, find it twice
+			query = new PhraseQuery();
+			query.SetSlop(4); // must be at least four for both ordered and reversed to match
+			query.Add(new Term("palindrome", "one"));
+			query.Add(new Term("palindrome", "two"));
+			query.Add(new Term("palindrome", "three"));
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+			float score2 = hits.Score(0);
+			//System.out.println("palindrome: one two three: "+score2);
+			QueryUtils.Check(query, searcher);
+			
+			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
+			//assertTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
+			
+			// search reversed in palindrome, find it twice
+			query = new PhraseQuery();
+			query.SetSlop(4); // must be at least four for both ordered and reversed to match
+			query.Add(new Term("palindrome", "three"));
+			query.Add(new Term("palindrome", "two"));
+			query.Add(new Term("palindrome", "one"));
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+			float score3 = hits.Score(0);
+			//System.out.println("palindrome: three two one: "+score3);
+			QueryUtils.Check(query, searcher);
+			
+			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
+			//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
+			//assertEquals("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
+		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPositionIncrement.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs Tue Jul 15 14:44:04 2008
@@ -19,28 +19,31 @@
 
 using NUnit.Framework;
 
-using Term = Lucene.Net.Index.Term;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
+using StopAnalyzer = Lucene.Net.Analysis.StopAnalyzer;
+using StopFilter = Lucene.Net.Analysis.StopFilter;
 using Token = Lucene.Net.Analysis.Token;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> Term position unit test.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Doug Cutting
-	/// </author>
-	/// <version>  $Revision: 150585 $
+	/// <version>  $Revision: 607591 $
 	/// </version>
 	[TestFixture]
-    public class TestPositionIncrement
+	public class TestPositionIncrement : LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer : Analyzer
 		{
@@ -49,7 +52,7 @@
 				InitBlock(enclosingInstance);
 			}
 
-            private class AnonymousClassTokenStream : TokenStream
+			private class AnonymousClassTokenStream : TokenStream
 			{
 				public AnonymousClassTokenStream(AnonymousClassAnalyzer enclosingInstance)
 				{
@@ -69,7 +72,7 @@
 					
 				}
 
-                private System.String[] TOKENS = new System.String[]{"1", "2", "3", "4", "5"};
+				private System.String[] TOKENS = new System.String[]{"1", "2", "3", "4", "5"};
 				private int[] INCREMENTS = new int[]{1, 2, 1, 0, 1};
 				private int i = 0;
 				
@@ -102,13 +105,40 @@
 			}
 		}
 		
+		private class AnonymousClassAnalyzer1 : Analyzer
+		{
+			public AnonymousClassAnalyzer1(TestPositionIncrement enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestPositionIncrement enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestPositionIncrement enclosingInstance;
+			public TestPositionIncrement Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal WhitespaceAnalyzer a = new WhitespaceAnalyzer();
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				TokenStream ts = a.TokenStream(fieldName, reader);
+				return new StopFilter(ts, new System.String[]{"stop"});
+			}
+		}
+		
 		[Test]
-        public virtual void  TestSetPosition()
+		public virtual void  TestSetPosition()
 		{
 			Analyzer analyzer = new AnonymousClassAnalyzer(this);
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, analyzer, true);
-			Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
+			Document d = new Document();
 			d.Add(new Field("field", "bogus", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(d);
 			writer.Optimize();
@@ -124,6 +154,20 @@
 			hits = searcher.Search(q);
 			Assert.AreEqual(0, hits.Length());
 			
+			// same as previous, just specify positions explicitly.
+			q = new PhraseQuery();
+			q.Add(new Term("field", "1"), 0);
+			q.Add(new Term("field", "2"), 1);
+			hits = searcher.Search(q);
+			Assert.AreEqual(0, hits.Length());
+			
+			// specifying correct positions should find the phrase.
+			q = new PhraseQuery();
+			q.Add(new Term("field", "1"), 0);
+			q.Add(new Term("field", "2"), 2);
+			hits = searcher.Search(q);
+			Assert.AreEqual(1, hits.Length());
+			
 			q = new PhraseQuery();
 			q.Add(new Term("field", "2"));
 			q.Add(new Term("field", "3"));
@@ -136,6 +180,28 @@
 			hits = searcher.Search(q);
 			Assert.AreEqual(0, hits.Length());
 			
+			// phrase query would find it when correct positions are specified. 
+			q = new PhraseQuery();
+			q.Add(new Term("field", "3"), 0);
+			q.Add(new Term("field", "4"), 0);
+			hits = searcher.Search(q);
+			Assert.AreEqual(1, hits.Length());
+			
+			// phrase query should fail for non existing searched term 
+			// even if there exist another searched terms in the same searched position. 
+			q = new PhraseQuery();
+			q.Add(new Term("field", "3"), 0);
+			q.Add(new Term("field", "9"), 0);
+			hits = searcher.Search(q);
+			Assert.AreEqual(0, hits.Length());
+			
+			// multi-phrase query should succeed for non existing searched term
+			// because there exist another searched terms in the same searched position. 
+			MultiPhraseQuery mq = new MultiPhraseQuery();
+			mq.Add(new Term[]{new Term("field", "3"), new Term("field", "9")}, 0);
+			hits = searcher.Search(mq);
+			Assert.AreEqual(1, hits.Length());
+			
 			q = new PhraseQuery();
 			q.Add(new Term("field", "2"));
 			q.Add(new Term("field", "4"));
@@ -159,13 +225,54 @@
 			q.Add(new Term("field", "5"));
 			hits = searcher.Search(q);
 			Assert.AreEqual(0, hits.Length());
+			
+			// analyzer to introduce stopwords and increment gaps 
+			Analyzer stpa = new AnonymousClassAnalyzer1(this);
+			
+			// should not find "1 2" because there is a gap of 1 in the index
+			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", stpa);
+			q = (PhraseQuery) qp.Parse("\"1 2\"");
+			hits = searcher.Search(q);
+			Assert.AreEqual(0, hits.Length());
+			
+			// omitted stop word cannot help because stop filter swallows the increments. 
+			q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
+			hits = searcher.Search(q);
+			Assert.AreEqual(0, hits.Length());
+			
+			// query parser alone won't help, because stop filter swallows the increments. 
+			qp.SetEnablePositionIncrements(true);
+			q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
+			hits = searcher.Search(q);
+			Assert.AreEqual(0, hits.Length());
+			
+			bool dflt = StopFilter.GetEnablePositionIncrementsDefault();
+			try
+			{
+				// stop filter alone won't help, because query parser swallows the increments. 
+				qp.SetEnablePositionIncrements(false);
+				StopFilter.SetEnablePositionIncrementsDefault(true);
+				q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
+				hits = searcher.Search(q);
+				Assert.AreEqual(0, hits.Length());
+				
+				// when both qp and stopFilter propagate increments, we should find the doc.
+				qp.SetEnablePositionIncrements(true);
+				q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
+				hits = searcher.Search(q);
+				Assert.AreEqual(1, hits.Length());
+			}
+			finally
+			{
+				StopFilter.SetEnablePositionIncrementsDefault(dflt);
+			}
 		}
 		
 		/// <summary> Basic analyzer behavior should be to keep sequential terms in one
 		/// increment from one another.
 		/// </summary>
 		[Test]
-        public virtual void  TestIncrementingPositions()
+		public virtual void  TestIncrementingPositions()
 		{
 			Analyzer analyzer = new WhitespaceAnalyzer();
 			TokenStream ts = analyzer.TokenStream("field", new System.IO.StringReader("one two three four five"));

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixFilter.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -36,11 +37,11 @@
 	/// </author>
 	/// <author>  yonik
 	/// </author>
-    [TestFixture]
-    public class TestPrefixFilter
+	[TestFixture]
+	public class TestPrefixFilter : LuceneTestCase
 	{
-        [Test]
-		public virtual void  _TestPrefixFilter()
+		[Test]
+		public virtual void  TestPrefixFilter_Renamed_Method()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -35,10 +36,10 @@
 	/// <author>  Erik Hatcher
 	/// </author>
 	[TestFixture]
-    public class TestPrefixQuery
+	public class TestPrefixQuery : LuceneTestCase
 	{
 		[Test]
-        public virtual void  TestPrefixQuery_Renamed_Method()
+		public virtual void  TestPrefixQuery_Renamed_Method()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestQueryTermVector.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs Tue Jul 15 14:44:04 2008
@@ -20,25 +20,15 @@
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	[TestFixture]
-	public class TestQueryTermVector
+	public class TestQueryTermVector : LuceneTestCase
 	{
 		
-		[SetUp]
-        public virtual void  SetUp()
-		{
-		}
-		
-		[TearDown]
-        public virtual void  TearDown()
-		{
-			
-		}
-		
-        [Test]
+		[Test]
 		public virtual void  TestConstructor()
 		{
 			System.String[] queryTerm = new System.String[]{"foo", "bar", "foo", "again", "foo", "bar", "go", "go", "go"};

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRangeFilter.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeFilter.cs Tue Jul 15 14:44:04 2008
@@ -33,12 +33,12 @@
 	/// nor does it adequately test 'negative' results.  It also does not test
 	/// that garbage in results in an Exception.
 	/// </summary>
-    [TestFixture]
-    public class TestRangeFilter : BaseTestRangeFilter
+	[TestFixture]
+	public class TestRangeFilter : BaseTestRangeFilter
 	{
 		
-        [Test]
-        public virtual void  TestRangeFilterId()
+		[Test]
+		public virtual void  TestRangeFilterId()
 		{
 			
 			IndexReader reader = IndexReader.Open(index);
@@ -120,8 +120,8 @@
 			Assert.AreEqual(1, result.Length(), "med,med,T,T");
 		}
 		
-        [Test]
-        public virtual void  TestRangeFilterRand()
+		[Test]
+		public virtual void  TestRangeFilterRand()
 		{
 			
 			IndexReader reader = IndexReader.Open(index);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRangeQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -32,20 +33,21 @@
 	/// <author>  goller
 	/// </author>
 	[TestFixture]
-    public class TestRangeQuery
+	public class TestRangeQuery : LuceneTestCase
 	{
 		
 		private int docCount = 0;
 		private RAMDirectory dir;
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
+			base.SetUp();
 			dir = new RAMDirectory();
 		}
 		
 		[Test]
-        public virtual void  TestExclusive()
+		public virtual void  TestExclusive()
 		{
 			Query query = new RangeQuery(new Term("content", "A"), new Term("content", "C"), false);
 			InitializeIndex(new System.String[]{"A", "B", "C", "D"});
@@ -68,7 +70,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestInclusive()
+		public virtual void  TestInclusive()
 		{
 			Query query = new RangeQuery(new Term("content", "A"), new Term("content", "C"), true);
 			
@@ -92,7 +94,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestEqualsHashcode()
+		public virtual void  TestEqualsHashcode()
 		{
 			Query query = new RangeQuery(new Term("content", "A"), new Term("content", "C"), true);
 			query.SetBoost(1.0f);

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteCachingWrapperFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRemoteCachingWrapperFilter.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteCachingWrapperFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteCachingWrapperFilter.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,227 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> Tests that the index is cached on the searcher side of things.
+	/// NOTE: This is copied from TestRemoteSearchable since it already had a remote index set up.
+	/// </summary>
+	/// <author>  Matt Ericson
+	/// </author>
+	[TestFixture]
+	public class TestRemoteCachingWrapperFilter : LuceneTestCase
+	{
+		private static System.Runtime.Remoting.Channels.Http.HttpChannel httpChannel;
+		private static int port;
+		private static bool serverStarted;
+
+		[SetUp]
+		public override void SetUp()
+		{
+			base.SetUp();
+			Random rnd = new Random();
+			port = rnd.Next(1099, 9999);
+			httpChannel = new System.Runtime.Remoting.Channels.Http.HttpChannel(port);
+		}
+
+		[TearDown]
+		public override void TearDown()
+		{
+			httpChannel = null;
+			base.TearDown();
+		}
+
+		private static Lucene.Net.Search.Searchable GetRemote()
+		{
+			try
+			{
+				if (!serverStarted)
+					StartServer();
+				return LookupRemote();
+			}
+			catch (System.Exception)
+			{
+				StartServer();
+				return LookupRemote();
+			}
+		}
+
+		private static Lucene.Net.Search.Searchable LookupRemote()
+		{
+			return (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), string.Format("http://localhost:{0}/Searchable", port));
+		}
+
+		private static void StartServer()
+		{
+			// construct an index
+			RAMDirectory indexStore = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+
+			Document doc = new Document();
+			doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("type", "A", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
+
+			//Need a second document to search for
+			doc = new Document();
+			doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("type", "B", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
+
+			writer.Optimize();
+			writer.Close();
+
+			try
+			{
+				System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(httpChannel, false);
+			}
+			catch (System.Net.Sockets.SocketException ex)
+			{
+				if (ex.ErrorCode == 10048)
+					return;     // EADDRINUSE?
+				throw ex;
+			}
+
+			// publish it
+			Lucene.Net.Search.Searchable local = new IndexSearcher(indexStore);
+			RemoteSearchable impl = new RemoteSearchable(local);
+			System.Runtime.Remoting.RemotingServices.Marshal(impl, "Searchable");
+			serverStarted = true;
+		}
+
+		
+		//private static Lucene.Net.Search.Searchable GetRemote()
+		//{
+		//    try
+		//    {
+		//        return LookupRemote();
+		//    }
+		//    catch (System.Exception)
+		//    {
+		//        StartServer();
+		//        return LookupRemote();
+		//    }
+		//}
+		
+		//private static Lucene.Net.Search.Searchable LookupRemote()
+		//{
+		//    return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), "http://" + "//localhost/Searchable");
+		//}
+		
+		//private static void  StartServer()
+		//{
+		//    // construct an index
+		//    RAMDirectory indexStore = new RAMDirectory();
+		//    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+		//    Document doc = new Document();
+		//    doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
+		//    doc.Add(new Field("type", "A", Field.Store.YES, Field.Index.TOKENIZED));
+		//    doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
+		//    writer.AddDocument(doc);
+		//    //Need a second document to search for
+		//    doc = new Document();
+		//    doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
+		//    doc.Add(new Field("type", "B", Field.Store.YES, Field.Index.TOKENIZED));
+		//    doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
+		//    writer.AddDocument(doc);
+		//    writer.Optimize();
+		//    writer.Close();
+			
+		//    try
+		//    {
+		//        System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099), false);
+		//    }
+		//    catch (System.Net.Sockets.SocketException ex)
+		//    {
+		//        if (ex.ErrorCode == 10048)
+		//            return;     // EADDRINUSE?
+		//        throw ex;
+		//    }
+
+		//    // publish it
+		//    Lucene.Net.Search.Searchable local = new IndexSearcher(indexStore);
+		//    RemoteSearchable impl = new RemoteSearchable(local);
+		//    System.Runtime.Remoting.RemotingServices.Marshal(impl, "Searchable");
+		//}
+		
+		private static void  Search(Query query, Filter filter, int hitNumber, System.String typeValue)
+		{
+			Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[]{GetRemote()};
+			Searcher searcher = new MultiSearcher(searchables);
+			Hits result = searcher.Search(query, filter);
+			Assert.AreEqual(1, result.Length());
+			Document document = result.Doc(hitNumber);
+			Assert.IsTrue(document != null, "document is null and it shouldn't be");
+			Assert.AreEqual(typeValue, document.Get("type"));
+			Assert.IsTrue(document.GetFields().Count == 3, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 3);
+		}
+		
+		
+		[Test]
+		public virtual void  TestTermRemoteFilter()
+		{
+			CachingWrapperFilterHelper cwfh = new CachingWrapperFilterHelper(new QueryFilter(new TermQuery(new Term("type", "a"))));
+			
+			// This is what we are fixing - if one uses a CachingWrapperFilter(Helper) it will never 
+			// cache the filter on the remote site
+			cwfh.SetShouldHaveCache(false);
+			Search(new TermQuery(new Term("test", "test")), cwfh, 0, "A");
+			cwfh.SetShouldHaveCache(false);
+			Search(new TermQuery(new Term("test", "test")), cwfh, 0, "A");
+			
+			// This is how we fix caching - we wrap a Filter in the RemoteCachingWrapperFilter(Handler - for testing)
+			// to cache the Filter on the searcher (remote) side
+			RemoteCachingWrapperFilterHelper rcwfh = new RemoteCachingWrapperFilterHelper(cwfh, false);
+			Search(new TermQuery(new Term("test", "test")), rcwfh, 0, "A");
+			
+			// 2nd time we do the search, we should be using the cached Filter
+			rcwfh.ShouldHaveCache(true);
+			Search(new TermQuery(new Term("test", "test")), rcwfh, 0, "A");
+			
+			// assert that we get the same cached Filter, even if we create a new instance of RemoteCachingWrapperFilter(Helper)
+			// this should pass because the Filter parameters are the same, and the cache uses Filter's hashCode() as cache keys,
+			// and Filters' hashCode() builds on Filter parameters, not the Filter instance itself
+			rcwfh = new RemoteCachingWrapperFilterHelper(new QueryFilter(new TermQuery(new Term("type", "a"))), false);
+			rcwfh.ShouldHaveCache(false);
+			Search(new TermQuery(new Term("test", "test")), rcwfh, 0, "A");
+			
+			rcwfh = new RemoteCachingWrapperFilterHelper(new QueryFilter(new TermQuery(new Term("type", "a"))), false);
+			rcwfh.ShouldHaveCache(true);
+			Search(new TermQuery(new Term("test", "test")), rcwfh, 0, "A");
+			
+			// assert that we get a non-cached version of the Filter because this is a new Query (type:b)
+			rcwfh = new RemoteCachingWrapperFilterHelper(new QueryFilter(new TermQuery(new Term("type", "b"))), false);
+			rcwfh.ShouldHaveCache(false);
+			Search(new TermQuery(new Term("type", "b")), rcwfh, 0, "B");
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteSearchable.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRemoteSearchable.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteSearchable.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteSearchable.cs Tue Jul 15 14:44:04 2008
@@ -19,61 +19,141 @@
 
 using NUnit.Framework;
 
-using Term = Lucene.Net.Index.Term;
+using Lucene.Net.Documents;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
-	/// <version>  $Id: TestRemoteSearchable.java 150500 2004-09-08 18:10:09Z dnaber $
+	/// <version>  $Id: TestRemoteSearchable.java 583534 2007-10-10 16:46:35Z mikemccand $
 	/// </version>
 	[TestFixture]
-    public class TestRemoteSearchable
+	public class TestRemoteSearchable : LuceneTestCase
 	{
-		
-		private Lucene.Net.Search.Searchable GetRemote()
+		private static System.Runtime.Remoting.Channels.Http.HttpChannel httpChannel;
+		private static int port;
+		private static bool serverStarted;
+
+		[SetUp]
+		public override void SetUp()
 		{
-            return LookupRemote();
+			base.SetUp();
+			Random rnd = new Random();
+			port = rnd.Next(1099, 9999);
+			httpChannel = new System.Runtime.Remoting.Channels.Http.HttpChannel(port);
 		}
-		
-		private  Lucene.Net.Search.Searchable LookupRemote()
+
+		[TearDown]
+		public override void TearDown()
 		{
-            return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"http://localhost:1099/Searchable");
+			httpChannel = null;
+			base.TearDown();
 		}
-		
-		[SetUp]
-        public void StartServer()
+
+		private static Lucene.Net.Search.Searchable GetRemote()
+		{
+			try
+			{
+				if (!serverStarted)
+					StartServer();
+				return LookupRemote();
+			}
+			catch (System.Exception)
+			{
+				StartServer();
+				return LookupRemote();
+			}
+		}
+
+		private static Lucene.Net.Search.Searchable LookupRemote()
+		{
+			return (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), string.Format("http://localhost:{0}/Searchable", port));
+		}
+
+		public static void StartServer()
 		{
-            try
-            {
-                System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099));
-            }
-            catch (System.Net.Sockets.SocketException ex)
-            {
-                if (ex.ErrorCode == 10048)
-                    return;     // EADDRINUSE?
-                throw ex;
-            }
+			try
+			{
+				System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(httpChannel, false);
+			}
+			catch (System.Net.Sockets.SocketException ex)
+			{
+				if (ex.ErrorCode == 10048)
+					return;     // EADDRINUSE?
+				throw ex;
+			}
 
 			// construct an index
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
+
 			writer.Optimize();
 			writer.Close();
-			
+
 			// publish it
 			Lucene.Net.Search.Searchable local = new IndexSearcher(indexStore);
 			RemoteSearchable impl = new RemoteSearchable(local);
 			System.Runtime.Remoting.RemotingServices.Marshal(impl, "Searchable");
+			serverStarted = true;
 		}
 		
+		//private Lucene.Net.Search.Searchable GetRemote()
+		//{
+		//    try
+		//    {
+		//        return LookupRemote();
+		//    }
+		//    catch (System.Exception)
+		//    {
+		//        StartServer();
+		//        return LookupRemote();
+		//    }
+		//}
+		
+		//private  Lucene.Net.Search.Searchable LookupRemote()
+		//{
+		//    return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"http://localhost:1099/Searchable");
+		//}
+		
+		//[SetUp]
+		//public void StartServer()
+		//{
+		//    try
+		//    {
+		//        System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099), false);
+		//    }
+		//    catch (System.Net.Sockets.SocketException ex)
+		//    {
+		//        if (ex.ErrorCode == 10048)
+		//            return;     // EADDRINUSE?
+		//        throw ex;
+		//    }
+
+		//    // construct an index
+		//    RAMDirectory indexStore = new RAMDirectory();
+		//    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+		//    Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+		//    doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
+		//    doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
+		//    writer.AddDocument(doc);
+		//    writer.Optimize();
+		//    writer.Close();
+			
+		//    // publish it
+		//    Lucene.Net.Search.Searchable local = new IndexSearcher(indexStore);
+		//    RemoteSearchable impl = new RemoteSearchable(local);
+		//    System.Runtime.Remoting.RemotingServices.Marshal(impl, "Searchable");
+		//}
+		
 		private void  Search(Query query)
 		{
 			// try to search the published index
@@ -82,17 +162,30 @@
 			Hits result = searcher.Search(query);
 			
 			Assert.AreEqual(1, result.Length());
-			Assert.AreEqual("test text", result.Doc(0).Get("test"));
+			Document document = result.Doc(0);
+			Assert.IsTrue(document != null, "document is null and it shouldn't be");
+			Assert.AreEqual(document.Get("test"), "test text");
+			Assert.IsTrue(document.GetFields().Count == 2, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 2);
+			System.Collections.Hashtable ftl = new System.Collections.Hashtable();
+			ftl.Add("other", "other");
+			FieldSelector fs = new SetBasedFieldSelector(ftl, new System.Collections.Hashtable());
+			document = searcher.Doc(0, fs);
+			Assert.IsTrue(document != null, "document is null and it shouldn't be");
+			Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
+			fs = new MapFieldSelector(new System.String[]{"other"});
+			document = searcher.Doc(0, fs);
+			Assert.IsTrue(document != null, "document is null and it shouldn't be");
+			Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
 		}
 		
 		[Test]
-        public virtual void  TestTermQuery()
+		public virtual void  TestTermQuery()
 		{
 			Search(new TermQuery(new Term("test", "test")));
 		}
 		
 		[Test]
-        public virtual void  TestBooleanQuery()
+		public virtual void  TestBooleanQuery()
 		{
 			BooleanQuery query = new BooleanQuery();
 			query.Add(new TermQuery(new Term("test", "test")), BooleanClause.Occur.MUST);
@@ -100,7 +193,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestPhraseQuery()
+		public virtual void  TestPhraseQuery()
 		{
 			PhraseQuery query = new PhraseQuery();
 			query.Add(new Term("test", "test"));
@@ -110,7 +203,7 @@
 		
 		// Tests bug fix at http://nagoya.apache.org/bugzilla/show_bug.cgi?id=20290
 		[Test]
-        public virtual void  TestQueryFilter()
+		public virtual void  TestQueryFilter()
 		{
 			// try to search the published index
 			Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[]{GetRemote()};
@@ -121,14 +214,14 @@
 			Assert.AreEqual(0, nohits.Length());
 		}
 		
-        [Test]
-        public virtual void  TestConstantScoreQuery()
-        {
-            // try to search the published index
-            Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[]{GetRemote()};
-            Searcher searcher = new MultiSearcher(searchables);
-            Hits hits = searcher.Search(new ConstantScoreQuery(new QueryFilter(new TermQuery(new Term("test", "test")))));
-            Assert.AreEqual(1, hits.Length());
-        }
-    }
+		[Test]
+		public virtual void  TestConstantScoreQuery()
+		{
+			// try to search the published index
+			Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[]{GetRemote()};
+			Searcher searcher = new MultiSearcher(searchables);
+			Hits hits = searcher.Search(new ConstantScoreQuery(new QueryFilter(new TermQuery(new Term("test", "test")))));
+			Assert.AreEqual(1, hits.Length());
+		}
+	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestScorerPerf.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs Tue Jul 15 14:44:04 2008
@@ -19,29 +19,30 @@
 
 using NUnit.Framework;
 
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
-	/// <author>  yonik
-	/// </author>
+	/// <summary> </summary>
 	/// <version>  $Id$
 	/// </version>
-    [TestFixture]
-    public class TestScorerPerf
+	[TestFixture]
+	public class TestScorerPerf : LuceneTestCase
 	{
 		internal System.Random r = new System.Random((System.Int32) 0);
 		internal bool validate = true; // set to false when doing performance testing
 		
 		internal System.Collections.BitArray[] sets;
+		internal Term[] terms;
 		internal IndexSearcher s;
 		
 		public virtual void  CreateDummySearcher()
@@ -61,10 +62,10 @@
 			{
 				int f = (nTerms + 1) - i; // make first terms less frequent
 				freq[i] = (int) System.Math.Ceiling(System.Math.Pow(f, power));
+				terms[i] = new Term("f", System.Convert.ToString((char) ('A' + i)));
 			}
 			
 			IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			iw.SetMaxBufferedDocs(123);
 			for (int i = 0; i < nDocs; i++)
 			{
 				Document d = new Document();
@@ -72,11 +73,13 @@
 				{
 					if (r.Next(freq[j]) == 0)
 					{
-						d.Add(new Field("f", j.ToString(), Field.Store.NO, Field.Index.UN_TOKENIZED));
+						d.Add(new Field("f", terms[j].Text(), Field.Store.NO, Field.Index.UN_TOKENIZED));
+						//System.out.println(d);
 					}
 				}
 				iw.AddDocument(d);
 			}
+			iw.Optimize();
 			iw.Close();
 		}
 		
@@ -86,7 +89,7 @@
 			System.Collections.BitArray set_Renamed = new System.Collections.BitArray((sz % 64 == 0 ? sz / 64 : sz / 64 + 1) * 64);
 			for (int i = 0; i < numBitsToSet; i++)
 			{
-                set_Renamed.Set(r.Next(sz), true);
+				set_Renamed.Set(r.Next(sz), true);
 			}
 			return set_Renamed;
 		}
@@ -117,22 +120,6 @@
 		
 		public class CountingHitCollector : HitCollector
 		{
-			virtual public int Count
-			{
-				get
-				{
-					return count;
-				}
-				
-			}
-			virtual public int Sum
-			{
-				get
-				{
-					return sum;
-				}
-				
-			}
 			internal int count = 0;
 			internal int sum = 0;
 			
@@ -141,6 +128,15 @@
 				count++;
 				sum += doc; // use it to avoid any possibility of being optimized away
 			}
+			
+			public virtual int GetCount()
+			{
+				return count;
+			}
+			public virtual int GetSum()
+			{
+				return sum;
+			}
 		}
 		
 		
@@ -199,9 +195,9 @@
 				
 				CountingHitCollector hc = validate?new MatchingHitCollector(result):new CountingHitCollector();
 				s.Search(bq, hc);
-				ret += hc.Sum;
+				ret += hc.GetSum();
 				if (validate)
-					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.Count);
+					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.GetCount());
 				// System.out.println(hc.getCount());
 			}
 			
@@ -211,6 +207,7 @@
 		public virtual int DoNestedConjunctions(int iter, int maxOuterClauses, int maxClauses)
 		{
 			int ret = 0;
+			long nMatches = 0;
 			
 			for (int i = 0; i < iter; i++)
 			{
@@ -231,15 +228,15 @@
 					oq.Add(bq, BooleanClause.Occur.MUST);
 				} // outer
 				
-				
-				CountingHitCollector hc = validate ? new MatchingHitCollector(result) : new CountingHitCollector();
+				CountingHitCollector hc = validate?new MatchingHitCollector(result):new CountingHitCollector();
 				s.Search(oq, hc);
-				ret += hc.Sum;
+				nMatches += hc.GetCount();
+				ret += hc.GetSum();
 				if (validate)
-					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.Count);
+					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.GetCount());
 				// System.out.println(hc.getCount());
 			}
-			
+			System.Console.Out.WriteLine("Average number of matches=" + (nMatches / iter));
 			return ret;
 		}
 		
@@ -248,28 +245,54 @@
 		{
 			int ret = 0;
 			
+			long nMatches = 0;
 			for (int i = 0; i < iter; i++)
 			{
 				int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
 				BooleanQuery bq = new BooleanQuery();
-				System.Collections.BitArray terms = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
+				System.Collections.BitArray termflag = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
 				for (int j = 0; j < nClauses; j++)
 				{
 					int tnum;
 					// don't pick same clause twice
-					do 
+					tnum = r.Next(termsInIndex);
+					if (termflag.Get(tnum))
 					{
-						tnum = r.Next(termsInIndex);
+						int nextClearBit = -1;
+						for (int k = tnum + 1; k < termflag.Count; k++)
+						{
+							if (!termflag.Get(k))
+							{
+								nextClearBit = k;
+								break;
+							}
+						}
+						tnum = nextClearBit;
 					}
-					while (terms.Get(tnum));
-					Query tq = new TermQuery(new Term("f", tnum.ToString()));
+					if (tnum < 0 || tnum >= termsInIndex)
+					{
+						int nextClearBit = -1;
+						for (int k = 0; k < termflag.Count; k++)
+						{
+							if (!termflag.Get(k))
+							{
+								nextClearBit = k;
+								break;
+							}
+						}
+						tnum = nextClearBit;
+					}
+					termflag.Set(tnum, true);
+					Query tq = new TermQuery(terms[tnum]);
 					bq.Add(tq, BooleanClause.Occur.MUST);
 				}
 				
 				CountingHitCollector hc = new CountingHitCollector();
 				s.Search(bq, hc);
-				ret += hc.Sum;
+				nMatches += hc.GetCount();
+				ret += hc.GetSum();
 			}
+			System.Console.Out.WriteLine("Average number of matches=" + (nMatches / iter));
 			
 			return ret;
 		}
@@ -278,7 +301,7 @@
 		public virtual int DoNestedTermConjunctions(IndexSearcher s, int termsInIndex, int maxOuterClauses, int maxClauses, int iter)
 		{
 			int ret = 0;
-			
+			long nMatches = 0;
 			for (int i = 0; i < iter; i++)
 			{
 				int oClauses = r.Next(maxOuterClauses - 1) + 2;
@@ -288,17 +311,40 @@
 					
 					int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
 					BooleanQuery bq = new BooleanQuery();
-					System.Collections.BitArray terms = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
+					System.Collections.BitArray termflag = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
 					for (int j = 0; j < nClauses; j++)
 					{
 						int tnum;
 						// don't pick same clause twice
-						do 
+						tnum = r.Next(termsInIndex);
+						if (termflag.Get(tnum))
+						{
+							int nextClearBit = -1;
+							for (int k = tnum + 1; k < termflag.Count; k++)
+							{
+								if (!termflag.Get(k))
+								{
+									nextClearBit = k;
+									break;
+								}
+							}
+							tnum = nextClearBit;
+						}
+						if (tnum < 0 || tnum >= 25)
 						{
-							tnum = r.Next(termsInIndex);
+							int nextClearBit = -1;
+							for (int k = 0; k < termflag.Count; k++)
+							{
+								if (!termflag.Get(k))
+								{
+									nextClearBit = k;
+									break;
+								}
+							}
+							tnum = nextClearBit;
 						}
-						while (terms.Get(tnum));
-						Query tq = new TermQuery(new Term("f", tnum.ToString()));
+						termflag.Set(tnum, true);
+						Query tq = new TermQuery(terms[tnum]);
 						bq.Add(tq, BooleanClause.Occur.MUST);
 					} // inner
 					
@@ -308,9 +354,10 @@
 				
 				CountingHitCollector hc = new CountingHitCollector();
 				s.Search(oq, hc);
-				ret += hc.Sum;
+				nMatches += hc.GetCount();
+				ret += hc.GetSum();
 			}
-			
+			System.Console.Out.WriteLine("Average number of matches=" + (nMatches / iter));
 			return ret;
 		}
 		
@@ -326,13 +373,13 @@
 				for (int j = 0; j < nClauses; j++)
 				{
 					int tnum = r.Next(termsInIndex);
-					q.Add(new Term("f", tnum.ToString()), j);
+					q.Add(new Term("f", System.Convert.ToString((char)(tnum + 'A'))), j);
 				}
 				q.SetSlop(termsInIndex); // this could be random too
 				
 				CountingHitCollector hc = new CountingHitCollector();
 				s.Search(q, hc);
-				ret += hc.Sum;
+				ret += hc.GetSum();
 			}
 			
 			return ret;
@@ -351,7 +398,7 @@
 		}
 		
 		/// <summary> 
-		/// int bigIter=6;
+		/// int bigIter=10;
 		/// public void testConjunctionPerf() throws Exception {
 		/// CreateDummySearcher();
 		/// validate=false;
@@ -380,7 +427,7 @@
 		/// validate=false;
 		/// RAMDirectory dir = new RAMDirectory();
 		/// System.out.println("Creating index");
-		/// CreateRandomTerms(100000,25,2, dir);
+		/// createRandomTerms(100000,25,.5, dir);
 		/// s = new IndexSearcher(dir);
 		/// System.out.println("Starting performance test");
 		/// for (int i=0; i<bigIter; i++) {
@@ -395,12 +442,12 @@
 		/// validate=false;    
 		/// RAMDirectory dir = new RAMDirectory();
 		/// System.out.println("Creating index");
-		/// CreateRandomTerms(100000,25,2, dir);
+		/// createRandomTerms(100000,25,.2, dir);
 		/// s = new IndexSearcher(dir);
 		/// System.out.println("Starting performance test");
 		/// for (int i=0; i<bigIter; i++) {
 		/// long start = System.currentTimeMillis();
-		/// DoNestedTermConjunctions(s,25,5,5,1000);
+		/// doNestedTermConjunctions(s,25,3,3,200);
 		/// long end = System.currentTimeMillis();
 		/// System.out.println("milliseconds="+(end-start));
 		/// }

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestSearchHitsWithDeletions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSearchHitsWithDeletions.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSearchHitsWithDeletions.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSearchHitsWithDeletions.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,199 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> Test Hits searches with interleaved deletions.
+	/// 
+	/// See <a href="http://issues.apache.org/jira/browse/LUCENE-1096">LUCENE-1096</a>.
+	/// </summary>
+	[TestFixture]
+	public class TestSearchHitsWithDeletions
+	{
+		
+		private static bool VERBOSE = false;
+		private const System.String TEXT_FIELD = "text";
+		private const int N = 16100;
+		
+		private static Directory directory;
+		
+		[SetUp]
+		public virtual void  SetUp()
+		{
+			// Create an index writer.
+			directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			for (int i = 0; i < N; i++)
+			{
+				writer.AddDocument(CreateDocument(i));
+			}
+			writer.Optimize();
+			writer.Close();
+		}
+		
+		/// <summary> Deletions during search should not alter previously retrieved hits.</summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteAll()
+		{
+			DoTestSearchHitsDeleteEvery(1, false);
+		}
+		
+		/// <summary> Deletions during search should not alter previously retrieved hits.</summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteEvery2ndHit()
+		{
+			DoTestSearchHitsDeleteEvery(2, false);
+		}
+		
+		/// <summary> Deletions during search should not alter previously retrieved hits.</summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteEvery4thHit()
+		{
+			DoTestSearchHitsDeleteEvery(4, false);
+		}
+		
+		/// <summary> Deletions during search should not alter previously retrieved hits.</summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteEvery8thHit()
+		{
+			DoTestSearchHitsDeleteEvery(8, false);
+		}
+		
+		/// <summary> Deletions during search should not alter previously retrieved hits.</summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteEvery90thHit()
+		{
+			DoTestSearchHitsDeleteEvery(90, false);
+		}
+		
+		/// <summary> Deletions during search should not alter previously retrieved hits,
+		/// and deletions that affect total number of hits should throw the 
+		/// correct exception when trying to fetch "too many".
+		/// </summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteEvery8thHitAndInAdvance()
+		{
+			DoTestSearchHitsDeleteEvery(8, true);
+		}
+		
+		/// <summary> Verify that ok also with no deletions at all.</summary>
+		[Test]
+		public virtual void  TestSearchHitsNoDeletes()
+		{
+			DoTestSearchHitsDeleteEvery(N + 100, false);
+		}
+		
+		/// <summary> Deletions that affect total number of hits should throw the 
+		/// correct exception when trying to fetch "too many".
+		/// </summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteInAdvance()
+		{
+			DoTestSearchHitsDeleteEvery(N + 100, true);
+		}
+		
+		/// <summary> Intermittent deletions during search, should not alter previously retrieved hits.
+		/// (Using a debugger to verify that the check in Hits for deletions is performed only when needed.)
+		/// </summary>
+		[Test]
+		public virtual void  TestSearchHitsDeleteIntermittent()
+		{
+			DoTestSearchHitsDeleteEvery(- 1, false);
+		}
+		
+		
+		private void  DoTestSearchHitsDeleteEvery(int k, bool deleteInFront)
+		{
+			bool intermittent = k < 0;
+			Log("Test search hits with " + (intermittent?"intermittent deletions.":"deletions of every " + k + " hit."));
+			IndexSearcher searcher = new IndexSearcher(directory);
+			IndexReader reader = searcher.GetIndexReader();
+			Query q = new TermQuery(new Term(TEXT_FIELD, "text")); // matching all docs
+			Hits hits = searcher.Search(q);
+			Log("Got " + hits.Length() + " results");
+			Assert.AreEqual(N, hits.Length(), "must match all " + N + " docs, not only " + hits.Length() + " docs!");
+			if (deleteInFront)
+			{
+				Log("deleting hits that was not yet retrieved!");
+				reader.DeleteDocument(reader.MaxDoc() - 1);
+				reader.DeleteDocument(reader.MaxDoc() - 2);
+				reader.DeleteDocument(reader.MaxDoc() - 3);
+			}
+			try
+			{
+				for (int i = 0; i < hits.Length(); i++)
+				{
+					int id = hits.Id(i);
+					Assert.AreEqual(i, hits.Id(i), "Hit " + i + " has doc id " + hits.Id(i) + " instead of " + i);
+					if ((intermittent && (i == 50 || i == 250 || i == 950)) || (!intermittent && (k < 2 || (i > 0 && i % k == 0))))
+					{
+						Document doc = hits.Doc(id);
+						Log("Deleting hit " + i + " - doc " + doc + " with id " + id);
+						reader.DeleteDocument(id);
+					}
+					if (intermittent)
+					{
+					// check internal behavior of Hits (go 50 ahead of the getMoreDocs points because the deletions cause more of the available hits to be used)
+						if (i == 150 || i == 450 || i == 1650)
+						{
+							Assert.IsTrue(hits.debugCheckedForDeletions, "Hit " + i + ": hits should have checked for deletions in last call to getMoreDocs()");
+						}
+						else if (i == 50 || i == 250 || i == 850)
+						{
+							Assert.IsFalse(hits.debugCheckedForDeletions, "Hit " + i + ": hits should have NOT checked for deletions in last call to getMoreDocs()");
+						}
+					}
+				}
+			}
+			catch (System.Exception e)
+			{
+				// this is the only valid exception, and only when deleting in front.
+				Assert.IsTrue(deleteInFront, e.Message + " not expected unless deleting hits that were not yet seen!");
+			}
+			searcher.Close();
+		}
+		
+		private static Document CreateDocument(int id)
+		{
+			Document doc = new Document();
+			doc.Add(new Field(TEXT_FIELD, "text of document" + id, Field.Store.YES, Field.Index.TOKENIZED));
+			return doc;
+		}
+		
+		private static void  Log(System.String s)
+		{
+			if (VERBOSE)
+			{
+				System.Console.Out.WriteLine(s);
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSetNorm.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs Tue Jul 15 14:44:04 2008
@@ -19,25 +19,25 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary>Document boost unit test.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Doug Cutting
-	/// </author>
-	/// <version>  $Revision: 150497 $
+	/// <version>  $Revision: 583534 $
 	/// </version>
 	[TestFixture]
-    public class TestSetNorm
+	public class TestSetNorm : LuceneTestCase
 	{
 		private class AnonymousClassHitCollector : HitCollector
 		{
@@ -51,7 +51,7 @@
 				this.enclosingInstance = enclosingInstance;
 			}
 
-            private float[] scores;
+			private float[] scores;
 			private TestSetNorm enclosingInstance;
 			public TestSetNorm Enclosing_Instance
 			{
@@ -68,7 +68,7 @@
 		}
 
 		[Test]
-        public virtual void  TestSetNorm_Renamed_Method()
+		public virtual void  TestSetNorm_Renamed_Method()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimilarity.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs Tue Jul 15 14:44:04 2008
@@ -19,25 +19,25 @@
 
 using NUnit.Framework;
 
-using Term = Lucene.Net.Index.Term;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary>Similarity unit test.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Doug Cutting
-	/// </author>
-	/// <version>  $Revision: 150497 $
+	/// <version>  $Revision: 583534 $
 	/// </version>
 	[TestFixture]
-    public class TestSimilarity
+	public class TestSimilarity : LuceneTestCase
 	{
 		private class AnonymousClassHitCollector : HitCollector
 		{
@@ -64,7 +64,7 @@
 			}
 		}
 
-        private class AnonymousClassHitCollector1 : HitCollector
+		private class AnonymousClassHitCollector1 : HitCollector
 		{
 			public AnonymousClassHitCollector1(TestSimilarity enclosingInstance)
 			{
@@ -90,7 +90,7 @@
 			}
 		}
 
-        private class AnonymousClassHitCollector2 : HitCollector
+		private class AnonymousClassHitCollector2 : HitCollector
 		{
 			public AnonymousClassHitCollector2(TestSimilarity enclosingInstance)
 			{
@@ -116,7 +116,7 @@
 			}
 		}
 
-        private class AnonymousClassHitCollector3 : HitCollector
+		private class AnonymousClassHitCollector3 : HitCollector
 		{
 			public AnonymousClassHitCollector3(TestSimilarity enclosingInstance)
 			{
@@ -177,7 +177,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestSimilarity_Renamed_Method()
+		public virtual void  TestSimilarity_Renamed_Method()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimpleExplanations.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs Tue Jul 15 14:44:04 2008
@@ -19,22 +19,12 @@
 
 using NUnit.Framework;
 
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using IndexReader = Lucene.Net.Index.IndexReader;
-using Term = Lucene.Net.Index.Term;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
-using QueryParser = Lucene.Net.QueryParsers.QueryParser;
-using ParseException = Lucene.Net.QueryParsers.ParseException;
-
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> TestExplanations subclass focusing on basic query types</summary>
-    [TestFixture]
-    public class TestSimpleExplanations : TestExplanations
+	[TestFixture]
+	public class TestSimpleExplanations : TestExplanations
 	{
 		
 		// we focus on queries that don't rewrite to other queries.
@@ -50,22 +40,22 @@
 			Qtest("w1", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestT2()
+		[Test]
+		public virtual void  TestT2()
 		{
 			Qtest("w1^1000", new int[]{0, 1, 2, 3});
 		}
 		
 		/* MatchAllDocs */
 		
-        [Test]
-        public virtual void  TestMA1()
+		[Test]
+		public virtual void  TestMA1()
 		{
 			Qtest(new MatchAllDocsQuery(), new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestMA2()
+		[Test]
+		public virtual void  TestMA2()
 		{
 			Query q = new MatchAllDocsQuery();
 			q.SetBoost(1000);
@@ -74,84 +64,84 @@
 		
 		/* some simple phrase tests */
 		
-        [Test]
-        public virtual void  TestP1()
+		[Test]
+		public virtual void  TestP1()
 		{
 			Qtest("\"w1 w2\"", new int[]{0});
 		}
 
-        [Test]
-        public virtual void  TestP2()
+		[Test]
+		public virtual void  TestP2()
 		{
 			Qtest("\"w1 w3\"", new int[]{1, 3});
 		}
 
-        [Test]
-        public virtual void  TestP3()
+		[Test]
+		public virtual void  TestP3()
 		{
 			Qtest("\"w1 w2\"~1", new int[]{0, 1, 2});
 		}
 
-        [Test]
-        public virtual void  TestP4()
+		[Test]
+		public virtual void  TestP4()
 		{
 			Qtest("\"w2 w3\"~1", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestP5()
+		[Test]
+		public virtual void  TestP5()
 		{
 			Qtest("\"w3 w2\"~1", new int[]{1, 3});
 		}
 
-        [Test]
-        public virtual void  TestP6()
+		[Test]
+		public virtual void  TestP6()
 		{
 			Qtest("\"w3 w2\"~2", new int[]{0, 1, 3});
 		}
 
-        [Test]
-        public virtual void  TestP7()
+		[Test]
+		public virtual void  TestP7()
 		{
 			Qtest("\"w3 w2\"~3", new int[]{0, 1, 2, 3});
 		}
 		
 		/* some simple filtered query tests */
 		
-        [Test]
-        public virtual void  TestFQ1()
+		[Test]
+		public virtual void  TestFQ1()
 		{
 			Qtest(new FilteredQuery(qp.Parse("w1"), new ItemizedFilter(new int[]{0, 1, 2, 3})), new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestFQ2()
+		[Test]
+		public virtual void  TestFQ2()
 		{
 			Qtest(new FilteredQuery(qp.Parse("w1"), new ItemizedFilter(new int[]{0, 2, 3})), new int[]{0, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestFQ3()
+		[Test]
+		public virtual void  TestFQ3()
 		{
 			Qtest(new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3})), new int[]{3});
 		}
 
-        [Test]
-        public virtual void  TestFQ4()
+		[Test]
+		public virtual void  TestFQ4()
 		{
 			Qtest(new FilteredQuery(qp.Parse("xx^1000"), new ItemizedFilter(new int[]{1, 3})), new int[]{3});
 		}
 
-        [Test]
-        public virtual void  TestFQ6()
+		[Test]
+		public virtual void  TestFQ6()
 		{
 			Query q = new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3}));
 			q.SetBoost(1000);
 			Qtest(q, new int[]{3});
 		}
 
-        [Test]
-        public virtual void  TestFQ7()
+		[Test]
+		public virtual void  TestFQ7()
 		{
 			Query q = new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3}));
 			q.SetBoost(0);
@@ -160,22 +150,22 @@
 		
 		/* ConstantScoreQueries */
 		
-        [Test]
-        public virtual void  TestCSQ1()
+		[Test]
+		public virtual void  TestCSQ1()
 		{
 			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{0, 1, 2, 3}));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestCSQ2()
+		[Test]
+		public virtual void  TestCSQ2()
 		{
 			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{1, 3}));
 			Qtest(q, new int[]{1, 3});
 		}
 
-        [Test]
-        public virtual void  TestCSQ3()
+		[Test]
+		public virtual void  TestCSQ3()
 		{
 			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{0, 2}));
 			q.SetBoost(1000);
@@ -184,8 +174,8 @@
 		
 		/* DisjunctionMaxQuery */
 		
-        [Test]
-        public virtual void  TestDMQ1()
+		[Test]
+		public virtual void  TestDMQ1()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
 			q.Add(qp.Parse("w1"));
@@ -193,8 +183,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ2()
+		[Test]
+		public virtual void  TestDMQ2()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("w1"));
@@ -202,8 +192,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ3()
+		[Test]
+		public virtual void  TestDMQ3()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("QQ"));
@@ -211,8 +201,8 @@
 			Qtest(q, new int[]{0});
 		}
 
-        [Test]
-        public virtual void  TestDMQ4()
+		[Test]
+		public virtual void  TestDMQ4()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("QQ"));
@@ -220,8 +210,8 @@
 			Qtest(q, new int[]{2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ5()
+		[Test]
+		public virtual void  TestDMQ5()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("yy -QQ"));
@@ -229,8 +219,8 @@
 			Qtest(q, new int[]{2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ6()
+		[Test]
+		public virtual void  TestDMQ6()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("-yy w3"));
@@ -238,8 +228,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ7()
+		[Test]
+		public virtual void  TestDMQ7()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("-yy w3"));
@@ -247,8 +237,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ8()
+		[Test]
+		public virtual void  TestDMQ8()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("yy w5^100"));
@@ -256,8 +246,8 @@
 			Qtest(q, new int[]{0, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestDMQ9()
+		[Test]
+		public virtual void  TestDMQ9()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("yy w5^100"));
@@ -267,8 +257,8 @@
 		
 		/* MultiPhraseQuery */
 		
-        [Test]
-        public virtual void  TestMPQ1()
+		[Test]
+		public virtual void  TestMPQ1()
 		{
 			MultiPhraseQuery q = new MultiPhraseQuery();
 			q.Add(Ta(new System.String[]{"w1"}));
@@ -276,8 +266,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestMPQ2()
+		[Test]
+		public virtual void  TestMPQ2()
 		{
 			MultiPhraseQuery q = new MultiPhraseQuery();
 			q.Add(Ta(new System.String[]{"w1"}));
@@ -285,8 +275,8 @@
 			Qtest(q, new int[]{0, 1, 3});
 		}
 
-        [Test]
-        public virtual void  TestMPQ3()
+		[Test]
+		public virtual void  TestMPQ3()
 		{
 			MultiPhraseQuery q = new MultiPhraseQuery();
 			q.Add(Ta(new System.String[]{"w1", "xx"}));
@@ -294,8 +284,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestMPQ4()
+		[Test]
+		public virtual void  TestMPQ4()
 		{
 			MultiPhraseQuery q = new MultiPhraseQuery();
 			q.Add(Ta(new System.String[]{"w1"}));
@@ -303,8 +293,8 @@
 			Qtest(q, new int[]{0});
 		}
 
-        [Test]
-        public virtual void  TestMPQ5()
+		[Test]
+		public virtual void  TestMPQ5()
 		{
 			MultiPhraseQuery q = new MultiPhraseQuery();
 			q.Add(Ta(new System.String[]{"w1"}));
@@ -313,8 +303,8 @@
 			Qtest(q, new int[]{0, 1, 2});
 		}
 
-        [Test]
-        public virtual void  TestMPQ6()
+		[Test]
+		public virtual void  TestMPQ6()
 		{
 			MultiPhraseQuery q = new MultiPhraseQuery();
 			q.Add(Ta(new System.String[]{"w1", "w3"}));
@@ -325,74 +315,74 @@
 		
 		/* some simple tests of boolean queries containing term queries */
 		
-        [Test]
-        public virtual void  TestBQ1()
+		[Test]
+		public virtual void  TestBQ1()
 		{
 			Qtest("+w1 +w2", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ2()
+		[Test]
+		public virtual void  TestBQ2()
 		{
 			Qtest("+yy +w3", new int[]{2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ3()
+		[Test]
+		public virtual void  TestBQ3()
 		{
 			Qtest("yy +w3", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ4()
+		[Test]
+		public virtual void  TestBQ4()
 		{
 			Qtest("w1 (-xx w2)", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ5()
+		[Test]
+		public virtual void  TestBQ5()
 		{
 			Qtest("w1 (+qq w2)", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ6()
+		[Test]
+		public virtual void  TestBQ6()
 		{
 			Qtest("w1 -(-qq w5)", new int[]{1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ7()
+		[Test]
+		public virtual void  TestBQ7()
 		{
 			Qtest("+w1 +(qq (xx -w2) (+w3 +w4))", new int[]{0});
 		}
 
-        [Test]
-        public virtual void  TestBQ8()
+		[Test]
+		public virtual void  TestBQ8()
 		{
 			Qtest("+w1 (qq (xx -w2) (+w3 +w4))", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ9()
+		[Test]
+		public virtual void  TestBQ9()
 		{
 			Qtest("+w1 (qq (-xx w2) -(+w3 +w4))", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ10()
+		[Test]
+		public virtual void  TestBQ10()
 		{
 			Qtest("+w1 +(qq (-xx w2) -(+w3 +w4))", new int[]{1});
 		}
 
-        [Test]
-        public virtual void  TestBQ11()
+		[Test]
+		public virtual void  TestBQ11()
 		{
 			Qtest("w1 w2^1000.0", new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ14()
+		[Test]
+		public virtual void  TestBQ14()
 		{
 			BooleanQuery q = new BooleanQuery(true);
 			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.SHOULD);
@@ -400,8 +390,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ15()
+		[Test]
+		public virtual void  TestBQ15()
 		{
 			BooleanQuery q = new BooleanQuery(true);
 			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.MUST_NOT);
@@ -409,8 +399,8 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ16()
+		[Test]
+		public virtual void  TestBQ16()
 		{
 			BooleanQuery q = new BooleanQuery(true);
 			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.SHOULD);
@@ -418,8 +408,8 @@
 			Qtest(q, new int[]{0, 1});
 		}
 
-        [Test]
-        public virtual void  TestBQ17()
+		[Test]
+		public virtual void  TestBQ17()
 		{
 			BooleanQuery q = new BooleanQuery(true);
 			q.Add(qp.Parse("w2"), BooleanClause.Occur.SHOULD);
@@ -427,14 +417,14 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 
-        [Test]
-        public virtual void  TestBQ19()
+		[Test]
+		public virtual void  TestBQ19()
 		{
 			Qtest("-yy w3", new int[]{0, 1});
 		}
 		
-        [Test]
-        public virtual void  TestBQ20()
+		[Test]
+		public virtual void  TestBQ20()
 		{
 			BooleanQuery q = new BooleanQuery();
 			q.SetMinimumNumberShouldMatch(2);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs Tue Jul 15 14:44:04 2008
@@ -33,8 +33,8 @@
 {
 	
 	/// <summary> subclass of TestSimpleExplanations that verifies non matches.</summary>
-    [TestFixture]
-    public class TestSimpleExplanationsOfNonMatches : TestSimpleExplanations
+	[TestFixture]
+	public class TestSimpleExplanationsOfNonMatches : TestSimpleExplanations
 	{
 		
 		/// <summary> Overrides superclass to ignore matches and focus on non-matches



Mime
View raw message