lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r677059 [15/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/...
Date Tue, 15 Jul 2008 21:44:10 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateFilter.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs Tue Jul 15 14:44:04 2008
@@ -19,31 +19,31 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> DateFilter JUnit tests.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Otis Gospodnetic
-	/// </author>
-    /// <version>  $Revision: 472959 $
-    /// </version>
+	/// <version>  $Revision: 583534 $
+	/// </version>
 	[TestFixture]
-    public class TestDateFilter
+	public class TestDateFilter : LuceneTestCase
 	{
 		
 		/// <summary> </summary>
 		[Test]
-        public virtual void  TestBefore()
+		public virtual void  TestBefore()
 		{
 			// create an index
 			RAMDirectory indexStore = new RAMDirectory();
@@ -53,8 +53,8 @@
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			// add time that is in the past
-            doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 1000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.UN_TOKENIZED));
-            doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 1000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
 			writer.Optimize();
 			writer.Close();
@@ -63,10 +63,10 @@
 			
 			// filter that should preserve matches
 			//DateFilter df1 = DateFilter.Before("datefield", now);
-            RangeFilter df1 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 2000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), false, true);
-            // filter that should discard matches
+			RangeFilter df1 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 2000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), false, true);
+			// filter that should discard matches
 			//DateFilter df2 = DateFilter.Before("datefield", now - 999999);
-            RangeFilter df2 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(0, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now - 2000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), true, false);
+			RangeFilter df2 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(0, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now - 2000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), true, false);
 			
 			// search something that doesn't exist with DateFilter
 			Query query1 = new TermQuery(new Term("body", "NoMatchForThis"));
@@ -100,7 +100,7 @@
 		
 		/// <summary> </summary>
 		[Test]
-        public virtual void  TestAfter()
+		public virtual void  TestAfter()
 		{
 			// create an index
 			RAMDirectory indexStore = new RAMDirectory();
@@ -110,8 +110,8 @@
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			// add time that is in the future
-            doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 888888, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.UN_TOKENIZED));
-            doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 888888, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
 			writer.Optimize();
 			writer.Close();
@@ -120,10 +120,10 @@
 			
 			// filter that should preserve matches
 			//DateFilter df1 = DateFilter.After("datefield", now);
-            RangeFilter df1 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now + 999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), true, false);
-            // filter that should discard matches
+			RangeFilter df1 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now + 999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), true, false);
+			// filter that should discard matches
 			//DateFilter df2 = DateFilter.After("datefield", now + 999999);
-            RangeFilter df2 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now + 999999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), false, true);
+			RangeFilter df2 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now + 999999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), false, true);
 			
 			// search something that doesn't exist with DateFilter
 			Query query1 = new TermQuery(new Term("body", "NoMatchForThis"));

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateSort.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using DateTools = Lucene.Net.Documents.DateTools;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> Test date sorting, i.e. auto-sorting of fields with type "long".
+	/// See http://issues.apache.org/jira/browse/LUCENE-1045 
+	/// </summary>
+	[TestFixture]
+	public class TestDateSort
+	{
+		
+		private const System.String TEXT_FIELD = "text";
+		private const System.String DATE_TIME_FIELD = "dateTime";
+		
+		private static Directory directory;
+		
+		[SetUp]
+		public virtual void  SetUp()
+		{
+			// Create an index writer.
+			directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+
+			// oldest doc:
+			// Add the first document.  text = "Document 1"  dateTime = Oct 10 03:25:22 EDT 2007
+			writer.AddDocument(CreateDocument("Document 1", 633275835220000000L));
+			// Add the second document.  text = "Document 2"  dateTime = Oct 10 03:25:26 EDT 2007 
+			writer.AddDocument(CreateDocument("Document 2", 633275835260000000L));
+			// Add the third document.  text = "Document 3"  dateTime = Oct 11 07:12:13 EDT 2007 
+			writer.AddDocument(CreateDocument("Document 3", 633276835330000000L));
+			// Add the fourth document.  text = "Document 4"  dateTime = Oct 11 08:02:09 EDT 2007
+			writer.AddDocument(CreateDocument("Document 4", 633276865290000000L));
+			// latest doc:
+			// Add the fifth document.  text = "Document 5"  dateTime = Oct 12 13:25:43 EDT 2007
+			writer.AddDocument(CreateDocument("Document 5", 633277923430000000L));
+
+			//// oldest doc:
+			//// Add the first document.  text = "Document 1"  dateTime = Oct 10 03:25:22 EDT 2007
+			//writer.AddDocument(CreateDocument("Document 1", 1192001122000L));
+			//// Add the second document.  text = "Document 2"  dateTime = Oct 10 03:25:26 EDT 2007 
+			//writer.AddDocument(CreateDocument("Document 2", 1192001126000L));
+			//// Add the third document.  text = "Document 3"  dateTime = Oct 11 07:12:13 EDT 2007 
+			//writer.AddDocument(CreateDocument("Document 3", 1192101133000L));
+			//// Add the fourth document.  text = "Document 4"  dateTime = Oct 11 08:02:09 EDT 2007
+			//writer.AddDocument(CreateDocument("Document 4", 1192104129000L));
+			//// latest doc:
+			//// Add the fifth document.  text = "Document 5"  dateTime = Oct 12 13:25:43 EDT 2007
+			//writer.AddDocument(CreateDocument("Document 5", 1192209943000L));
+			
+			writer.Optimize();
+			writer.Close();
+		}
+		
+		[Test]
+		public virtual void  TestReverseDateSort()
+		{
+			IndexSearcher searcher = new IndexSearcher(directory);
+			
+			// Create a Sort object.  reverse is set to true.
+			// problem occurs only with SortField.AUTO:
+			Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.AUTO, true));
+
+			Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser(TEXT_FIELD, new WhitespaceAnalyzer());
+			Query query = queryParser.Parse("Document");
+			
+			// Execute the search and process the search results.
+			System.String[] actualOrder = new System.String[5];
+			Hits hits = searcher.Search(query, sort);
+			for (int i = 0; i < hits.Length(); i++)
+			{
+				Document document = hits.Doc(i);
+				System.String text = document.Get(TEXT_FIELD);
+				actualOrder[i] = text;
+			}
+			searcher.Close();
+			
+			// Set up the expected order (i.e. Document 5, 4, 3, 2, 1).
+			System.String[] expectedOrder = new System.String[5];
+			expectedOrder[0] = "Document 5";
+			expectedOrder[1] = "Document 4";
+			expectedOrder[2] = "Document 3";
+			expectedOrder[3] = "Document 2";
+			expectedOrder[4] = "Document 1";
+			
+			Assert.AreEqual(new System.Collections.ArrayList(expectedOrder), new System.Collections.ArrayList(actualOrder));
+		}
+		
+		private static Document CreateDocument(System.String text, long time)
+		{
+			Document document = new Document();
+			
+			// Add the text field.
+			Field textField = new Field(TEXT_FIELD, text, Field.Store.YES, Field.Index.TOKENIZED);
+			document.Add(textField);
+			
+			// Add the date/time field.
+			System.String dateTimeString = DateTools.TimeToString(time, DateTools.Resolution.SECOND);
+			Field dateTimeField = new Field(DATE_TIME_FIELD, dateTimeString, Field.Store.YES, Field.Index.UN_TOKENIZED);
+			document.Add(dateTimeField);
+			
+			return document;
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDisjunctionMaxQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,7 +19,6 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -27,6 +26,8 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -35,7 +36,7 @@
 	/// 
 	/// </summary>
 	[TestFixture]
-    public class TestDisjunctionMaxQuery
+	public class TestDisjunctionMaxQuery : LuceneTestCase
 	{
 		public TestDisjunctionMaxQuery()
 		{
@@ -60,7 +61,7 @@
 		/// <author>  Williams
 		/// </author>
 		[Serializable]
-		private class TestSimilarity:DefaultSimilarity
+		private class TestSimilarity : DefaultSimilarity
 		{
 			
 			public TestSimilarity()
@@ -89,9 +90,11 @@
 		public IndexSearcher s;
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
 			
+			base.SetUp();
+			
 			index = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(index, new WhitespaceAnalyzer(), true);
 			writer.SetSimilarity(sim);
@@ -101,39 +104,39 @@
 			// d1 is an "ok" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
-                d1.Add(new Field("id", "d1", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d1"));
-                d1.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-                d1.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "elephant"));
-                writer.AddDocument(d1);
+				d1.Add(new Field("id", "d1", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d1"));
+				d1.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
+				d1.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "elephant"));
+				writer.AddDocument(d1);
 			}
 			
 			// d2 is a "good" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
-                d2.Add(new Field("id", "d2", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d2"));
-                d2.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-                d2.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "albino"));
-                d2.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "elephant"));
-                writer.AddDocument(d2);
+				d2.Add(new Field("id", "d2", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d2"));
+				d2.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
+				d2.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "albino"));
+				d2.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "elephant"));
+				writer.AddDocument(d2);
 			}
 			
 			// d3 is a "better" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
-                d3.Add(new Field("id", "d3", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d3"));
-                d3.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "albino"));
-                d3.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-                writer.AddDocument(d3);
+				d3.Add(new Field("id", "d3", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d3"));
+				d3.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "albino"));
+				d3.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
+				writer.AddDocument(d3);
 			}
 			
 			// d4 is the "best" match for:  albino elephant
 			{
 				Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
-                d4.Add(new Field("id", "d4", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d4"));
-                d4.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "albino"));
-                d4.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
-                d4.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "albino"));
-                writer.AddDocument(d4);
+				d4.Add(new Field("id", "d4", Field.Store.YES, Field.Index.UN_TOKENIZED)); //Field.Keyword("id", "d4"));
+				d4.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "albino"));
+				d4.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("hed", "elephant"));
+				d4.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.TOKENIZED)); //Field.Text("dek", "albino"));
+				writer.AddDocument(d4);
 			}
 			
 			writer.Close();
@@ -144,13 +147,48 @@
 		}
 		
 		[Test]
+		public virtual void  TestSkipToFirsttimeMiss()
+		{
+			DisjunctionMaxQuery dq = new DisjunctionMaxQuery(0.0f);
+			dq.Add(Tq("id", "d1"));
+			dq.Add(Tq("dek", "DOES_NOT_EXIST"));
+			
+			QueryUtils.Check(dq, s);
+			
+			Weight dw = dq.Weight(s);
+			Scorer ds = dw.Scorer(r);
+			bool skipOk = ds.SkipTo(3);
+			if (skipOk)
+			{
+				Assert.Fail("firsttime skipTo found a match? ... " + r.Document(ds.Doc()).Get("id"));
+			}
+		}
+		
+		[Test]
+		public virtual void  TestSkipToFirsttimeHit()
+		{
+			DisjunctionMaxQuery dq = new DisjunctionMaxQuery(0.0f);
+			dq.Add(Tq("dek", "albino"));
+			dq.Add(Tq("dek", "DOES_NOT_EXIST"));
+			
+			QueryUtils.Check(dq, s);
+			
+			Weight dw = dq.Weight(s);
+			Scorer ds = dw.Scorer(r);
+			Assert.IsTrue(ds.SkipTo(3), "firsttime skipTo found no match");
+			Assert.AreEqual("d4", r.Document(ds.Doc()).Get("id"), "found wrong docid");
+		}
+		
+		
+		
+		[Test]
 		public virtual void  TestSimpleEqualScores1()
 		{
 			
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
 			q.Add(Tq("hed", "albino"));
 			q.Add(Tq("hed", "elephant"));
-            QueryUtils.Check(q, s);
+			QueryUtils.Check(q, s);
 			
 			Hits h = s.Search(q);
 			
@@ -172,13 +210,13 @@
 		}
 		
 		[Test]
-        public virtual void  TestSimpleEqualScores2()
+		public virtual void  TestSimpleEqualScores2()
 		{
 			
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
-            QueryUtils.Check(q, s);
+			QueryUtils.Check(q, s);
 
 			
 			Hits h = s.Search(q);
@@ -200,7 +238,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestSimpleEqualScores3()
+		public virtual void  TestSimpleEqualScores3()
 		{
 			
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
@@ -208,7 +246,7 @@
 			q.Add(Tq("hed", "elephant"));
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
-            QueryUtils.Check(q, s);
+			QueryUtils.Check(q, s);
 
 			
 			Hits h = s.Search(q);
@@ -230,13 +268,13 @@
 		}
 		
 		[Test]
-        public virtual void  TestSimpleTiebreaker()
+		public virtual void  TestSimpleTiebreaker()
 		{
 			
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.01f);
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
-            QueryUtils.Check(q, s);
+			QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);
@@ -259,7 +297,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestBooleanRequiredEqualScores()
+		public virtual void  TestBooleanRequiredEqualScores()
 		{
 			
 			BooleanQuery q = new BooleanQuery();
@@ -267,18 +305,18 @@
 				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
 				q1.Add(Tq("hed", "albino"));
 				q1.Add(Tq("dek", "albino"));
-                q.Add(q1, BooleanClause.Occur.MUST); //false,false);
-                QueryUtils.Check(q1, s);
-            }
+				q.Add(q1, BooleanClause.Occur.MUST); //false,false);
+				QueryUtils.Check(q1, s);
+			}
 			{
 				DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
 				q2.Add(Tq("hed", "elephant"));
 				q2.Add(Tq("dek", "elephant"));
-                q.Add(q2, BooleanClause.Occur.MUST); //false,false);
-                QueryUtils.Check(q2, s);
-            }
+				q.Add(q2, BooleanClause.Occur.MUST); //false,false);
+				QueryUtils.Check(q2, s);
+			}
 			
-            QueryUtils.Check(q, s);
+			QueryUtils.Check(q, s);
 			
 			Hits h = s.Search(q);
 			
@@ -307,15 +345,15 @@
 				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
 				q1.Add(Tq("hed", "albino"));
 				q1.Add(Tq("dek", "albino"));
-                q.Add(q1, BooleanClause.Occur.SHOULD); //false,false);
-            }
+				q.Add(q1, BooleanClause.Occur.SHOULD); //false,false);
+			}
 			{
 				DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
 				q2.Add(Tq("hed", "elephant"));
 				q2.Add(Tq("dek", "elephant"));
-                q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
-            }
-            QueryUtils.Check(q, s);
+				q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
+			}
+			QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);
@@ -349,15 +387,15 @@
 				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.01f);
 				q1.Add(Tq("hed", "albino"));
 				q1.Add(Tq("dek", "albino"));
-                q.Add(q1, BooleanClause.Occur.SHOULD); //false,false);
-            }
+				q.Add(q1, BooleanClause.Occur.SHOULD); //false,false);
+			}
 			{
 				DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.01f);
 				q2.Add(Tq("hed", "elephant"));
 				q2.Add(Tq("dek", "elephant"));
-                q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
-            }
-            QueryUtils.Check(q, s);
+				q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
+			}
+			QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);
@@ -402,15 +440,15 @@
 				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.01f);
 				q1.Add(Tq("hed", "albino", 1.5f));
 				q1.Add(Tq("dek", "albino"));
-                q.Add(q1, BooleanClause.Occur.SHOULD); //false,false);
-            }
+				q.Add(q1, BooleanClause.Occur.SHOULD); //false,false);
+			}
 			{
 				DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.01f);
 				q2.Add(Tq("hed", "elephant", 1.5f));
 				q2.Add(Tq("dek", "elephant"));
-                q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
-            }
-            QueryUtils.Check(q, s);
+				q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
+			}
+			QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);
@@ -475,8 +513,8 @@
 			{
 				Lucene.Net.Documents.Document d = h.Doc(i);
 				float score = h.Score(i);
-                System.Console.Error.WriteLine("#" + i + ": {0.000000000}" + score + " - " + d.Get("id"));
-            }
+				System.Console.Error.WriteLine("#" + i + ": {0.000000000}" + score + " - " + d.Get("id"));
+			}
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDocBoost.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs Tue Jul 15 14:44:04 2008
@@ -19,24 +19,24 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary>Document boost unit test.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Doug Cutting
-	/// </author>
-	/// <version>  $Revision: 150492 $
+	/// <version>  $Revision: 583534 $
 	/// </version>
 	[TestFixture]
-    public class TestDocBoost
+	public class TestDocBoost : LuceneTestCase
 	{
 		private class AnonymousClassHitCollector : HitCollector
 		{
@@ -66,7 +66,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestDocBoost_Renamed_Method()
+		public virtual void  TestDocBoost_Renamed_Method()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestExplanations.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs Tue Jul 15 14:44:04 2008
@@ -19,16 +19,17 @@
 
 using NUnit.Framework;
 
-using Lucene.Net.Search.Spans;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using IndexReader = Lucene.Net.Index.IndexReader;
-using Term = Lucene.Net.Index.Term;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
 using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Lucene.Net.Search.Spans;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -46,26 +47,28 @@
 	/// <seealso cref=""Subclasses for actual tests"">
 	/// </seealso>
 	[TestFixture]
-    public class TestExplanations
+	public class TestExplanations : LuceneTestCase
 	{
 		protected internal IndexSearcher searcher;
 		
 		public const System.String FIELD = "field";
 		public static readonly Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(FIELD, new WhitespaceAnalyzer());
 		
-        [TearDown]
-		public virtual void  TearDown()
+		[TearDown]
+		public override void TearDown()
 		{
-            if (searcher != null)
-            {
-                searcher.Close();
-                searcher = null;
-            }
+			base.TearDown();
+			if (searcher != null)
+			{
+				searcher.Close();
+				searcher = null;
+			}
 		}
 		
-        [SetUp]
-		public virtual void  SetUp()
+		[SetUp]
+		public override void SetUp()
 		{
+			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
 			for (int i = 0; i < docFields.Length; i++)
@@ -85,15 +88,16 @@
 			return qp.Parse(queryText);
 		}
 		
+		/// <summary>check the expDocNrs first, then check the query (and the explanations) </summary>
 		public virtual void  Qtest(System.String queryText, int[] expDocNrs)
 		{
 			Qtest(MakeQuery(queryText), expDocNrs);
 		}
+		
+		/// <summary>check the expDocNrs first, then check the query (and the explanations) </summary>
 		public virtual void  Qtest(Query q, int[] expDocNrs)
 		{
-			// check that the expDocNrs first, then check the explanations
 			CheckHits.CheckHitCollector(q, FIELD, searcher, expDocNrs);
-			CheckHits.CheckExplanations(q, FIELD, searcher);
 		}
 		
 		/// <summary> Tests a query using qtest after wrapping it with both optB and reqB</summary>
@@ -253,8 +257,8 @@
 		/// <summary> Placeholder: JUnit freaks if you don't have one test ... making
 		/// class abstract doesn't help
 		/// </summary>
-        [Test]
-        public virtual void  TestNoop()
+		[Test]
+		public virtual void  TestNoop()
 		{
 			/* NOOP */
 		}

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestExtendedFieldCache.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestExtendedFieldCache.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestExtendedFieldCache.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestExtendedFieldCache.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+	[TestFixture]
+	public class TestExtendedFieldCache : LuceneTestCase
+	{
+		protected internal IndexReader reader;
+		private const int NUM_DOCS = 1000;
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			RAMDirectory directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			long theLong = System.Int64.MaxValue;
+			double theDouble = System.Double.MaxValue;
+			for (int i = 0; i < NUM_DOCS; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.UN_TOKENIZED));
+				//doc.Add(new Field("theDouble", System.Convert.ToString(theDouble--), Field.Store.NO, Field.Index.UN_TOKENIZED));
+				doc.Add(new Field("theDouble", (theDouble--).ToString("R"), Field.Store.NO, Field.Index.UN_TOKENIZED));
+				doc.Add(new Field("text", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+				writer.AddDocument(doc);
+			}
+			writer.Close();
+			reader = IndexReader.Open(directory);
+		}
+		
+		
+		[Test]
+		public virtual void  Test()
+		{
+			ExtendedFieldCache cache = new ExtendedFieldCacheImpl();
+			double[] doubles = cache.GetDoubles(reader, "theDouble");
+			Assert.IsTrue(doubles.Length == NUM_DOCS, "doubles Size: " + doubles.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < doubles.Length; i++)
+			{
+				Assert.IsTrue(doubles[i] == (System.Double.MaxValue - i), doubles[i] + " does not equal: " + (System.Double.MaxValue - i));
+			}
+			long[] longs = cache.GetLongs(reader, "theLong");
+			Assert.IsTrue(longs.Length == NUM_DOCS, "longs Size: " + longs.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < longs.Length; i++)
+			{
+				Assert.IsTrue(longs[i] == (System.Int64.MaxValue - i), longs[i] + " does not equal: " + (System.Int64.MaxValue - i));
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,13 +19,15 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
 using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -34,44 +36,39 @@
 	/// 
 	/// <p>Created: Apr 21, 2004 1:21:46 PM
 	/// 
+	/// 
 	/// </summary>
-	/// <author>   Tim Jones
-	/// </author>
-    /// <version>  $Id: TestFilteredQuery.java 472959 2006-11-09 16:21:50Z yonik $
-    /// </version>
+	/// <version>  $Id: TestFilteredQuery.java 587050 2007-10-22 09:58:48Z doronc $
+	/// </version>
 	/// <since>   1.4
 	/// </since>
 	[TestFixture]
-    public class TestFilteredQuery
+	public class TestFilteredQuery : LuceneTestCase
 	{
 		[Serializable]
 		private class AnonymousClassFilter : Filter
 		{
-			public AnonymousClassFilter(TestFilteredQuery enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			private void  InitBlock(TestFilteredQuery enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private TestFilteredQuery enclosingInstance;
-			public TestFilteredQuery Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
 			public override System.Collections.BitArray Bits(IndexReader reader)
 			{
-				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0?5 / 64:5 / 64 + 1) * 64);
+				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64);
 				bitset.Set(1, true);
 				bitset.Set(3, true);
 				return bitset;
 			}
 		}
+		[Serializable]
+		private class AnonymousClassFilter1 : Filter
+		{
+			public override System.Collections.BitArray Bits(IndexReader reader)
+			{
+				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64);
+				for (int i = 0; i < 5; i++)
+				{
+					bitset.Set(i, true);
+				} 
+				return bitset;
+			}
+		}
 		
 		private IndexSearcher searcher;
 		private RAMDirectory directory;
@@ -79,7 +76,7 @@
 		private Filter filter;
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
 			directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
@@ -109,24 +106,30 @@
 			
 			searcher = new IndexSearcher(directory);
 			query = new TermQuery(new Term("field", "three"));
-			filter = new AnonymousClassFilter(this);
+			filter = new AnonymousClassFilter();
+		}
+		
+		// must be static for serialization tests
+		private static Filter NewStaticFilterB()
+		{
+			return new AnonymousClassFilter();
 		}
 		
 		[TearDown]
-        public virtual void  TearDown()
+		public override void TearDown()
 		{
 			searcher.Close();
 			directory.Close();
 		}
 		
 		[Test]
-        public virtual void  TestFilteredQuery_Renamed_Method()
+		public virtual void  TestFilteredQuery_Renamed_Method()
 		{
 			Query filteredquery = new FilteredQuery(query, filter);
 			Hits hits = searcher.Search(filteredquery);
 			Assert.AreEqual(1, hits.Length());
 			Assert.AreEqual(1, hits.Id(0));
-            QueryUtils.Check(filteredquery, searcher);
+			QueryUtils.Check(filteredquery, searcher);
 			
 			hits = searcher.Search(filteredquery, new Sort("sorter"));
 			Assert.AreEqual(1, hits.Length());
@@ -135,43 +138,84 @@
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "one")), filter);
 			hits = searcher.Search(filteredquery);
 			Assert.AreEqual(2, hits.Length());
-            QueryUtils.Check(filteredquery, searcher);
+			QueryUtils.Check(filteredquery, searcher);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "x")), filter);
 			hits = searcher.Search(filteredquery);
 			Assert.AreEqual(1, hits.Length());
 			Assert.AreEqual(3, hits.Id(0));
-            QueryUtils.Check(filteredquery, searcher);
+			QueryUtils.Check(filteredquery, searcher);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "y")), filter);
 			hits = searcher.Search(filteredquery);
 			Assert.AreEqual(0, hits.Length());
-            QueryUtils.Check(filteredquery, searcher);
-        }
+			QueryUtils.Check(filteredquery, searcher);
+			
+			// test boost
+			Filter f = NewStaticFilterA();
+			
+			float boost = 2.5f;
+			BooleanQuery bq1 = new BooleanQuery();
+			TermQuery tq = new TermQuery(new Term("field", "one"));
+			tq.SetBoost(boost);
+			bq1.Add(tq, Occur.MUST);
+			bq1.Add(new TermQuery(new Term("field", "five")), Occur.MUST);
+			
+			BooleanQuery bq2 = new BooleanQuery();
+			tq = new TermQuery(new Term("field", "one"));
+			filteredquery = new FilteredQuery(tq, f);
+			filteredquery.SetBoost(boost);
+			bq2.Add(filteredquery, Occur.MUST);
+			bq2.Add(new TermQuery(new Term("field", "five")), Occur.MUST);
+			AssertScoreEquals(bq1, bq2);
+			
+			Assert.AreEqual(boost, filteredquery.GetBoost(), 0);
+			Assert.AreEqual(1.0f, tq.GetBoost(), 0); // the boost value of the underlying query shouldn't have changed 
+		}
+		
+		// must be static for serialization tests 
+		private static Filter NewStaticFilterA()
+		{
+			return new AnonymousClassFilter1();
+		}
+		
+		/// <summary> Tests whether the scores of the two queries are the same.</summary>
+		public virtual void  AssertScoreEquals(Query q1, Query q2)
+		{
+			Hits hits1 = searcher.Search(q1);
+			Hits hits2 = searcher.Search(q2);
+			
+			Assert.AreEqual(hits1.Length(), hits2.Length());
+			
+			for (int i = 0; i < hits1.Length(); i++)
+			{
+				Assert.AreEqual(hits1.Score(i), hits2.Score(i), 0.0000001f);
+			}
+		}
 		
 		/// <summary> This tests FilteredQuery's rewrite correctness</summary>
 		[Test]
-        public virtual void  TestRangeQuery()
+		public virtual void  TestRangeQuery()
 		{
 			RangeQuery rq = new RangeQuery(new Term("sorter", "b"), new Term("sorter", "d"), true);
 			
 			Query filteredquery = new FilteredQuery(rq, filter);
 			Hits hits = searcher.Search(filteredquery);
 			Assert.AreEqual(2, hits.Length());
-            QueryUtils.Check(filteredquery, searcher);
-        }
+			QueryUtils.Check(filteredquery, searcher);
+		}
 
-        [Test]		
-        public virtual void  TestBoolean()
-        {
-            BooleanQuery bq = new BooleanQuery();
-            Query query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(0));
-            bq.Add(query, BooleanClause.Occur.MUST);
-            query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(1));
-            bq.Add(query, BooleanClause.Occur.MUST);
-            Hits hits = searcher.Search(bq);
-            Assert.AreEqual(0, hits.Length());
-            QueryUtils.Check(query, searcher);
-        }
-    }
+		[Test]		
+		public virtual void  TestBoolean()
+		{
+			BooleanQuery bq = new BooleanQuery();
+			Query query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(0));
+			bq.Add(query, BooleanClause.Occur.MUST);
+			query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(1));
+			bq.Add(query, BooleanClause.Occur.MUST);
+			Hits hits = searcher.Search(bq);
+			Assert.AreEqual(0, hits.Length());
+			QueryUtils.Check(query, searcher);
+		}
+	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFuzzyQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -35,7 +36,7 @@
 	/// <author>  Daniel Naber
 	/// </author>
 	[TestFixture]
-    public class TestFuzzyQuery
+	public class TestFuzzyQuery : LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestFuzziness()
@@ -168,7 +169,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestFuzzinessLong()
+		public virtual void  TestFuzzinessLong()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
@@ -245,7 +246,7 @@
 				query = new FuzzyQuery(new Term("field", "student"), 1.1f);
 				Assert.Fail("Expected IllegalArgumentException");
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				// expecting exception
 			}
@@ -254,7 +255,7 @@
 				query = new FuzzyQuery(new Term("field", "student"), - 0.1f);
 				Assert.Fail("Expected IllegalArgumentException");
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				// expecting exception
 			}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMatchAllDocsQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -35,7 +36,7 @@
 	/// <author>  Daniel Naber
 	/// </author>
 	[TestFixture]
-    public class TestMatchAllDocsQuery
+	public class TestMatchAllDocsQuery : LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestQuery()
@@ -74,7 +75,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestEquals()
+		public virtual void  TestEquals()
 		{
 			Query q1 = new MatchAllDocsQuery();
 			Query q2 = new MatchAllDocsQuery();

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiPhraseQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,32 +19,32 @@
 
 using NUnit.Framework;
 
-using Term = Lucene.Net.Index.Term;
-using TermEnum = Lucene.Net.Index.TermEnum;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using TermEnum = Lucene.Net.Index.TermEnum;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
-	
+
 	/// <summary> This class tests the MultiPhraseQuery class.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Otis Gospodnetic, Daniel Naber
-	/// </author>
-	/// <version>  $Id: TestMultiPhraseQuery.java 219387 2005-07-17 10:47:14Z dnaber $
+	/// <version>  $Id: TestMultiPhraseQuery.java 583534 2007-10-10 16:46:35Z mikemccand $
 	/// </version>
 	[TestFixture]
-    public class TestMultiPhraseQuery
+	public class TestMultiPhraseQuery : LuceneTestCase
 	{
 		
 		[Test]
-        public virtual void  TestPhrasePrefix()
+		public virtual void  TestPhrasePrefix()
 		{
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
@@ -125,7 +125,7 @@
 				query4.Add(new Term("field2", "foobar"));
 				Assert.Fail();
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				// okay, all terms must belong to the same field
 			}
@@ -142,7 +142,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestBooleanQueryContainingSingleTermPrefixQuery()
+		public virtual void  TestBooleanQueryContainingSingleTermPrefixQuery()
 		{
 			// this tests against bug 33161 (now fixed)
 			// In order to cause the bug, the outer query must have more than one term 
@@ -174,7 +174,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestPhrasePrefixWithBooleanQuery()
+		public virtual void  TestPhrasePrefixWithBooleanQuery()
 		{
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.String[]{}), true);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcher.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs Tue Jul 15 14:44:04 2008
@@ -19,16 +19,19 @@
 
 using NUnit.Framework;
 
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using SetBasedFieldSelector = Lucene.Net.Documents.SetBasedFieldSelector;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -36,11 +39,61 @@
 	/// <summary> Tests {@link MultiSearcher} class.
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestMultiSearcher.java 354819 2005-12-07 17:48:37Z yonik $
+	/// <version>  $Id: TestMultiSearcher.java 583534 2007-10-10 16:46:35Z mikemccand $
 	/// </version>
 	[TestFixture]
-    public class TestMultiSearcher
+	public class TestMultiSearcher : LuceneTestCase
 	{
+		[Serializable]
+		private class AnonymousClassDefaultSimilarity : DefaultSimilarity
+		{
+			public AnonymousClassDefaultSimilarity(TestMultiSearcher enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestMultiSearcher enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestMultiSearcher enclosingInstance;
+			public TestMultiSearcher Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			// override all
+			public override float Idf(int docFreq, int numDocs)
+			{
+				return 100.0f;
+			}
+			public override float Coord(int overlap, int maxOverlap)
+			{
+				return 1.0f;
+			}
+			public override float LengthNorm(System.String fieldName, int numTokens)
+			{
+				return 1.0f;
+			}
+			public override float QueryNorm(float sumOfSquaredWeights)
+			{
+				return 1.0f;
+			}
+			public override float SloppyFreq(int distance)
+			{
+				return 1.0f;
+			}
+			public override float Tf(float freq)
+			{
+				return 1.0f;
+			}
+		}
+
+		public TestMultiSearcher() : base()
+		{
+		}
 		
 		/// <summary> ReturnS a new instance of the concrete MultiSearcher class
 		/// used in this test.
@@ -51,26 +104,26 @@
 		}
 		
 		[Test]
-        public virtual void  TestEmptyIndex()
+		public virtual void  TestEmptyIndex()
 		{
 			// creating two directories for indices
-			Directory indexStoreA = new RAMDirectory();
-			Directory indexStoreB = new RAMDirectory();
+			Directory indexStoreA = new MockRAMDirectory();
+			Directory indexStoreB = new MockRAMDirectory();
 			
 			// creating a document to store
-			Lucene.Net.Documents.Document lDoc = new Lucene.Net.Documents.Document();
+			Document lDoc = new Document();
 			lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.TOKENIZED));
 			lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.UN_TOKENIZED));
 			lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
 			
 			// creating a document to store
-			Lucene.Net.Documents.Document lDoc2 = new Lucene.Net.Documents.Document();
+			Document lDoc2 = new Document();
 			lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.TOKENIZED));
 			lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.UN_TOKENIZED));
 			lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
 			
 			// creating a document to store
-			Lucene.Net.Documents.Document lDoc3 = new Lucene.Net.Documents.Document();
+			Document lDoc3 = new Document();
 			lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.TOKENIZED));
 			lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.UN_TOKENIZED));
 			lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
@@ -95,8 +148,8 @@
 			writerB.Close();
 			
 			// creating the query
-            Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("fulltext", new StandardAnalyzer());
-            Query query = parser.Parse("handle:1");
+			Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("fulltext", new StandardAnalyzer());
+			Query query = parser.Parse("handle:1");
 			
 			// building the searchables
 			Searcher[] searchers = new Searcher[2];
@@ -113,7 +166,7 @@
 			// iterating over the hit documents
 			for (int i = 0; i < hits.Length(); i++)
 			{
-				Lucene.Net.Documents.Document d = hits.Doc(i);
+				Document d = hits.Doc(i);
 			}
 			mSearcher.Close();
 			
@@ -144,22 +197,22 @@
 			for (int i = 0; i < hits2.Length(); i++)
 			{
 				// no exception should happen at this point
-				Lucene.Net.Documents.Document d = hits2.Doc(i);
+				Document d = hits2.Doc(i);
 			}
-			mSearcher2.Close();
 			
-            // test the subSearcher() method:
-            Query subSearcherQuery = parser.Parse("id:doc1");
-            hits2 = mSearcher2.Search(subSearcherQuery);
-            Assert.AreEqual(2, hits2.Length());
-            Assert.AreEqual(0, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[0]
-            Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(1))); // hit from searchers2[1]
-            subSearcherQuery = parser.Parse("id:doc2");
-            hits2 = mSearcher2.Search(subSearcherQuery);
-            Assert.AreEqual(1, hits2.Length());
-            Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[1]
+			// test the subSearcher() method:
+			Query subSearcherQuery = parser.Parse("id:doc1");
+			hits2 = mSearcher2.Search(subSearcherQuery);
+			Assert.AreEqual(2, hits2.Length());
+			Assert.AreEqual(0, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[0]
+			Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(1))); // hit from searchers2[1]
+			subSearcherQuery = parser.Parse("id:doc2");
+			hits2 = mSearcher2.Search(subSearcherQuery);
+			Assert.AreEqual(1, hits2.Length());
+			Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[1]
+			mSearcher2.Close();
 			
-            //--------------------------------------------------------------------
+			//--------------------------------------------------------------------
 			// scenario 3
 			//--------------------------------------------------------------------
 			
@@ -189,17 +242,19 @@
 			// iterating over the hit documents
 			for (int i = 0; i < hits3.Length(); i++)
 			{
-				Lucene.Net.Documents.Document d = hits3.Doc(i);
+				Document d = hits3.Doc(i);
 			}
 			mSearcher3.Close();
+			indexStoreA.Close();
+			indexStoreB.Close();
 		}
 		
-		private static Lucene.Net.Documents.Document CreateDocument(System.String contents1, System.String contents2)
+		private static Document CreateDocument(System.String contents1, System.String contents2)
 		{
-			Lucene.Net.Documents.Document document = new Lucene.Net.Documents.Document();
+			Document document = new Document();
 			
 			document.Add(new Field("contents", contents1, Field.Store.YES, Field.Index.UN_TOKENIZED));
-			
+			document.Add(new Field("other", "other contents", Field.Store.YES, Field.Index.UN_TOKENIZED));
 			if (contents2 != null)
 			{
 				document.Add(new Field("contents", contents2, Field.Store.YES, Field.Index.UN_TOKENIZED));
@@ -218,7 +273,7 @@
 				
 				for (int i = 0; i < nDocs; i++)
 				{
-					indexWriter.AddDocument(CreateDocument("doc" + i, contents2));
+					indexWriter.AddDocument(CreateDocument("doc" + i, contents2));
 				}
 			}
 			finally
@@ -230,6 +285,53 @@
 			}
 		}
 		
+		[Test]
+		public virtual void  TestFieldSelector()
+		{
+			RAMDirectory ramDirectory1, ramDirectory2;
+			IndexSearcher indexSearcher1, indexSearcher2;
+			
+			ramDirectory1 = new RAMDirectory();
+			ramDirectory2 = new RAMDirectory();
+			Query query = new TermQuery(new Term("contents", "doc0"));
+			
+			// Now put the documents in a different index
+			InitIndex(ramDirectory1, 10, true, null); // documents with a single token "doc0", "doc1", etc...
+			InitIndex(ramDirectory2, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
+			
+			indexSearcher1 = new IndexSearcher(ramDirectory1);
+			indexSearcher2 = new IndexSearcher(ramDirectory2);
+			
+			MultiSearcher searcher = GetMultiSearcherInstance(new Searcher[]{indexSearcher1, indexSearcher2});
+			Assert.IsTrue(searcher != null, "searcher is null and it shouldn't be");
+			Hits hits = searcher.Search(query);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.Length() == 2, hits.Length() + " does not equal: " + 2);
+			Document document = searcher.Doc(hits.Id(0));
+			Assert.IsTrue(document != null, "document is null and it shouldn't be");
+			Assert.IsTrue(document.GetFields().Count == 2, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 2);
+			//Should be one document from each directory
+			//they both have two fields, contents and other
+			System.Collections.Hashtable ftl = new System.Collections.Hashtable();
+			ftl.Add("other", "other");
+			SetBasedFieldSelector fs = new SetBasedFieldSelector(ftl, new System.Collections.Hashtable());
+			document = searcher.Doc(hits.Id(0), fs);
+			Assert.IsTrue(document != null, "document is null and it shouldn't be");
+			Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
+			System.String value_Renamed = document.Get("contents");
+			Assert.IsTrue(value_Renamed == null, "value is not null and it should be");
+			value_Renamed = document.Get("other");
+			Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
+			ftl.Clear();
+			ftl.Add("contents", "contents");
+			fs = new SetBasedFieldSelector(ftl, new System.Collections.Hashtable());
+			document = searcher.Doc(hits.Id(1), fs);
+			value_Renamed = document.Get("contents");
+			Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
+			value_Renamed = document.Get("other");
+			Assert.IsTrue(value_Renamed == null, "value is not null and it should be");
+		}
+		
 		/* uncomment this when the highest score is always normalized to 1.0, even when it was < 1.0
 		public void testNormalization1() throws IOException {
 		testNormalization(1, "Using 1 document per index:");
@@ -237,12 +339,12 @@
 		*/
 		
 		[Test]
-        public virtual void  TestNormalization10()
+		public virtual void  TestNormalization10()
 		{
-			_TestNormalization(10, "Using 10 documents per index:");
+			TestNormalization(10, "Using 10 documents per index:");
 		}
 		
-        private void  _TestNormalization(int nDocs, System.String message)
+		private void  TestNormalization(int nDocs, System.String message)
 		{
 			Query query = new TermQuery(new Term("contents", "doc0"));
 			
@@ -250,7 +352,7 @@
 			IndexSearcher indexSearcher1;
 			Hits hits;
 			
-			ramDirectory1 = new RAMDirectory();
+			ramDirectory1 = new MockRAMDirectory();
 			
 			// First put the documents in the same index
 			InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
@@ -278,8 +380,8 @@
 			RAMDirectory ramDirectory2;
 			IndexSearcher indexSearcher2;
 			
-			ramDirectory1 = new RAMDirectory();
-			ramDirectory2 = new RAMDirectory();
+			ramDirectory1 = new MockRAMDirectory();
+			ramDirectory2 = new MockRAMDirectory();
 			
 			// Now put the documents in a different index
 			InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
@@ -313,5 +415,35 @@
 			ramDirectory1.Close();
 			ramDirectory2.Close();
 		}
+		
+		/// <summary> test that custom similarity is in effect when using MultiSearcher (LUCENE-789).</summary>
+		/// <throws>  IOException  </throws>
+		[Test]
+		public virtual void  TestCustomSimilarity()
+		{
+			RAMDirectory dir = new RAMDirectory();
+			InitIndex(dir, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
+			IndexSearcher srchr = new IndexSearcher(dir);
+			MultiSearcher msrchr = GetMultiSearcherInstance(new Searcher[]{srchr});
+			
+			Similarity customSimilarity = new AnonymousClassDefaultSimilarity(this);
+			
+			srchr.SetSimilarity(customSimilarity);
+			msrchr.SetSimilarity(customSimilarity);
+			
+			Query query = new TermQuery(new Term("contents", "doc0"));
+			
+			// Get a score from IndexSearcher
+			TopDocs topDocs = srchr.Search(query, null, 1);
+			float score1 = topDocs.GetMaxScore();
+			
+			// Get the score from MultiSearcher
+			topDocs = msrchr.Search(query, null, 1);
+			float scoreN = topDocs.GetMaxScore();
+			
+			// The scores from the IndexSearcher and Multisearcher should be the same
+			// if the same similarity is used.
+			Assert.AreEqual(score1, scoreN, 1e-6, "MultiSearcher score must be equal to single searcher score!");
+		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcherRanking.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs Tue Jul 15 14:44:04 2008
@@ -19,7 +19,6 @@
 
 using NUnit.Framework;
 
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
@@ -27,6 +26,8 @@
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -38,7 +39,7 @@
 	/// <version>  $Id: TestMultiSearcher.java 150492 2004-09-06 22:01:49Z dnaber $
 	/// </version>
 	[TestFixture]
-    public class TestMultiSearcherRanking
+	public class TestMultiSearcherRanking : LuceneTestCase
 	{
 		
 		private bool verbose = false; // set to true to output hits
@@ -47,43 +48,43 @@
 		private Searcher singleSearcher;
 		
 		[Test]
-        public virtual void  TestOneTermQuery()
+		public virtual void  TestOneTermQuery()
 		{
 			CheckQuery("three");
 		}
 		
 		[Test]
-        public virtual void  TestTwoTermQuery()
+		public virtual void  TestTwoTermQuery()
 		{
 			CheckQuery("three foo");
 		}
 		
 		[Test]
-        public virtual void  TestPrefixQuery()
+		public virtual void  TestPrefixQuery()
 		{
 			CheckQuery("multi*");
 		}
 		
 		[Test]
-        public virtual void  TestFuzzyQuery()
+		public virtual void  TestFuzzyQuery()
 		{
 			CheckQuery("multiThree~");
 		}
 		
 		[Test]
-        public virtual void  TestRangeQuery()
+		public virtual void  TestRangeQuery()
 		{
 			CheckQuery("{multiA TO multiP}");
 		}
 		
 		[Test]
-        public virtual void  TestMultiPhraseQuery()
+		public virtual void  TestMultiPhraseQuery()
 		{
 			CheckQuery("\"blueberry pi*\"");
 		}
 		
 		[Test]
-        public virtual void  TestNoMatchQuery()
+		public virtual void  TestNoMatchQuery()
 		{
 			CheckQuery("+three +nomatch");
 		}
@@ -108,9 +109,9 @@
 			// check result hit ranking
 			if (verbose)
 				System.Console.Out.WriteLine("Query: " + queryStr);
-            Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser(FIELD_NAME, new StandardAnalyzer());
-            Lucene.Net.Search.Query query = queryParser.Parse(queryStr);
-            Hits multiSearcherHits = multiSearcher.Search(query);
+			Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser(FIELD_NAME, new StandardAnalyzer());
+			Lucene.Net.Search.Query query = queryParser.Parse(queryStr);
+			Hits multiSearcherHits = multiSearcher.Search(query);
 			Hits singleSearcherHits = singleSearcher.Search(query);
 			Assert.AreEqual(multiSearcherHits.Length(), singleSearcherHits.Length());
 			for (int i = 0; i < multiSearcherHits.Length(); i++)
@@ -130,8 +131,9 @@
 		
 		/// <summary> initializes multiSearcher and singleSearcher with the same document set</summary>
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
+			base.SetUp();
 			// create MultiSearcher from two seperate searchers
 			Directory d1 = new RAMDirectory();
 			IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(), true);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiThreadTermVectors.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs Tue Jul 15 14:44:04 2008
@@ -19,23 +19,23 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using TermFreqVector = Lucene.Net.Index.TermFreqVector;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Search
 {
 	
-	/// <author>  Bernhard Messer
-	/// </author>
-	/// <version>  $rcs = ' $Id: TestMultiThreadTermVectors.java 150569 2004-10-06 10:40:23Z goller $ ' ;
+	/// <summary> </summary>
+	/// <version>  $rcs = ' $Id: TestMultiThreadTermVectors.java 583534 2007-10-10 16:46:35Z mikemccand $ ' ;
 	/// </version>
 	[TestFixture]
-    public class TestMultiThreadTermVectors
+	public class TestMultiThreadTermVectors : LuceneTestCase
 	{
 		private RAMDirectory directory = new RAMDirectory();
 		public int numDocs = 100;
@@ -43,8 +43,9 @@
 		
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
+			base.SetUp();
 			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
 			//writer.setUseCompoundFile(false);
 			//writer.infoStream = System.out;
@@ -59,7 +60,7 @@
 		}
 		
 		[Test]
-        public virtual void  Test()
+		public virtual void  Test()
 		{
 			
 			IndexReader reader = null;
@@ -91,7 +92,7 @@
 			}
 		}
 		
-        public virtual void  _TestTermPositionVectors(IndexReader reader, int threadCount)
+		public virtual void  _TestTermPositionVectors(IndexReader reader, int threadCount)
 		{
 			MultiThreadTermVectorsReader[] mtr = new MultiThreadTermVectorsReader[threadCount];
 			for (int i = 0; i < threadCount; i++)
@@ -120,7 +121,7 @@
 						threadsAlive--;
 					}
 				}
-				catch (System.Threading.ThreadInterruptedException ie)
+				catch (System.Threading.ThreadInterruptedException)
 				{
 				}
 			}
@@ -178,7 +179,7 @@
 		}
 		
 		[Test]
-        private void  TestTermVectors()
+		private void  TestTermVectors()
 		{
 			// check:
 			int numDocs = reader.NumDocs();

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestNot.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs Tue Jul 15 14:44:04 2008
@@ -19,29 +19,29 @@
 
 using NUnit.Framework;
 
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary>Similarity unit test.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Doug Cutting
-	/// </author>
-	/// <version>  $Revision: 150497 $
+	/// <version>  $Revision: 583534 $
 	/// </version>
 	[TestFixture]
-    public class TestNot
+	public class TestNot : LuceneTestCase
 	{
 		
 		[Test]
-        public virtual void  TestNot_Renamed_Method()
+		public virtual void  TestNot_Renamed_Method()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
@@ -54,9 +54,9 @@
 			writer.Close();
 			
 			Searcher searcher = new IndexSearcher(store);
-            Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
-            Lucene.Net.Search.Query query = parser.Parse("a NOT b");
-            //System.out.println(query);
+			Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
+			Lucene.Net.Search.Query query = parser.Parse("a NOT b");
+			//System.out.println(query);
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length());
 		}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestParallelMultiSearcher.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs Tue Jul 15 14:44:04 2008
@@ -15,12 +15,14 @@
  * limitations under the License.
  */
 
+using NUnit.Framework;
 using System;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> Unit tests for the ParallelMultiSearcher </summary>
+	[TestFixture]
 	public class TestParallelMultiSearcher : TestMultiSearcher
 	{
 		

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhrasePrefixQuery.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs Tue Jul 15 14:44:04 2008
@@ -19,7 +19,6 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -27,24 +26,24 @@
 using Term = Lucene.Net.Index.Term;
 using TermEnum = Lucene.Net.Index.TermEnum;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> This class tests PhrasePrefixQuery class.
 	/// 
+	/// 
 	/// </summary>
-	/// <author>  Otis Gospodnetic
-	/// </author>
-	/// <version>  $Id: TestPhrasePrefixQuery.java 150497 2004-09-07 18:26:36Z dnaber $
+	/// <version>  $Id: TestPhrasePrefixQuery.java 583534 2007-10-10 16:46:35Z mikemccand $
 	/// </version>
 	[TestFixture]
-    public class TestPhrasePrefixQuery
+	public class TestPhrasePrefixQuery : LuceneTestCase
 	{
-		
 		/// <summary> </summary>
 		[Test]
-        public virtual void  TestPhrasePrefix()
+		public virtual void  TestPhrasePrefix()
 		{
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
@@ -69,10 +68,10 @@
 			IndexSearcher searcher = new IndexSearcher(indexStore);
 			
 			//PhrasePrefixQuery query1 = new PhrasePrefixQuery();
-            MultiPhraseQuery query1 = new MultiPhraseQuery();
-            //PhrasePrefixQuery query2 = new PhrasePrefixQuery();
-            MultiPhraseQuery query2 = new MultiPhraseQuery();
-            query1.Add(new Term("body", "blueberry"));
+			MultiPhraseQuery query1 = new MultiPhraseQuery();
+			//PhrasePrefixQuery query2 = new PhrasePrefixQuery();
+			MultiPhraseQuery query2 = new MultiPhraseQuery();
+			query1.Add(new Term("body", "blueberry"));
 			query2.Add(new Term("body", "strawberry"));
 			
 			System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();



Mime
View raw message