lucenenet-commits mailing list archives

From: aro...@apache.org
Subject: svn commit: r677059 [9/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/ ...
Date: Tue, 15 Jul 2008 21:44:10 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterDelete.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterDelete.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterDelete.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterDelete.cs Tue Jul 15 14:44:04 2008
@@ -19,25 +19,130 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Hits = Lucene.Net.Search.Hits;
-using IndexSearcher = Lucene.Net.Search.IndexSearcher;
-using TermQuery = Lucene.Net.Search.TermQuery;
 using Directory = Lucene.Net.Store.Directory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Hits = Lucene.Net.Search.Hits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
-    [TestFixture]
-    public class TestIndexWriterDelete
+	[TestFixture]
+	public class TestIndexWriterDelete : LuceneTestCase
 	{
 		
+		private class AnonymousClassFailure : MockRAMDirectory.Failure
+		{
+			public AnonymousClassFailure(TestIndexWriterDelete enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestIndexWriterDelete enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestIndexWriterDelete enclosingInstance;
+			public TestIndexWriterDelete Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal bool sawMaybe = false;
+			internal bool failed = false;
+			public override MockRAMDirectory.Failure Reset()
+			{
+				sawMaybe = false;
+				failed = false;
+				return this;
+			}
+			public override void  Eval(MockRAMDirectory dir)
+			{
+				if (sawMaybe && !failed)
+				{
+					bool seen = false;
+					// {{DOUG-2.3.1}} this code is suspect. I have preserved the original (below) for
+					// comparative purposes.
+					seen = new System.Exception().StackTrace.Contains("applyDeletes");
+					//StackTraceElement[] trace = new System.Exception().getStackTrace();
+					//for (int i = 0; i < trace.Length; i++)
+					//{
+					//    if ("applyDeletes".Equals(trace[i].getMethodName()))
+					//    {
+					//        seen = true;
+					//        break;
+					//    }
+					//}
+					if (!seen)
+					{
+						// Only fail once we are no longer in applyDeletes
+						failed = true;
+						throw new System.IO.IOException("fail after applyDeletes");
+					}
+				}
+				if (!failed)
+				{
+					// {{DOUG-2.3.1}} this code is suspect. I have preserved the original (below) for
+					// comparative purposes.
+					sawMaybe = new System.Exception().StackTrace.Contains("applyDeletes");
+					//StackTraceElement[] trace = new System.Exception().getStackTrace();
+					//for (int i = 0; i < trace.Length; i++)
+					//{
+					//    if ("applyDeletes".Equals(trace[i].getMethodName()))
+					//    {
+					//        sawMaybe = true;
+					//        break;
+					//    }
+					//}
+				}
+			}
+		}
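
[The {{DOUG-2.3.1}} port above replaces Java's frame-by-frame getStackTrace() loop with a substring search over Exception.StackTrace. A frame-accurate equivalent is possible with System.Diagnostics.StackTrace; the sketch below is illustrative only (SawMethod is a hypothetical helper, not part of this commit). Note also that the substring test is case-sensitive, so if the ported method is named ApplyDeletes, matching on "applyDeletes" would never fire; worth verifying against the ported IndexWriter.

    // Hypothetical frame-accurate port of the preserved Java loop:
    private static bool SawMethod(System.String methodName)
    {
        System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
        for (int i = 0; i < trace.FrameCount; i++)
        {
            // exact method-name match, like StackTraceElement.getMethodName()
            System.Reflection.MethodBase method = trace.GetFrame(i).GetMethod();
            if (method != null && methodName.Equals(method.Name))
            {
                return true;
            }
        }
        return false;
    }

Eval() could then set seen = SawMethod("applyDeletes") (or the ported name) instead of relying on the substring check.]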
+		
+		private class AnonymousClassFailure1 : MockRAMDirectory.Failure
+		{
+			public AnonymousClassFailure1(TestIndexWriterDelete enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestIndexWriterDelete enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestIndexWriterDelete enclosingInstance;
+			public TestIndexWriterDelete Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal bool failed = false;
+			public override MockRAMDirectory.Failure Reset()
+			{
+				failed = false;
+				return this;
+			}
+			public override void  Eval(MockRAMDirectory dir)
+			{
+				if (!failed)
+				{
+					failed = true;
+					throw new System.IO.IOException("fail in add doc");
+				}
+			}
+		}
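
[Both inner classes implement the MockRAMDirectory.Failure hook: Eval() is invoked on simulated directory operations, Reset() re-arms the state, and FailOn() registers the hook, as the tests below do with dir.FailOn(failure.Reset()). A minimal arm-then-expect sketch, using only calls that appear in this file:

    MockRAMDirectory dir = new MockRAMDirectory();
    MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
    dir.FailOn(failure.Reset());                 // arm the one-shot failure
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    try
    {
        writer.AddDocument(new Document());      // Eval() throws on the first write
    }
    catch (System.IO.IOException)
    {
        // expected exactly once; the hook stays spent until Reset()
    }
]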
+		
 		// test the simple case
-        [Test]
+		[Test]
 		public virtual void  TestSimpleCase()
 		{
 			System.String[] keywords = new System.String[]{"1", "2"};
@@ -45,180 +150,285 @@
 			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
 			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
 			
-			Directory dir = new RAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			modifier.SetUseCompoundFile(true);
-			modifier.SetMaxBufferedDeleteTerms(1);
-			
-			for (int i = 0; i < keywords.Length; i++)
-			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-				doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
-				doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-				doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.TOKENIZED));
-				doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.TOKENIZED));
-				modifier.AddDocument(doc);
-			}
-			modifier.Optimize();
-			
-			Term term = new Term("city", "Amsterdam");
-			int hitCount = GetHitCount(dir, term);
-			Assert.AreEqual(1, hitCount);
-			modifier.DeleteDocuments(term);
-			hitCount = GetHitCount(dir, term);
-			Assert.AreEqual(0, hitCount);
-			
-			modifier.Close();
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = (0 == pass);
+				
+				Directory dir = new RAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetUseCompoundFile(true);
+				modifier.SetMaxBufferedDeleteTerms(1);
+				
+				for (int i = 0; i < keywords.Length; i++)
+				{
+					Document doc = new Document();
+					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
+					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.TOKENIZED));
+					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.TOKENIZED));
+					modifier.AddDocument(doc);
+				}
+				modifier.Optimize();
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				Term term = new Term("city", "Amsterdam");
+				int hitCount = GetHitCount(dir, term);
+				Assert.AreEqual(1, hitCount);
+				if (!autoCommit)
+				{
+					modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+					modifier.SetUseCompoundFile(true);
+				}
+				modifier.DeleteDocuments(term);
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				hitCount = GetHitCount(dir, term);
+				Assert.AreEqual(0, hitCount);
+				
+				if (autoCommit)
+				{
+					modifier.Close();
+				}
+				dir.Close();
+			}
 		}
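
[The new two-pass loop exercises both modes of the IndexWriter(Directory, bool autoCommit, Analyzer, bool create) constructor. With autoCommit=false, buffered changes become visible to readers only at Close(), which is why the !autoCommit branches close and reopen the writer around each hit-count check. The visibility rule as a minimal sketch, assuming the same 2.3-era API:

    Directory dir = new RAMDirectory();
    IndexWriter w = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
    w.AddDocument(new Document());
    // a reader opened at this point would still see an empty index
    w.Close();                                   // Close() is the commit point
    IndexReader r = IndexReader.Open(dir);
    System.Diagnostics.Debug.Assert(1 == r.NumDocs());
    r.Close();
]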
 		
 		// test when delete terms only apply to disk segments
-        [Test]
+		[Test]
 		public virtual void  TestNonRAMDelete()
 		{
-			Directory dir = new RAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			modifier.SetMaxBufferedDocs(2);
-			modifier.SetMaxBufferedDeleteTerms(2);
-			
-			int id = 0;
-			int value_Renamed = 100;
-			
-			for (int i = 0; i < 7; i++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				AddDoc(modifier, ++id, value_Renamed);
+				bool autoCommit = (0 == pass);
+				
+				Directory dir = new RAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetMaxBufferedDocs(2);
+				modifier.SetMaxBufferedDeleteTerms(2);
+				
+				int id = 0;
+				int value_Renamed = 100;
+				
+				for (int i = 0; i < 7; i++)
+				{
+					AddDoc(modifier, ++id, value_Renamed);
+				}
+				modifier.Flush();
+				
+				Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
+				Assert.IsTrue(0 < modifier.GetSegmentCount());
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				IndexReader reader = IndexReader.Open(dir);
+				Assert.AreEqual(7, reader.NumDocs());
+				reader.Close();
+				
+				if (!autoCommit)
+				{
+					modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+					modifier.SetMaxBufferedDocs(2);
+					modifier.SetMaxBufferedDeleteTerms(2);
+				}
+				
+				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				reader = IndexReader.Open(dir);
+				Assert.AreEqual(0, reader.NumDocs());
+				reader.Close();
+				if (autoCommit)
+				{
+					modifier.Close();
+				}
+				dir.Close();
 			}
-			modifier.Flush();
-			
-			Assert.AreEqual(0, modifier.GetRamSegmentCount());
-			Assert.IsTrue(0 < modifier.GetSegmentCount());
-			
-			IndexReader reader = IndexReader.Open(dir);
-			Assert.AreEqual(7, reader.NumDocs());
-			reader.Close();
-			
-			modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-			modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-			
-			reader = IndexReader.Open(dir);
-			Assert.AreEqual(0, reader.NumDocs());
-			reader.Close();
-			
-			modifier.Close();
 		}
 		
 		// test when delete terms only apply to ram segments
-        [Test]
+		[Test]
 		public virtual void  TestRAMDeletes()
 		{
-			Directory dir = new RAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			modifier.SetMaxBufferedDocs(4);
-			modifier.SetMaxBufferedDeleteTerms(4);
-			
-			int id = 0;
-			int value_Renamed = 100;
-			
-			AddDoc(modifier, ++id, value_Renamed);
-			modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-			AddDoc(modifier, ++id, value_Renamed);
-			modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-			
-			Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
-			Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
-			
-			AddDoc(modifier, ++id, value_Renamed);
-			Assert.AreEqual(0, modifier.GetSegmentCount());
-			modifier.Flush();
-			
-			IndexReader reader = IndexReader.Open(dir);
-			Assert.AreEqual(1, reader.NumDocs());
-			
-			int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
-			Assert.AreEqual(1, hitCount);
-			reader.Close();
-			
-			modifier.Close();
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = (0 == pass);
+				Directory dir = new RAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetMaxBufferedDocs(4);
+				modifier.SetMaxBufferedDeleteTerms(4);
+				
+				int id = 0;
+				int value_Renamed = 100;
+				
+				AddDoc(modifier, ++id, value_Renamed);
+				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+				AddDoc(modifier, ++id, value_Renamed);
+				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+				
+				Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
+				Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
+				
+				AddDoc(modifier, ++id, value_Renamed);
+				Assert.AreEqual(0, modifier.GetSegmentCount());
+				modifier.Flush();
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				IndexReader reader = IndexReader.Open(dir);
+				Assert.AreEqual(1, reader.NumDocs());
+				
+				int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
+				Assert.AreEqual(1, hitCount);
+				reader.Close();
+				if (autoCommit)
+				{
+					modifier.Close();
+				}
+				dir.Close();
+			}
 		}
 		
 		// test when delete terms apply to both disk and ram segments
-        [Test]
+		[Test]
 		public virtual void  TestBothDeletes()
 		{
-			Directory dir = new RAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			modifier.SetMaxBufferedDocs(100);
-			modifier.SetMaxBufferedDeleteTerms(100);
-			
-			int id = 0;
-			int value_Renamed = 100;
-			
-			for (int i = 0; i < 5; i++)
-			{
-				AddDoc(modifier, ++id, value_Renamed);
-			}
-			
-			value_Renamed = 200;
-			for (int i = 0; i < 5; i++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				AddDoc(modifier, ++id, value_Renamed);
-			}
-			modifier.Flush();
-			
-			for (int i = 0; i < 5; i++)
-			{
-				AddDoc(modifier, ++id, value_Renamed);
+				bool autoCommit = (0 == pass);
+				
+				Directory dir = new RAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetMaxBufferedDocs(100);
+				modifier.SetMaxBufferedDeleteTerms(100);
+				
+				int id = 0;
+				int value_Renamed = 100;
+				
+				for (int i = 0; i < 5; i++)
+				{
+					AddDoc(modifier, ++id, value_Renamed);
+				}
+				
+				value_Renamed = 200;
+				for (int i = 0; i < 5; i++)
+				{
+					AddDoc(modifier, ++id, value_Renamed);
+				}
+				modifier.Flush();
+				
+				for (int i = 0; i < 5; i++)
+				{
+					AddDoc(modifier, ++id, value_Renamed);
+				}
+				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+				
+				modifier.Flush();
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				IndexReader reader = IndexReader.Open(dir);
+				Assert.AreEqual(5, reader.NumDocs());
+				if (autoCommit)
+				{
+					modifier.Close();
+				}
 			}
-			modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-			modifier.Flush();
-			
-			IndexReader reader = IndexReader.Open(dir);
-			Assert.AreEqual(5, reader.NumDocs());
-			
-			modifier.Close();
 		}
 		
 		// test that batched delete terms are flushed together
-        [Test]
+		[Test]
 		public virtual void  TestBatchDeletes()
 		{
-			Directory dir = new RAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			modifier.SetMaxBufferedDocs(2);
-			modifier.SetMaxBufferedDeleteTerms(2);
-			
-			int id = 0;
-			int value_Renamed = 100;
-			
-			for (int i = 0; i < 7; i++)
-			{
-				AddDoc(modifier, ++id, value_Renamed);
-			}
-			modifier.Flush();
-			
-			IndexReader reader = IndexReader.Open(dir);
-			Assert.AreEqual(7, reader.NumDocs());
-			reader.Close();
-			
-			id = 0;
-			modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-			modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-			
-			reader = IndexReader.Open(dir);
-			Assert.AreEqual(5, reader.NumDocs());
-			reader.Close();
-			
-			Term[] terms = new Term[3];
-			for (int i = 0; i < terms.Length; i++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				terms[i] = new Term("id", System.Convert.ToString(++id));
+				bool autoCommit = (0 == pass);
+				Directory dir = new RAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetMaxBufferedDocs(2);
+				modifier.SetMaxBufferedDeleteTerms(2);
+				
+				int id = 0;
+				int value_Renamed = 100;
+				
+				for (int i = 0; i < 7; i++)
+				{
+					AddDoc(modifier, ++id, value_Renamed);
+				}
+				modifier.Flush();
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				IndexReader reader = IndexReader.Open(dir);
+				Assert.AreEqual(7, reader.NumDocs());
+				reader.Close();
+				
+				if (!autoCommit)
+				{
+					modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+					modifier.SetMaxBufferedDocs(2);
+					modifier.SetMaxBufferedDeleteTerms(2);
+				}
+				
+				id = 0;
+				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
+				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				reader = IndexReader.Open(dir);
+				Assert.AreEqual(5, reader.NumDocs());
+				reader.Close();
+				
+				Term[] terms = new Term[3];
+				for (int i = 0; i < terms.Length; i++)
+				{
+					terms[i] = new Term("id", System.Convert.ToString(++id));
+				}
+				if (!autoCommit)
+				{
+					modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+					modifier.SetMaxBufferedDocs(2);
+					modifier.SetMaxBufferedDeleteTerms(2);
+				}
+				modifier.DeleteDocuments(terms);
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				reader = IndexReader.Open(dir);
+				Assert.AreEqual(2, reader.NumDocs());
+				reader.Close();
+				
+				if (autoCommit)
+				{
+					modifier.Close();
+				}
+				dir.Close();
 			}
-			modifier.DeleteDocuments(terms);
-			
-			reader = IndexReader.Open(dir);
-			Assert.AreEqual(2, reader.NumDocs());
-			reader.Close();
-			
-			modifier.Close();
 		}
 		
 		private void  AddDoc(IndexWriter modifier, int id, int value_Renamed)
@@ -238,13 +448,13 @@
 			return hitCount;
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestDeletesOnDiskFull()
 		{
 			TestOperationsOnDiskFull(false);
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestUpdatesOnDiskFull()
 		{
 			TestOperationsOnDiskFull(true);
@@ -253,6 +463,6 @@
 		/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
 		/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
 		/// </summary>
 		private void  TestOperationsOnDiskFull(bool updates)
 		{
 			
@@ -261,221 +472,420 @@
 			int START_COUNT = 157;
 			int END_COUNT = 144;
 			
-			// First build up a starting index:
-			RAMDirectory startDir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
-			for (int i = 0; i < 157; i++)
-			{
-				Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
-				d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
-				d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.TOKENIZED));
-				writer.AddDocument(d);
-			}
-			writer.Close();
-			
-			long diskUsage = startDir.SizeInBytes();
-			long diskFree = diskUsage + 10;
-			
-			System.IO.IOException err = null;
-			
-			bool done = false;
-			
-			// Iterate w/ ever increasing free disk space:
-			while (!done)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				MockRAMDirectory dir = new MockRAMDirectory(startDir);
-				IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+				bool autoCommit = (0 == pass);
 				
-				modifier.SetMaxBufferedDocs(1000); // use flush or Close
-				modifier.SetMaxBufferedDeleteTerms(1000); // use flush or Close
+				// First build up a starting index:
+				RAMDirectory startDir = new RAMDirectory();
+				IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
+				for (int i = 0; i < 157; i++)
+				{
+					Document d = new Document();
+					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+					d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.TOKENIZED));
+					writer.AddDocument(d);
+				}
+				writer.Close();
 				
-				// For each disk size, first try to commit against
-				// dir that will hit random IOExceptions & disk
-				// full; after, give it infinite disk space & turn
-				// off random IOExceptions & retry w/ same reader:
-				bool success = false;
+				long diskUsage = startDir.SizeInBytes();
+				long diskFree = diskUsage + 10;
 				
-				for (int x = 0; x < 2; x++)
+				System.IO.IOException err = null;
+				
+				bool done = false;
+				
+				// Iterate w/ ever increasing free disk space:
+				while (!done)
 				{
+					MockRAMDirectory dir = new MockRAMDirectory(startDir);
+					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
 					
-					double rate = 0.1;
-					double diskRatio = ((double) diskFree) / diskUsage;
-					long thisDiskFree;
-					System.String testName;
+					modifier.SetMaxBufferedDocs(1000); // use flush or close
+					modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
 					
-					if (0 == x)
-					{
-						thisDiskFree = diskFree;
-						if (diskRatio >= 2.0)
-						{
-							rate /= 2;
-						}
-						if (diskRatio >= 4.0)
+					// For each disk size, first try to commit against
+					// dir that will hit random IOExceptions & disk
+					// full; after, give it infinite disk space & turn
+					// off random IOExceptions & retry w/ same reader:
+					bool success = false;
+					
+					for (int x = 0; x < 2; x++)
+					{
+						
+						double rate = 0.1;
+						//UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1042'"
+						double diskRatio = ((double) diskFree) / diskUsage;
+						long thisDiskFree;
+						System.String testName;
+						
+						if (0 == x)
 						{
-							rate /= 2;
+							thisDiskFree = diskFree;
+							if (diskRatio >= 2.0)
+							{
+								rate /= 2;
+							}
+							if (diskRatio >= 4.0)
+							{
+								rate /= 2;
+							}
+							if (diskRatio >= 6.0)
+							{
+								rate = 0.0;
+							}
+							if (debug)
+							{
+								System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
+							}
+							testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
 						}
-						if (diskRatio >= 6.0)
+						else
 						{
+							thisDiskFree = 0;
 							rate = 0.0;
+							if (debug)
+							{
+								System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
+							}
+							testName = "reader re-use after disk full";
 						}
-						if (debug)
+						
+						dir.SetMaxSizeInBytes(thisDiskFree);
+						dir.SetRandomIOExceptionRate(rate, diskFree);
+						
+						try
 						{
-							System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
+							if (0 == x)
+							{
+								int docId = 12;
+								for (int i = 0; i < 13; i++)
+								{
+									if (updates)
+									{
+										Document d = new Document();
+										d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+										d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.TOKENIZED));
+										modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
+									}
+									else
+									{
+										// deletes
+										modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
+										// modifier.setNorm(docId, "contents", (float)2.0);
+									}
+									docId += 12;
+								}
+							}
+							modifier.Close();
+							success = true;
+							if (0 == x)
+							{
+								done = true;
+							}
 						}
-						testName = "disk full during reader.Close() @ " + thisDiskFree + " bytes";
-					}
-					else
-					{
-						thisDiskFree = 0;
-						rate = 0.0;
-						if (debug)
+						catch (System.IO.IOException e)
 						{
-							System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
+							if (debug)
+							{
+								//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.toString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
+								System.Console.Out.WriteLine("  hit IOException: " + e);
+								System.Console.Out.WriteLine(e.StackTrace);
+							}
+							err = e;
+							if (1 == x)
+							{
+								System.Console.Error.WriteLine(e.StackTrace);
+								Assert.Fail(testName + " hit IOException after disk space was freed up");
+							}
 						}
-						testName = "reader re-use after disk full";
-					}
-					
-					dir.SetMaxSizeInBytes(thisDiskFree);
-					dir.SetRandomIOExceptionRate(rate, diskFree);
-					
-					try
-					{
-						if (0 == x)
+						
+						// Whether we succeeded or failed, check that all
+						// un-referenced files were in fact deleted (ie,
+						// we did not create garbage). Just create a
+						// new IndexFileDeleter, have it delete
+						// unreferenced files, then verify that in fact
+						// no files were deleted:
+						System.String[] startFiles = dir.List();
+						SegmentInfos infos = new SegmentInfos();
+						infos.Read(dir);
+						new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
+						System.String[] endFiles = dir.List();
+						
+						//UPGRADE_TODO: Method 'java.util.Arrays.sort' was converted to 'System.Array.Sort' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilArrayssort_javalangObject[]'"
+						System.Array.Sort(startFiles);
+						//UPGRADE_TODO: Method 'java.util.Arrays.sort' was converted to 'System.Array.Sort' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilArrayssort_javalangObject[]'"
+						System.Array.Sort(endFiles);
+						
+						// for(int i=0;i<startFiles.length;i++) {
+						// System.out.println(" startFiles: " + i + ": " + startFiles[i]);
+						// }
+						
+						if (!SupportClass.Compare.CompareStringArrays(startFiles, endFiles))
 						{
-							int docId = 12;
-							for (int i = 0; i < 13; i++)
+							System.String successStr;
+							if (success)
 							{
-								if (updates)
-								{
-									Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
-									d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
-									d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.TOKENIZED));
-									modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
-								}
-								else
-								{
-									// deletes
-									modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
-									// modifier.setNorm(docId, "contents", (float)2.0);
-								}
-								docId += 12;
+								successStr = "success";
+							}
+							else
+							{
+								successStr = "IOException";
+								System.Console.Error.WriteLine(err.StackTrace);
 							}
+							Assert.Fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
 						}
-						modifier.Close();
-						success = true;
-						if (0 == x)
+						
+						// Finally, verify index is not corrupt, and, if
+						// we succeeded, we see all docs changed, and if
+						// we failed, we see either all docs or no docs
+						// changed (transactional semantics):
+						IndexReader newReader = null;
+						try
 						{
-							done = true;
+							newReader = IndexReader.Open(dir);
 						}
-					}
-					catch (System.IO.IOException e)
-					{
-						if (debug)
+						catch (System.IO.IOException e)
 						{
-							System.Console.Out.WriteLine("  hit IOException: " + e);
+							System.Console.Error.WriteLine(e.StackTrace);
+							//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.toString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
+							Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
 						}
-						err = e;
-						if (1 == x)
+						
+						IndexSearcher searcher = new IndexSearcher(newReader);
+						Hits hits = null;
+						try
+						{
+							hits = searcher.Search(new TermQuery(searchTerm));
+						}
+						catch (System.IO.IOException e)
 						{
 							System.Console.Error.WriteLine(e.StackTrace);
-							Assert.Fail(testName + " hit IOException after disk space was freed up");
+							//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.toString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
+							Assert.Fail(testName + ": exception when searching: " + e);
 						}
-					}
-					
-					// Whether we succeeded or failed, check that all
-					// un-referenced files were in fact deleted (ie,
-					// we did not create garbage). Just create a
-					// new IndexFileDeleter, have it delete
-					// unreferenced files, then verify that in fact
-					// no files were deleted:
-					System.String[] startFiles = dir.List();
-					SegmentInfos infos = new SegmentInfos();
-					infos.Read(dir);
-					IndexFileDeleter d2 = new IndexFileDeleter(infos, dir);
-					d2.FindDeletableFiles();
-					d2.DeleteFiles();
-					System.String[] endFiles = dir.List();
-					
-					System.Array.Sort(startFiles);
-					System.Array.Sort(endFiles);
-					
-					// for(int i=0;i<startFiles.length;i++) {
-					// System.out.println(" startFiles: " + i + ": " + startFiles[i]);
-					// }
-					
-					if (SupportClass.Compare.CompareStringArrays(startFiles, endFiles) == false)
-					{
-						System.String successStr;
+						int result2 = hits.Length();
 						if (success)
 						{
-							successStr = "success";
+							if (x == 0 && result2 != END_COUNT)
+							{
+								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
+							}
+							else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
+							{
+								// It's possible that the first exception was
+								// "recoverable" wrt pending deletes, in which
+								// case the pending deletes are retained and
+								// then re-flushing (with plenty of disk
+								// space) will succeed in flushing the
+								// deletes:
+								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+							}
 						}
 						else
 						{
-							successStr = "IOException";
-							System.Console.Error.WriteLine(err.StackTrace);
+							// On hitting exception we still may have added
+							// all docs:
+							if (result2 != START_COUNT && result2 != END_COUNT)
+							{
+								System.Console.Error.WriteLine(err.StackTrace);
+								Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+							}
+						}
+						
+						searcher.Close();
+						newReader.Close();
+						
+						if (result2 == END_COUNT)
+						{
+							break;
 						}
-						Assert.Fail("reader.Close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
 					}
 					
-					// Finally, verify index is not corrupt, and, if
-					// we succeeded, we see all docs changed, and if
-					// we failed, we see either all docs or no docs
-					// changed (transactional semantics):
-					IndexReader newReader = null;
-					try
-					{
-						newReader = IndexReader.Open(dir);
-					}
-					catch (System.IO.IOException e)
-					{
-						System.Console.Error.WriteLine(e.StackTrace);
-						Assert.Fail(testName + ":exception when creating IndexReader after disk full during Close: " + e);
-					}
+					dir.Close();
 					
-					IndexSearcher searcher = new IndexSearcher(newReader);
-					Hits hits = null;
+					// Try again with 10 more bytes of free space:
+					diskFree += 10;
+				}
+			}
+		}
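
[The reworked loop drives MockRAMDirectory's two fault knobs, a hard size cap and a random IOException rate, and retries with 10 more bytes of free space each cycle until the operation completes. Condensed to its skeleton (illustrative; the elided body is the delete/update work above):

    long diskFree = startDir.SizeInBytes() + 10;
    bool done = false;
    while (!done)
    {
        MockRAMDirectory dir = new MockRAMDirectory(startDir);
        dir.SetMaxSizeInBytes(diskFree);              // simulate a nearly full disk
        dir.SetRandomIOExceptionRate(0.1, diskFree);  // plus random I/O failures
        try
        {
            // ... apply the deletes/updates and Close() the writer ...
            done = true;                              // enough space this cycle
        }
        catch (System.IO.IOException)
        {
            // expected while space is short; the index must remain consistent
        }
        dir.Close();
        diskFree += 10;                               // retry with a little more room
    }
]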
+		
+		// Tests that buffered deletes are cleared when an
+		// exception is hit during flush.
+		[Test]
+		public virtual void  TestErrorAfterApplyDeletes()
+		{
+			
+			MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
+			
+			// create a couple of files
+			
+			System.String[] keywords = new System.String[]{"1", "2"};
+			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
+			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
+			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
+			
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = (0 == pass);
+				MockRAMDirectory dir = new MockRAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetUseCompoundFile(true);
+				modifier.SetMaxBufferedDeleteTerms(2);
+				
+				dir.FailOn(failure.Reset());
+				
+				for (int i = 0; i < keywords.Length; i++)
+				{
+					Document doc = new Document();
+					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
+					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.TOKENIZED));
+					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.TOKENIZED));
+					modifier.AddDocument(doc);
+				}
+				// flush (and commit if ac)
+				
+				modifier.Optimize();
+				
+				// commit if !ac
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				// one of the two docs matches the query term
+				
+				Term term = new Term("city", "Amsterdam");
+				int hitCount = GetHitCount(dir, term);
+				Assert.AreEqual(1, hitCount);
+				
+				// open the writer again (closed above)
+				
+				if (!autoCommit)
+				{
+					modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+					modifier.SetUseCompoundFile(true);
+				}
+				
+				// delete the doc
+				// max buf del terms is two, so this is buffered
+				
+				modifier.DeleteDocuments(term);
+				
+				// add a doc (needed for the !ac case; see below)
+				// doc remains buffered
+				
+				Document doc2 = new Document();
+				modifier.AddDocument(doc2);
+				
+				// flush the changes, the buffered deletes, and the new doc
+				
+				// The failure object will fail on the first write after the del
+				// file gets created when processing the buffered delete
+				
+				// in the ac case, this will be when writing the new segments
+				// files so we really don't need the new doc, but it's harmless
+				
+				// in the !ac case, a new segments file won't be created but in
+				// this case, creation of the cfs file happens next so we need
+				// the doc (to test that it's okay that we don't lose deletes if
+				// failing while creating the cfs file)
+				
+				bool failed = false;
+				try
+				{
+					modifier.Flush();
+				}
+				catch (System.IO.IOException)
+				{
+					failed = true;
+				}
+				
+				Assert.IsTrue(failed);
+				
+				// The flush above failed, so we need to retry it (which will
+				// succeed, because the failure is a one-shot)
+				
+				if (!autoCommit)
+				{
+					modifier.Close();
+				}
+				else
+				{
+					modifier.Flush();
+				}
+				
+				hitCount = GetHitCount(dir, term);
+				
+				// If the delete was not cleared then hit count will
+				// be 0.  With autoCommit=false, we hit the exception
+				// on creating the compound file, so the delete was
+				// flushed successfully.
+				Assert.AreEqual(autoCommit?1:0, hitCount);
+				
+				if (autoCommit)
+				{
+					modifier.Close();
+				}
+				
+				dir.Close();
+			}
+		}
+		
+		// Tests that the files created by the docs writer before a segment
+		// is written are cleaned up if there's an I/O error.
+		
+		[Test]
+		public virtual void  TestErrorInDocsWriterAdd()
+		{
+			
+			MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
+			
+			// create a couple of files
+			
+			System.String[] keywords = new System.String[]{"1", "2"};
+			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
+			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
+			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
+			
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = (0 == pass);
+				MockRAMDirectory dir = new MockRAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				
+				dir.FailOn(failure.Reset());
+				
+				for (int i = 0; i < keywords.Length; i++)
+				{
+					Document doc = new Document();
+					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
+					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.TOKENIZED));
+					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.TOKENIZED));
 					try
 					{
-						hits = searcher.Search(new TermQuery(searchTerm));
-					}
-					catch (System.IO.IOException e)
-					{
-						System.Console.Error.WriteLine(e.StackTrace);
-						Assert.Fail(testName + ": exception when searching: " + e);
-					}
-					int result2 = hits.Length();
-					if (success)
-					{
-						if (result2 != END_COUNT)
-						{
-							Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
-						}
+						modifier.AddDocument(doc);
 					}
-					else
-					{
-						// On hitting exception we still may have added
-						// all docs:
-						if (result2 != START_COUNT && result2 != END_COUNT)
-						{
-							System.Console.Error.WriteLine(err.StackTrace);
-							Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
-						}
-					}
-					
-					searcher.Close();
-					newReader.Close();
-					
-					if (result2 == END_COUNT)
+					catch (System.IO.IOException)
 					{
 						break;
 					}
 				}
 				
-				dir.Close();
+				System.String[] startFiles = dir.List();
+				SegmentInfos infos = new SegmentInfos();
+				infos.Read(dir);
+				new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
+				System.String[] endFiles = dir.List();
+				
+				if (!SupportClass.Compare.CompareStringArrays(startFiles, endFiles))
+				{
+					Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
+				}
 				
-				// Try again with 10 more bytes of free space:
-				diskFree += 10;
+				modifier.Close();
 			}
 		}
 		

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterLockRelease.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterLockRelease.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterLockRelease.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterLockRelease.cs Tue Jul 15 14:44:04 2008
@@ -19,9 +19,10 @@
 
 using NUnit.Framework;
 
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
 namespace Lucene.Net.Index
 {
-	
 	/// <summary> This tests the patch for issue #LUCENE-715 (IndexWriter does not
 	/// release its write lock when trying to open an index which does not yet
 	/// exist).
@@ -32,14 +33,15 @@
 	/// <version>  $Id$
 	/// </version>
 	
-    [TestFixture]
-    public class TestIndexWriterLockRelease
+	[TestFixture]
+	public class TestIndexWriterLockRelease : LuceneTestCase
 	{
 		private System.IO.FileInfo __test_dir;
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
+			base.SetUp();
 			if (this.__test_dir == null)
 			{
 				System.String tmp_dir = SupportClass.AppSettings.Get("java.io.tmpdir", "tmp");
@@ -56,25 +58,26 @@
 				}
 				
 				bool mustThrow = false;
-                try
-                {
-                    System.IO.Directory.CreateDirectory(this.__test_dir.FullName);
-                    if (!System.IO.Directory.Exists(this.__test_dir.FullName))
-                        mustThrow = true;
-                }
-                catch
-                {
-                    mustThrow = true;
-                }
+				try
+				{
+					System.IO.Directory.CreateDirectory(this.__test_dir.FullName);
+					if (!System.IO.Directory.Exists(this.__test_dir.FullName))
+						mustThrow = true;
+				}
+				catch
+				{
+					mustThrow = true;
+				}
 
-                if (mustThrow)
+				if (mustThrow)
 					throw new System.IO.IOException("unable to create test directory \"" + this.__test_dir.FullName + "\"");
 			}
 		}
 		
-        [TearDown]
-        public virtual void  TearDown()
+		[TearDown]
+		public override void TearDown()
 		{
+			base.TearDown();
 			if (this.__test_dir != null)
 			{
 				System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(this.__test_dir);
@@ -120,22 +123,22 @@
 			}
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  _TestIndexWriterLockRelease()
 		{
-			IndexModifier im;
+			IndexWriter im;
 			
 			try
 			{
-				im = new IndexModifier(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
+				im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
 			}
-			catch (System.IO.FileNotFoundException e)
+			catch (System.IO.FileNotFoundException)
 			{
 				try
 				{
-					im = new IndexModifier(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
+					im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
 				}
-				catch (System.IO.FileNotFoundException e1)
+				catch (System.IO.FileNotFoundException)
 				{
 				}
 			}
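
[The double try/catch is the whole point of LUCENE-715: a failed open of a non-existent index must still release the write lock, so the second attempt fails with FileNotFoundException again rather than a lock-obtain timeout. Restated as a minimal repro (illustrative only):

    try
    {
        new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
    }
    catch (System.IO.FileNotFoundException)
    {
        // the write lock must have been released; this must not time out on the lock
        try { new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false); }
        catch (System.IO.FileNotFoundException) { /* expected again */ }
    }
]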

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMergePolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterMergePolicy.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMergePolicy.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMergePolicy.cs Tue Jul 15 14:44:04 2008
@@ -19,21 +19,23 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
-    [TestFixture]
-    public class TestIndexWriterMergePolicy
+	[TestFixture]
+	public class TestIndexWriterMergePolicy : LuceneTestCase
 	{
 		
 		// Test the normal case
-        [Test]
+		[Test]
 		public virtual void  TestNormalCase()
 		{
 			Directory dir = new RAMDirectory();
@@ -41,6 +43,7 @@
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
+			writer.SetMergePolicy(new LogDocMergePolicy());
 			
 			for (int i = 0; i < 100; i++)
 			{
@@ -52,7 +55,7 @@
 		}
 		
 		// Test to see if there is over merge
-        [Test]
+		[Test]
 		public virtual void  TestNoOverMerge()
 		{
 			Directory dir = new RAMDirectory();
@@ -60,13 +63,14 @@
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
+			writer.SetMergePolicy(new LogDocMergePolicy());
 			
 			bool noOverMerge = false;
 			for (int i = 0; i < 100; i++)
 			{
 				AddDoc(writer);
 				CheckInvariants(writer);
-				if (writer.GetRamSegmentCount() + writer.GetSegmentCount() >= 18)
+				if (writer.GetNumBufferedDocuments() + writer.GetSegmentCount() >= 18)
 				{
 					noOverMerge = true;
 				}
@@ -77,7 +81,7 @@
 		}
 		
 		// Test the case where flush is forced after every AddDoc
-        [Test]
+		[Test]
 		public virtual void  TestForceFlush()
 		{
 			Directory dir = new RAMDirectory();
@@ -85,6 +89,9 @@
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
+			LogDocMergePolicy mp = new LogDocMergePolicy();
+			mp.SetMinMergeDocs(100);
+			writer.SetMergePolicy(mp);
 			
 			for (int i = 0; i < 100; i++)
 			{
@@ -94,6 +101,8 @@
 				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
 				writer.SetMaxBufferedDocs(10);
 				writer.SetMergeFactor(10);
+				mp.SetMinMergeDocs(100);
+				writer.SetMergePolicy(mp);
 				CheckInvariants(writer);
 			}
 			
@@ -101,7 +110,7 @@
 		}
 		
 		// Test the case where mergeFactor changes
-        [Test]
+		[Test]
 		public virtual void  TestMergeFactorChange()
 		{
 			Directory dir = new RAMDirectory();
@@ -109,6 +118,7 @@
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(100);
+			writer.SetMergePolicy(new LogDocMergePolicy());
 			
 			for (int i = 0; i < 250; i++)
 			{
@@ -130,7 +140,7 @@
 		}
 		
 		// Test the case where both mergeFactor and maxBufferedDocs change
-        [Test]
+		[Test]
 		public virtual void  TestMaxBufferedDocsChange()
 		{
 			Directory dir = new RAMDirectory();
@@ -138,6 +148,7 @@
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			writer.SetMaxBufferedDocs(101);
 			writer.SetMergeFactor(101);
+			writer.SetMergePolicy(new LogDocMergePolicy());
 			
 			// leftmost* segment has 1 doc
 			// rightmost* segment has 100 docs
@@ -153,6 +164,7 @@
 				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
 				writer.SetMaxBufferedDocs(101);
 				writer.SetMergeFactor(101);
+				writer.SetMergePolicy(new LogDocMergePolicy());
 			}
 			
 			writer.SetMaxBufferedDocs(10);
@@ -176,12 +188,13 @@
 		}
 		
 		// Test the case where a merge results in no doc at all
-        [Test]
+		[Test]
 		public virtual void  TestMergeDocCount0()
 		{
 			Directory dir = new RAMDirectory();
 			
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			writer.SetMergePolicy(new LogDocMergePolicy());
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(100);
 			
@@ -197,6 +210,7 @@
 			reader.Close();
 			
 			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer.SetMergePolicy(new LogDocMergePolicy());
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(5);
 			
@@ -220,11 +234,12 @@
 		
 		private void  CheckInvariants(IndexWriter writer)
 		{
+			_TestUtil.SyncConcurrentMerges(writer);
 			int maxBufferedDocs = writer.GetMaxBufferedDocs();
 			int mergeFactor = writer.GetMergeFactor();
 			int maxMergeDocs = writer.GetMaxMergeDocs();
 			
-			int ramSegmentCount = writer.GetRamSegmentCount();
+			int ramSegmentCount = writer.GetNumBufferedDocuments();
 			Assert.IsTrue(ramSegmentCount < maxBufferedDocs);
 			
 			int lowerBound = - 1;
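
[The recurring change in this file is pinning each writer to a LogDocMergePolicy so that CheckInvariants(), now preceded by _TestUtil.SyncConcurrentMerges(writer), can reason about doc-count-based merge levels. The setup pattern, condensed from the hunks above:

    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    LogDocMergePolicy mp = new LogDocMergePolicy();
    mp.SetMinMergeDocs(100);            // keep small flushed segments mergeable
    writer.SetMergePolicy(mp);          // doc-count levels, as the invariants assume
    writer.SetMaxBufferedDocs(10);
    writer.SetMergeFactor(10);
]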

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMerging.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterMerging.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMerging.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMerging.cs Tue Jul 15 14:44:04 2008
@@ -19,95 +19,97 @@
 
 using NUnit.Framework;
 
-using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
 	
-    [TestFixture]
-    public class TestIndexWriterMerging
-    {
+	[TestFixture]
+	public class TestIndexWriterMerging : LuceneTestCase
+	{
 		
-        /// <summary> Tests that index merging (specifically addIndexes()) doesn't
-        /// change the index order of documents.
-        /// </summary>
-        [Test]
-        public virtual void  TestLucene()
-        {
-			
-            int num = 100;
-			
-            Directory indexA = new RAMDirectory();
-            Directory indexB = new RAMDirectory();
-			
-            FillIndex(indexA, 0, num);
-            bool fail = VerifyIndex(indexA, 0);
-            if (fail)
-            {
-                Assert.Fail("Index a is invalid");
-            }
-			
-            FillIndex(indexB, num, num);
-            fail = VerifyIndex(indexB, num);
-            if (fail)
-            {
-                Assert.Fail("Index b is invalid");
-            }
-			
-            Directory merged = new RAMDirectory();
-			
-            IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true);
-            writer.SetMergeFactor(2);
+		/// <summary> Tests that index merging (specifically addIndexes()) doesn't
+		/// change the index order of documents.
+		/// </summary>
+		[Test]
+		public virtual void  TestLucene()
+		{
+			
+			int num = 100;
+
+			Directory indexA = new MockRAMDirectory();
+			Directory indexB = new MockRAMDirectory();
+			
+			FillIndex(indexA, 0, num);
+			bool fail = VerifyIndex(indexA, 0);
+			if (fail)
+			{
+				Assert.Fail("Index a is invalid");
+			}
+			
+			FillIndex(indexB, num, num);
+			fail = VerifyIndex(indexB, num);
+			if (fail)
+			{
+				Assert.Fail("Index b is invalid");
+			}
+
+			Directory merged = new MockRAMDirectory();
+			
+			IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true);
+			writer.SetMergeFactor(2);
 			
-            writer.AddIndexes(new Directory[]{indexA, indexB});
-            writer.Close();
+			writer.AddIndexes(new Directory[]{indexA, indexB});
+			writer.Close();
 			
-            fail = VerifyIndex(merged, 0);
-            merged.Close();
+			fail = VerifyIndex(merged, 0);
+			merged.Close();
 			
-            Assert.IsFalse(fail, "The merged index is invalid");
-        }
+			Assert.IsFalse(fail, "The merged index is invalid");
+		}
 		
-        private bool VerifyIndex(Directory directory, int startAt)
-        {
-            bool fail = false;
-            IndexReader reader = IndexReader.Open(directory);
-			
-            int max = reader.MaxDoc();
-            for (int i = 0; i < max; i++)
-            {
-                Lucene.Net.Documents.Document temp = reader.Document(i);
-                //System.out.println("doc "+i+"="+temp.getField("count").stringValue());
-                //compare the index doc number to the value that it should be
-                if (!temp.GetField("count").StringValue().Equals((i + startAt) + ""))
-                {
-                    fail = true;
-                    System.Console.Out.WriteLine("Document " + (i + startAt) + " is returning document " + temp.GetField("count").StringValue());
-                }
-            }
-            return fail;
-        }
+		private bool VerifyIndex(Directory directory, int startAt)
+		{
+			bool fail = false;
+			IndexReader reader = IndexReader.Open(directory);
+			
+			int max = reader.MaxDoc();
+			for (int i = 0; i < max; i++)
+			{
+				Lucene.Net.Documents.Document temp = reader.Document(i);
+				//System.out.println("doc "+i+"="+temp.getField("count").stringValue());
+				//compare the index doc number to the value that it should be
+				if (!temp.GetField("count").StringValue().Equals((i + startAt) + ""))
+				{
+					fail = true;
+					System.Console.Out.WriteLine("Document " + (i + startAt) + " is returning document " + temp.GetField("count").StringValue());
+				}
+			}
+			reader.Close();
+			return fail;
+		}
 		
-        private void  FillIndex(Directory dir, int start, int numDocs)
-        {
+		private void  FillIndex(Directory dir, int start, int numDocs)
+		{
 			
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
-            writer.SetMergeFactor(2);
-            writer.SetMaxBufferedDocs(2);
-			
-            for (int i = start; i < (start + numDocs); i++)
-            {
-                Lucene.Net.Documents.Document temp = new Lucene.Net.Documents.Document();
-                temp.Add(new Field("count", ("" + i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+			writer.SetMergeFactor(2);
+			writer.SetMaxBufferedDocs(2);
+			
+			for (int i = start; i < (start + numDocs); i++)
+			{
+				Lucene.Net.Documents.Document temp = new Lucene.Net.Documents.Document();
+				temp.Add(new Field("count", ("" + i), Field.Store.YES, Field.Index.UN_TOKENIZED));
 				
-                writer.AddDocument(temp);
-            }
-            writer.Close();
-        }
-    }
+				writer.AddDocument(temp);
+			}
+			writer.Close();
+		}
+	}
 }
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestInputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestInputStream.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestInputStream.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestInputStream.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// {{Aroush-2.3.1}} remove this file from SVN
+/*
+using System;
+using NUnit.Framework;
+using InputStream = Lucene.Net.Store.InputStream;
+namespace Lucene.Net.Index
+{
+	[TestFixture]
+	public class TestInputStream
+	{
+        [Test]
+		public virtual void  TestRead()
+		{
+			InputStream is_Renamed = new MockInputStream(new byte[] 
+                {
+                    (byte) 0x80, (byte) 0x01, (byte) 0xFF, (byte) 0x7F, 
+                    (byte) 0x80, (byte) 0x80, (byte) 0x01, (byte) 0x81, 
+                    (byte) 0x80, (byte) 0x01, (byte) 0x06, (byte) 'L', 
+                    (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', 
+                    (byte) 'e'}
+                );
+			Assert.AreEqual(128, is_Renamed.ReadVInt());
+			Assert.AreEqual(16383, is_Renamed.ReadVInt());
+			Assert.AreEqual(16384, is_Renamed.ReadVInt());
+			Assert.AreEqual(16385, is_Renamed.ReadVInt());
+			Assert.AreEqual("Lucene", is_Renamed.ReadString());
+		}
+	}
+}
+*/
\ No newline at end of file
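
[Though the file is commented out pending removal, the byte patterns it asserts document Lucene's VInt encoding: seven payload bits per byte, lowest-order group first, with the high bit set when another byte follows. Worked through, with a standalone decoder sketch (illustrative only; the shipped implementation lives in the store's input classes):

    //   128   = 1 0000000          -> 0x80 0x01
    //   16383 = 1111111 1111111    -> 0xFF 0x7F
    //   16384 = 1 0000000 0000000  -> 0x80 0x80 0x01
    //   16385 = 1 0000000 0000001  -> 0x81 0x80 0x01
    private static int ReadVInt(System.IO.Stream s)
    {
        int b = s.ReadByte();
        int value = b & 0x7F;                     // low 7 bits first
        for (int shift = 7; (b & 0x80) != 0; shift += 7)
        {
            b = s.ReadByte();
            value |= (b & 0x7F) << shift;         // next 7 bits, higher order
        }
        return value;
    }
]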

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyBug.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestLazyBug.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyBug.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyBug.cs Tue Jul 15 14:44:04 2008
@@ -19,11 +19,12 @@
 
 using NUnit.Framework;
 
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Index
 {
@@ -33,8 +34,9 @@
 	/// if other docs have already been accessed.
 	/// </summary>
 	[TestFixture]
-    public class TestLazyBug
+	public class TestLazyBug : LuceneTestCase
 	{
+		[Serializable]
 		public class AnonymousClassFieldSelector : FieldSelector
 		{
 			public virtual FieldSelectorResult Accept(System.String f)
@@ -91,13 +93,13 @@
 		
 		public static void  DoTest(int[] docs)
 		{
-            if (dataset.Count == 0)
-            {
-                for (int i = 0; i < data.Length; i++)
-                {
-                    dataset.Add(data[i], data[i]);
-                }
-            }
+			if (dataset.Count == 0)
+			{
+				for (int i = 0; i < data.Length; i++)
+				{
+					dataset.Add(data[i], data[i]);
+				}
+			}
 
 			Directory dir = MakeIndex();
 			IndexReader reader = IndexReader.Open(dir);
@@ -131,19 +133,19 @@
 			reader.Close();
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestLazyWorks()
 		{
 			DoTest(new int[]{399});
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestLazyAlsoWorks()
 		{
 			DoTest(new int[]{399, 150});
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestLazyBroken()
 		{
 			DoTest(new int[]{150, 399});

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyProxSkipping.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestLazyProxSkipping.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyProxSkipping.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyProxSkipping.cs Tue Jul 15 14:44:04 2008
@@ -19,16 +19,17 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using IndexInput = Lucene.Net.Store.IndexInput;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using PhraseQuery = Lucene.Net.Search.PhraseQuery;
 using Searcher = Lucene.Net.Search.Searcher;
-using Directory = Lucene.Net.Store.Directory;
-using IndexInput = Lucene.Net.Store.IndexInput;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
@@ -37,7 +38,7 @@
 	/// 
 	/// </summary>
 	[TestFixture]
-    public class TestLazyProxSkipping
+	public class TestLazyProxSkipping : LuceneTestCase
 	{
 		private Searcher searcher;
 		private int seeksCounter = 0;
@@ -53,7 +54,7 @@
 			
 			Directory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
-			
+			writer.SetMaxBufferedDocs(10);
 			for (int i = 0; i < numDocs; i++)
 			{
 				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
@@ -86,7 +87,7 @@
 			SegmentReader reader = (SegmentReader) IndexReader.Open(directory);
 			
 			// we decorate the proxStream with a wrapper class that lets us count the number of calls to seek()
-			reader.ProxStream = new SeeksCountingStream(this, reader.ProxStream);
+			reader.ProxStream_ForNUnitTest = new SeeksCountingStream(this, reader.ProxStream_ForNUnitTest);
 			
 			this.searcher = new IndexSearcher(reader);
 		}
@@ -109,10 +110,10 @@
 			Assert.AreEqual(numHits, hits.Length());
 			
 			// check that the number of calls to seek() does not exceed the number of hits
-			Assert.AreEqual(numHits, this.seeksCounter);
+			Assert.IsTrue(this.seeksCounter <= numHits + 1);
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestLazySkipping()
 		{
 			// test whether only the minimum amount of seeks() are performed
@@ -120,6 +121,37 @@
 			PerformTest(10);
 		}
 		
+		[Test]
+		public virtual void  TestSeek()
+		{
+			Directory directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			for (int i = 0; i < 10; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field(this.field, "a b", Field.Store.YES, Field.Index.TOKENIZED));
+				writer.AddDocument(doc);
+			}
+			
+			writer.Close();
+			IndexReader reader = IndexReader.Open(directory);
+			TermPositions tp = reader.TermPositions();
+			tp.Seek(new Term(this.field, "b"));
+			for (int i = 0; i < 10; i++)
+			{
+				tp.Next();
+				Assert.AreEqual(tp.Doc(), i);
+				Assert.AreEqual(tp.NextPosition(), 1);
+			}
+			tp.Seek(new Term(this.field, "a"));
+			for (int i = 0; i < 10; i++)
+			{
+				tp.Next();
+				Assert.AreEqual(tp.Doc(), i);
+				Assert.AreEqual(tp.NextPosition(), 0);
+			}
+		}
+		
 		
 		// Simply extends IndexInput so that we can count the number
 		// of invocations of seek()

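TestLazyProxSkipping works by decorating the segment's prox stream so that every Seek() is observable. A minimal sketch of that decorator idea, assuming the same IndexInput surface that CountingStream overrides in the next file; the real SeeksCountingStream additionally reports into the fixture's seeksCounter field so that clones share one tally:

    using IndexInput = Lucene.Net.Store.IndexInput;

    // Hedged sketch: count Seek() calls on an IndexInput by delegation.
    internal class SeekCountingInput : IndexInput, System.ICloneable
    {
        private IndexInput input;
        internal int seeks; // incremented on every Seek(); each clone keeps its own tally here

        internal SeekCountingInput(IndexInput input)
        {
            this.input = input;
        }

        public override void  Seek(long pos)
        {
            seeks++;
            input.Seek(pos);
        }

        public override byte ReadByte()
        {
            return input.ReadByte();
        }

        public override void  ReadBytes(byte[] b, int offset, int len)
        {
            input.ReadBytes(b, offset, len);
        }

        public override void  Close()
        {
            input.Close();
        }

        public override long GetFilePointer()
        {
            return input.GetFilePointer();
        }

        public override long Length()
        {
            return input.Length();
        }

        public override System.Object Clone()
        {
            return new SeekCountingInput((IndexInput) input.Clone());
        }
    }
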
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiLevelSkipList.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestMultiLevelSkipList.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiLevelSkipList.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiLevelSkipList.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexInput = Lucene.Net.Store.IndexInput;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
+using Token = Lucene.Net.Analysis.Token;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+namespace Lucene.Net.Index
+{
+	
+	/// <summary> This testcase tests whether multi-level skipping is being used
+	/// to reduce I/O while skipping through posting lists.
+	/// 
+	/// Skipping in general is already covered by several other
+	/// testcases.
+	/// 
+	/// </summary>
+	[TestFixture]
+	public class TestMultiLevelSkipList : LuceneTestCase
+	{
+		[Test]
+		public virtual void  TestSimpleSkip()
+		{
+			RAMDirectory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true);
+			Term term = new Term("test", "a");
+			for (int i = 0; i < 5000; i++)
+			{
+				Document d1 = new Document();
+				d1.Add(new Field(term.Field(), term.Text(), Field.Store.NO, Field.Index.TOKENIZED));
+				writer.AddDocument(d1);
+			}
+			writer.Flush();
+			writer.Optimize();
+			writer.Close();
+			
+			IndexReader reader = IndexReader.Open(dir);
+			SegmentTermPositions tp = (SegmentTermPositions) reader.TermPositions();
+			tp.FreqStream_ForNUnitTest = new CountingStream(this, tp.FreqStream_ForNUnitTest);
+			
+			for (int i = 0; i < 2; i++)
+			{
+				counter = 0;
+				tp.Seek(term);
+				
+				CheckSkipTo(tp, 14, 185); // no skips
+				CheckSkipTo(tp, 17, 190); // one skip on level 0
+				CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0
+				
+				// this test would fail if we had only one skip level,
+				// because then more bytes would be read from the freqStream
+				CheckSkipTo(tp, 4800, 250); // one skip on level 2
+			}
+		}
+		
+		public virtual void  CheckSkipTo(TermPositions tp, int target, int maxCounter)
+		{
+			tp.SkipTo(target);
+			if (maxCounter < counter)
+			{
+				Assert.Fail("Too many bytes read: " + counter);
+			}
+			
+			Assert.AreEqual(target, tp.Doc(), "Wrong document " + tp.Doc() + " after skipTo target " + target);
+			Assert.AreEqual(1, tp.Freq(), "Frequency is not 1: " + tp.Freq());
+			tp.NextPosition();
+			byte[] b = new byte[1];
+			tp.GetPayload(b, 0);
+			Assert.AreEqual((byte) target, b[0], "Wrong payload for the target " + target + ": " + b[0]);
+		}
+		
+		private class PayloadAnalyzer : Analyzer
+		{
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				return new PayloadFilter(new LowerCaseTokenizer(reader));
+			}
+		}
+		
+		private class PayloadFilter : TokenFilter
+		{
+			internal static int count = 0;
+			
+			protected internal PayloadFilter(TokenStream input):base(input)
+			{
+			}
+			
+			public override Token Next()
+			{
+				Token t = input.Next();
+				if (t != null)
+				{
+					t.SetPayload(new Payload(new byte[]{(byte) count++}));
+				}
+				return t;
+			}
+		}
+		
+		private int counter = 0;
+		
+		// Simply extends IndexInput so that we can count the number
+		// of bytes read
+		internal class CountingStream : IndexInput, System.ICloneable
+		{
+			private void  InitBlock(TestMultiLevelSkipList enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestMultiLevelSkipList enclosingInstance;
+			public TestMultiLevelSkipList Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private IndexInput input;
+			
+			internal CountingStream(TestMultiLevelSkipList enclosingInstance, IndexInput input)
+			{
+				InitBlock(enclosingInstance);
+				this.input = input;
+			}
+			
+			public override byte ReadByte()
+			{
+				Enclosing_Instance.counter++;
+				return this.input.ReadByte();
+			}
+			
+			public override void  ReadBytes(byte[] b, int offset, int len)
+			{
+				Enclosing_Instance.counter += len;
+				this.input.ReadBytes(b, offset, len);
+			}
+			
+			public override void  Close()
+			{
+				this.input.Close();
+			}
+			
+			public override long GetFilePointer()
+			{
+				return this.input.GetFilePointer();
+			}
+			
+			public override void  Seek(long pos)
+			{
+				this.input.Seek(pos);
+			}
+			
+			public override long Length()
+			{
+				return this.input.Length();
+			}
+			
+			public override System.Object Clone()
+			{
+				return new CountingStream(enclosingInstance, (IndexInput) this.input.Clone());
+			}
+		}
+	}
+}
\ No newline at end of file

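The targets in TestSimpleSkip line up with Lucene's default skip interval of 16: level 0 holds an entry roughly every 16 documents, level 1 every 16^2 = 256, level 2 every 16^3 = 4096. A back-of-the-envelope sketch, illustrative arithmetic only and assuming that default interval (exact per-level hop counts depend on where the real entries fall within each block):

    // Hedged sketch: the deepest skip level worth consulting for a target,
    // assuming an entry every 16^(level+1) documents (default skipInterval 16).
    private static int HighestUsefulSkipLevel(int target)
    {
        int level = -1;
        for (long step = 16; step <= target; step *= 16)
        {
            level++;
        }
        return level;
    }
    // 14   -> -1 (no skips),   17   -> 0 (level 0),
    // 287  ->  1 (level 1),    4800 -> 2 (level 2) -- matching the test comments
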
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestMultiReader.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs Tue Jul 15 14:44:04 2008
@@ -19,148 +19,37 @@
 
 using NUnit.Framework;
 
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Index
 {
 	
 	[TestFixture]
-	public class TestMultiReader
+	public class TestMultiReader : TestMultiSegmentReader
 	{
-		private Directory dir = new RAMDirectory();
-		private Lucene.Net.Documents.Document doc1 = new Lucene.Net.Documents.Document();
-		private Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
-		private SegmentReader reader1;
-		private SegmentReader reader2;
-		private SegmentReader[] readers = new SegmentReader[2];
-		private SegmentInfos sis = new SegmentInfos();
-		
-        // public TestMultiReader(System.String s)
-        // {
-        // }
-		
-        // This is needed if for the test to pass and mimic what happens wiht JUnit
-        // For some reason, JUnit is creating a new member variable for each sub-test
-        // but NUnit is not -- who is wrong/right, I don't know.
-        private void SetUpInternal()        // {{Aroush-1.9}} See note above
-        {
-		    dir = new RAMDirectory();
-		    doc1 = new Lucene.Net.Documents.Document();
-		    doc2 = new Lucene.Net.Documents.Document();
-		    readers = new SegmentReader[2];
-		    sis = new SegmentInfos();
-        }
-
-		[SetUp]
-        public virtual void  SetUp()
+		public TestMultiReader():base()
 		{
-            SetUpInternal();    // We need this for NUnit; see note above
-
-			DocHelper.SetupDoc(doc1);
-			DocHelper.SetupDoc(doc2);
-			DocHelper.WriteDoc(dir, "seg-1", doc1);
-			DocHelper.WriteDoc(dir, "seg-2", doc2);
-			sis.Write(dir);
-			reader1 = SegmentReader.Get(new SegmentInfo("seg-1", 1, dir));
-			reader2 = SegmentReader.Get(new SegmentInfo("seg-2", 1, dir));
-			readers[0] = reader1;
-			readers[1] = reader2;
 		}
 		
-		[Test]
-        public virtual void  Test()
+		protected internal override IndexReader OpenReader()
 		{
-			Assert.IsTrue(dir != null);
+			IndexReader reader;
+			
+			sis.Read(dir);
+			SegmentReader reader1 = SegmentReader.Get(sis.Info(0));
+			SegmentReader reader2 = SegmentReader.Get(sis.Info(1));
+			readers[0] = reader1;
+			readers[1] = reader2;
 			Assert.IsTrue(reader1 != null);
 			Assert.IsTrue(reader2 != null);
-			Assert.IsTrue(sis != null);
-		}
-		
-		[Test]
-        public virtual void  TestDocument()
-		{
-			sis.Read(dir);
-			MultiReader reader = new MultiReader(dir, sis, false, readers);
-			Assert.IsTrue(reader != null);
-			Lucene.Net.Documents.Document newDoc1 = reader.Document(0);
-			Assert.IsTrue(newDoc1 != null);
-			Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
-			Lucene.Net.Documents.Document newDoc2 = reader.Document(1);
-			Assert.IsTrue(newDoc2 != null);
-			Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);
-			TermFreqVector vector = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
-			Assert.IsTrue(vector != null);
-			TestSegmentReader.CheckNorms(reader);
-		}
-		
-		[Test]
-        public virtual void  TestUndeleteAll()
-		{
-			sis.Read(dir);
-			MultiReader reader = new MultiReader(dir, sis, false, readers);
-			Assert.IsTrue(reader != null);
-			Assert.AreEqual(2, reader.NumDocs());
-			reader.DeleteDocument(0);
-			Assert.AreEqual(1, reader.NumDocs());
-			reader.UndeleteAll();
-			Assert.AreEqual(2, reader.NumDocs());
 			
-            // Ensure undeleteAll survives commit/close/reopen:
-            reader.Commit();
-            reader.Close();
-            sis.Read(dir);
-            reader = new MultiReader(dir, sis, false, readers);
-            Assert.AreEqual(2, reader.NumDocs());
+			reader = new MultiReader(readers);
 			
-            reader.DeleteDocument(0);
-            Assert.AreEqual(1, reader.NumDocs());
-            reader.Commit();
-            reader.Close();
-            sis.Read(dir);
-            reader = new MultiReader(dir, sis, false, readers);
-            Assert.AreEqual(1, reader.NumDocs());
-        }
-		
-		[Test]
-		public virtual void  TestTermVectors()
-		{
-			MultiReader reader = new MultiReader(dir, sis, false, readers);
+			Assert.IsTrue(dir != null);
+			Assert.IsTrue(sis != null);
 			Assert.IsTrue(reader != null);
+			
+			return reader;
 		}
-		
-        /* known to fail, see https://issues.apache.org/jira/browse/LUCENE-781
-        public void testIsCurrent() throws IOException {
-        RAMDirectory ramDir1=new RAMDirectory();
-        addDoc(ramDir1, "test foo", true);
-        RAMDirectory ramDir2=new RAMDirectory();
-        addDoc(ramDir2, "test blah", true);
-        IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1), IndexReader.open(ramDir2)};
-        MultiReader mr = new MultiReader(readers);
-        assertTrue(mr.isCurrent());   // just opened, must be current
-        addDoc(ramDir1, "more text", false);
-        assertFalse(mr.isCurrent());   // has been modified, not current anymore
-        addDoc(ramDir2, "even more text", false);
-        assertFalse(mr.isCurrent());   // has been modified even more, not current anymore
-        try {
-        mr.getVersion();
-        fail();
-        } catch (UnsupportedOperationException e) {
-        // expected exception
-        }
-        mr.close();
-        }
-		
-        private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
-        IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(), create);
-        Document doc = new Document();
-        doc.add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
-        iw.addDocument(doc);
-        iw.close();
-        }
-        */
-    }
+	}
 }
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestMultiSegmentReader.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiSegmentReader.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiSegmentReader.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+
+namespace Lucene.Net.Index
+{
+	
+	[TestFixture]
+	public class TestMultiSegmentReader : LuceneTestCase
+	{
+		protected internal Directory dir;
+		private Document doc1;
+		private Document doc2;
+		protected internal SegmentReader[] readers = new SegmentReader[2];
+		protected internal SegmentInfos sis;
+		
+		
+		//public TestMultiSegmentReader(System.String s):base(s)
+		//{
+		//}
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			dir = new RAMDirectory();
+			doc1 = new Document();
+			doc2 = new Document();
+			DocHelper.SetupDoc(doc1);
+			DocHelper.SetupDoc(doc2);
+			SegmentInfo info1 = DocHelper.WriteDoc(dir, doc1);
+			SegmentInfo info2 = DocHelper.WriteDoc(dir, doc2);
+			sis = new SegmentInfos();
+			sis.Read(dir);
+		}
+		
+		protected internal virtual IndexReader OpenReader()
+		{
+			IndexReader reader;
+			reader = IndexReader.Open(dir);
+			Assert.IsTrue(reader is MultiSegmentReader);
+			
+			Assert.IsTrue(dir != null);
+			Assert.IsTrue(sis != null);
+			Assert.IsTrue(reader != null);
+			
+			return reader;
+		}
+		
+		[Test]
+		public virtual void  Test()
+		{
+			SetUp();
+			DoTestDocument();
+			DoTestUndeleteAll();
+		}
+		
+		public virtual void  DoTestDocument()
+		{
+			sis.Read(dir);
+			IndexReader reader = OpenReader();
+			Assert.IsTrue(reader != null);
+			Document newDoc1 = reader.Document(0);
+			Assert.IsTrue(newDoc1 != null);
+			Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
+			Document newDoc2 = reader.Document(1);
+			Assert.IsTrue(newDoc2 != null);
+			Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);
+			TermFreqVector vector = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
+			Assert.IsTrue(vector != null);
+			TestSegmentReader.CheckNorms(reader);
+		}
+		
+		public virtual void  DoTestUndeleteAll()
+		{
+			sis.Read(dir);
+			IndexReader reader = OpenReader();
+			Assert.IsTrue(reader != null);
+			Assert.AreEqual(2, reader.NumDocs());
+			reader.DeleteDocument(0);
+			Assert.AreEqual(1, reader.NumDocs());
+			reader.UndeleteAll();
+			Assert.AreEqual(2, reader.NumDocs());
+			
+			// Ensure undeleteAll survives commit/close/reopen:
+			reader.Commit();
+			reader.Close();
+			
+			if (reader is MultiReader)
+			// MultiReader does not "own" the directory so it does
+			// not write the changes to sis on commit:
+				sis.Write(dir);
+			
+			sis.Read(dir);
+			reader = OpenReader();
+			Assert.AreEqual(2, reader.NumDocs());
+			
+			reader.DeleteDocument(0);
+			Assert.AreEqual(1, reader.NumDocs());
+			reader.Commit();
+			reader.Close();
+			if (reader is MultiReader)
+			// MultiReader does not "own" the directory so it does
+			// not write the changes to sis on commit:
+				sis.Write(dir);
+			sis.Read(dir);
+			reader = OpenReader();
+			Assert.AreEqual(1, reader.NumDocs());
+		}
+		
+		
+		public virtual void  _testTermVectors()
+		{
+			MultiReader reader = new MultiReader(readers);
+			Assert.IsTrue(reader != null);
+		}
+		
+		
+		[Test]
+		public virtual void  TestIsCurrent()
+		{
+			RAMDirectory ramDir1 = new RAMDirectory();
+			AddDoc(ramDir1, "test foo", true);
+			RAMDirectory ramDir2 = new RAMDirectory();
+			AddDoc(ramDir2, "test blah", true);
+			IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir2)};
+			MultiReader mr = new MultiReader(readers);
+			Assert.IsTrue(mr.IsCurrent()); // just opened, must be current
+			AddDoc(ramDir1, "more text", false);
+			Assert.IsFalse(mr.IsCurrent()); // has been modified, not current anymore
+			AddDoc(ramDir2, "even more text", false);
+			Assert.IsFalse(mr.IsCurrent()); // has been modified even more, not current anymore
+			try
+			{
+				mr.GetVersion();
+				Assert.Fail();
+			}
+			catch (System.NotSupportedException)
+			{
+				// expected exception
+			}
+			mr.Close();
+		}
+		
+		private void  AddDoc(RAMDirectory ramDir1, System.String s, bool create)
+		{
+			IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(), create);
+			Document doc = new Document();
+			doc.Add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
+			iw.AddDocument(doc);
+			iw.Close();
+		}
+	}
+}
\ No newline at end of file

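TestIsCurrent above pins down a subtle contract: a MultiReader spanning several directories can still answer IsCurrent() (it is current only while every sub-reader is), but it has no single index version, so GetVersion() throws. A short usage sketch, where dirA and dirB stand for two already-populated directories and are hypothetical names, not part of this commit:

    // Hedged sketch of the contract exercised by TestIsCurrent.
    IndexReader[] subReaders = new IndexReader[] { IndexReader.Open(dirA), IndexReader.Open(dirB) };
    MultiReader mr = new MultiReader(subReaders);
    bool fresh = mr.IsCurrent(); // true until either directory is modified
    // mr.GetVersion() would throw System.NotSupportedException here:
    // there is no single version number across two independent directories.
    mr.Close();
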
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestNorms.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestNorms.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestNorms.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestNorms.cs Tue Jul 15 14:44:04 2008
@@ -19,23 +19,26 @@
 
 using NUnit.Framework;
 
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Index = Lucene.Net.Documents.Field.Index;
 using Store = Lucene.Net.Documents.Field.Store;
-using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
-using Similarity = Lucene.Net.Search.Similarity;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
+using Similarity = Lucene.Net.Search.Similarity;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
-	/// <summary> Test that norms info is preserved during index life - incluidng seprate norms, addDocument, addIndexes, optimize.</summary>
-    [TestFixture]
-    public class TestNorms
+	/// <summary> Test that norms info is preserved during index life - including
+	/// separate norms, addDocument, addIndexes, optimize.
+	/// </summary>
+	[TestFixture]
+	public class TestNorms : LuceneTestCase
 	{
 		
 		[Serializable]
@@ -75,25 +78,21 @@
 		private float normDelta = (float) 0.001;
 		
 		
-        [SetUp]
-		public virtual void  SetUp()
+		[SetUp]
+		public override void SetUp()
 		{
+			base.SetUp();
 			similarityOne = new SimilarityOne(this);
 			anlzr = new StandardAnalyzer();
 		}
 		
-        [TearDown]
-		public virtual void  TearDown()
-		{
-		}
-		
 		/// <summary> Test that norms values are preserved as the index is maintained.
 		/// Including separate norms.
 		/// Including merging indexes with separate norms. 
 		/// Including optimize. 
 		/// </summary>
 		[Test]
-        public virtual void  _TestNorms()
+		public virtual void  _TestNorms()
 		{
 			// tmp dir
 			System.String tempDir = System.IO.Path.GetTempPath();


