lucenenet-commits mailing list archives

From dougs...@apache.org
Subject svn commit: r798995 [23/35] - in /incubator/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/QueryParser/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net...
Date Wed, 29 Jul 2009 18:04:24 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TeeSinkTokenTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TeeSinkTokenTest.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TeeSinkTokenTest.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TeeSinkTokenTest.cs Wed Jul 29 18:04:12 2009
@@ -22,13 +22,14 @@
 using StandardFilter = Lucene.Net.Analysis.Standard.StandardFilter;
 using StandardTokenizer = Lucene.Net.Analysis.Standard.StandardTokenizer;
 using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Analysis
 {
 	
 	/// <summary> tests for the TeeTokenFilter and SinkTokenizer</summary>
 	[TestFixture]
-	public class TeeSinkTokenTest
+	public class TeeSinkTokenTest : LuceneTestCase
 	{
 		private class AnonymousClassSinkTokenizer : SinkTokenizer
 		{
@@ -51,7 +52,7 @@
 			}
 			public override void  Add(Token t)
 			{
-				if (t != null && t.TermText().ToUpper().Equals("The".ToUpper()))
+				if (t != null && t.Term().ToUpper().Equals("The".ToUpper()))
 				{
 					base.Add(t);
 				}
@@ -79,7 +80,7 @@
 			}
 			public override void  Add(Token t)
 			{
-				if (t != null && t.TermText().ToUpper().Equals("The".ToUpper()))
+				if (t != null && t.Term().ToUpper().Equals("The".ToUpper()))
 				{
 					base.Add(t);
 				}
@@ -107,7 +108,7 @@
 			}
 			public override void  Add(Token t)
 			{
-				if (t != null && t.TermText().ToUpper().Equals("Dogs".ToUpper()))
+				if (t != null && t.Term().ToUpper().Equals("Dogs".ToUpper()))
 				{
 					base.Add(t);
 				}
@@ -119,7 +120,7 @@
 		protected internal System.String[] tokens2;
 		
 		[SetUp]
-		public virtual void  SetUp()
+		override public void  SetUp()
 		{
 			tokens1 = new System.String[]{"The", "quick", "Burgundy", "Fox", "jumped", "over", "the", "lazy", "Red", "Dogs"};
 			tokens2 = new System.String[]{"The", "Lazy", "Dogs", "should", "stay", "on", "the", "porch"};
@@ -137,9 +138,8 @@
 		}
 		
 		[TearDown]
-		public virtual void  TearDown()
+		override public void  TearDown()
 		{
-			
 		}
 		
 		[Test]
@@ -148,20 +148,20 @@
 			
 			SinkTokenizer sink1 = new AnonymousClassSinkTokenizer(this, null);
 			TokenStream source = new TeeTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())), sink1);
-			Token token = null;
 			int i = 0;
-			while ((token = source.Next()) != null)
+            Token reusableToken = new Token();
+            for (Token nextToken = source.Next(reusableToken); nextToken != null; nextToken = source.Next(reusableToken))
 			{
-				Assert.IsTrue(token.TermText().Equals(tokens1[i]) == true, token.TermText() + " is not equal to " + tokens1[i]);
+				Assert.IsTrue(nextToken.Term().Equals(tokens1[i]) == true, nextToken.Term() + " is not equal to " + tokens1[i]);
 				i++;
 			}
 			Assert.IsTrue(i == tokens1.Length, i + " does not equal: " + tokens1.Length);
 			Assert.IsTrue(sink1.GetTokens().Count == 2, "sink1 Size: " + sink1.GetTokens().Count + " is not: " + 2);
 			i = 0;
-			while ((token = sink1.Next()) != null)
-			{
-				Assert.IsTrue(token.TermText().ToUpper().Equals("The".ToUpper()) == true, token.TermText() + " is not equal to " + "The");
-				i++;
+            for (Token token = sink1.Next(reusableToken); token != null; token = sink1.Next(reusableToken))
+            {
+                Assert.IsTrue(token.Term().ToUpper().Equals("The".ToUpper()) == true, token.Term() + " is not equal to " + "The");
+                i++;
 			}
 			Assert.IsTrue(i == sink1.GetTokens().Count, i + " does not equal: " + sink1.GetTokens().Count);
 		}
@@ -173,45 +173,45 @@
 			SinkTokenizer dogDetector = new AnonymousClassSinkTokenizer2(this, null);
 			TokenStream source1 = new CachingTokenFilter(new TeeTokenFilter(new TeeTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())), theDetector), dogDetector));
 			TokenStream source2 = new TeeTokenFilter(new TeeTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer2.ToString())), theDetector), dogDetector);
-			Token token = null;
 			int i = 0;
-			while ((token = source1.Next()) != null)
+            Token reusableToken = new Token();
+            for (Token nextToken = source1.Next(reusableToken); nextToken != null; nextToken = source1.Next(reusableToken))
 			{
-				Assert.IsTrue(token.TermText().Equals(tokens1[i]) == true, token.TermText() + " is not equal to " + tokens1[i]);
+                Assert.IsTrue(nextToken.Term().Equals(tokens1[i]) == true, nextToken.Term() + " is not equal to " + tokens1[i]);
 				i++;
 			}
 			Assert.IsTrue(i == tokens1.Length, i + " does not equal: " + tokens1.Length);
 			Assert.IsTrue(theDetector.GetTokens().Count == 2, "theDetector Size: " + theDetector.GetTokens().Count + " is not: " + 2);
 			Assert.IsTrue(dogDetector.GetTokens().Count == 1, "dogDetector Size: " + dogDetector.GetTokens().Count + " is not: " + 1);
 			i = 0;
-			while ((token = source2.Next()) != null)
+            for (Token nextToken = source2.Next(reusableToken); nextToken != null; nextToken = source2.Next(reusableToken))
 			{
-				Assert.IsTrue(token.TermText().Equals(tokens2[i]) == true, token.TermText() + " is not equal to " + tokens2[i]);
+				Assert.IsTrue(nextToken.Term().Equals(tokens2[i]) == true, nextToken.Term() + " is not equal to " + tokens2[i]);
 				i++;
 			}
 			Assert.IsTrue(i == tokens2.Length, i + " does not equal: " + tokens2.Length);
 			Assert.IsTrue(theDetector.GetTokens().Count == 4, "theDetector Size: " + theDetector.GetTokens().Count + " is not: " + 4);
 			Assert.IsTrue(dogDetector.GetTokens().Count == 2, "dogDetector Size: " + dogDetector.GetTokens().Count + " is not: " + 2);
 			i = 0;
-			while ((token = theDetector.Next()) != null)
+            for (Token nextToken = theDetector.Next(reusableToken); nextToken != null; nextToken = theDetector.Next(reusableToken))
 			{
-				Assert.IsTrue(token.TermText().ToUpper().Equals("The".ToUpper()) == true, token.TermText() + " is not equal to " + "The");
+				Assert.IsTrue(nextToken.Term().ToUpper().Equals("The".ToUpper()) == true, nextToken.Term() + " is not equal to " + "The");
 				i++;
 			}
 			Assert.IsTrue(i == theDetector.GetTokens().Count, i + " does not equal: " + theDetector.GetTokens().Count);
 			i = 0;
-			while ((token = dogDetector.Next()) != null)
+            for (Token nextToken = dogDetector.Next(reusableToken); nextToken != null; nextToken = dogDetector.Next(reusableToken))
 			{
-				Assert.IsTrue(token.TermText().ToUpper().Equals("Dogs".ToUpper()) == true, token.TermText() + " is not equal to " + "Dogs");
+				Assert.IsTrue(nextToken.Term().ToUpper().Equals("Dogs".ToUpper()) == true, nextToken.Term() + " is not equal to " + "Dogs");
 				i++;
 			}
 			Assert.IsTrue(i == dogDetector.GetTokens().Count, i + " does not equal: " + dogDetector.GetTokens().Count);
 			source1.Reset();
 			TokenStream lowerCasing = new LowerCaseFilter(source1);
 			i = 0;
-			while ((token = lowerCasing.Next()) != null)
+            for (Token nextToken = lowerCasing.Next(reusableToken); nextToken != null; nextToken = lowerCasing.Next(reusableToken))
 			{
-				Assert.IsTrue(token.TermText().Equals(tokens1[i].ToLower()) == true, token.TermText() + " is not equal to " + tokens1[i].ToLower());
+				Assert.IsTrue(nextToken.Term().Equals(tokens1[i].ToLower()) == true, nextToken.Term() + " is not equal to " + tokens1[i].ToLower());
 				i++;
 			}
 			Assert.IsTrue(i == tokens1.Length, i + " does not equal: " + tokens1.Length);
@@ -236,17 +236,16 @@
 				}
 				//make sure we produce the same tokens
 				ModuloSinkTokenizer sink = new ModuloSinkTokenizer(this, tokCount[k], 100);
-				Token next = new Token();
-				TokenStream result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
-				while ((next = result.Next(next)) != null)
+                Token reusableToken = new Token();
+				TokenStream stream = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
+				while ((stream.Next(reusableToken)) != null)
 				{
 				}
-				result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
-				next = new Token();
+				stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
 				System.Collections.IList tmp = new System.Collections.ArrayList();
-				while ((next = result.Next(next)) != null)
+                for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 				{
-					tmp.Add(next.Clone());
+					tmp.Add(nextToken.Clone());
 				}
 				System.Collections.IList sinkList = sink.GetTokens();
 				Assert.IsTrue(tmp.Count == sinkList.Count, "tmp Size: " + tmp.Count + " is not: " + sinkList.Count);
@@ -254,7 +253,7 @@
 				{
 					Token tfTok = (Token) tmp[i];
 					Token sinkTok = (Token) sinkList[i];
-					Assert.IsTrue(tfTok.TermText().Equals(sinkTok.TermText()) == true, tfTok.TermText() + " is not equal to " + sinkTok.TermText() + " at token: " + i);
+					Assert.IsTrue(tfTok.Term().Equals(sinkTok.Term()) == true, tfTok.Term() + " is not equal to " + sinkTok.Term() + " at token: " + i);
 				}
 				//simulate two fields, each being analyzed once, for 20 documents
 				
@@ -264,17 +263,15 @@
 					long start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
 					for (int i = 0; i < 20; i++)
 					{
-						next = new Token();
-						result = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
-						while ((next = result.Next(next)) != null)
+						stream = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
+                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 						{
-							tfPos += next.GetPositionIncrement();
+							tfPos += nextToken.GetPositionIncrement();
 						}
-						next = new Token();
-						result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
-						while ((next = result.Next(next)) != null)
+						stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
+                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 						{
-							tfPos += next.GetPositionIncrement();
+							tfPos += nextToken.GetPositionIncrement();
 						}
 					}
 					long finish = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
@@ -285,17 +282,16 @@
 					for (int i = 0; i < 20; i++)
 					{
 						sink = new ModuloSinkTokenizer(this, tokCount[k], modCounts[j]);
-						next = new Token();
-						result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
-						while ((next = result.Next(next)) != null)
+						stream = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
+                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 						{
-							sinkPos += next.GetPositionIncrement();
+							sinkPos += nextToken.GetPositionIncrement();
 						}
 						//System.out.println("Modulo--------");
-						result = sink;
-						while ((next = result.Next(next)) != null)
+						stream = sink;
+                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 						{
-							sinkPos += next.GetPositionIncrement();
+							sinkPos += nextToken.GetPositionIncrement();
 						}
 					}
 					finish = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
@@ -334,15 +330,17 @@
 			internal int count = 0;
 			
 			//return every 100 tokens
-			public override Token Next(Token result)
+			public override Token Next(Token reusableToken)
 			{
-				
-				while ((result = input.Next(result)) != null && count % modCount != 0)
-				{
+                Token nextToken = null;
+                for (nextToken = input.Next(reusableToken);
+                    nextToken != null && count % modCount != 0;
+                    nextToken = input.Next(reusableToken))
+                    {
 					count++;
 				}
 				count++;
-				return result;
+				return nextToken;
 			}
 		}
 		
@@ -376,7 +374,7 @@
 			{
 				if (t != null && count % modCount == 0)
 				{
-					lst.Add(t.Clone());
+					base.Add(t);
 				}
 				count++;
 			}
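
The TeeSinkTokenTest hunks above all apply the same migration: the no-argument TokenStream.Next() is replaced by the reusable-token overload Next(Token), and Token.TermText() by Token.Term(). A minimal consumer-side sketch of that idiom, assuming the 2.4-era Lucene.Net analysis API (the helper class and method names here are illustrative only, not part of the commit):

    using Lucene.Net.Analysis;

    class TokenReuseSketch
    {
        // One Token is allocated up front and passed back into Next() on every
        // call, so the stream fills it in place instead of allocating a fresh
        // Token per term.
        static void DumpTerms(string text)
        {
            TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader(text));
            Token reusableToken = new Token();
            for (Token nextToken = stream.Next(reusableToken);
                 nextToken != null;
                 nextToken = stream.Next(reusableToken))
            {
                // Term() replaces the older TermText() accessor on Token.
                System.Console.Out.WriteLine(nextToken.Term());
            }
            stream.Close();
        }
    }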

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestAnalyzers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestAnalyzers.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestAnalyzers.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestAnalyzers.cs Wed Jul 29 18:04:12 2009
@@ -33,13 +33,14 @@
 		public virtual void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output)
 		{
 			TokenStream ts = a.TokenStream("dummy", new System.IO.StringReader(input));
+            Token reusableToken = new Token();
 			for (int i = 0; i < output.Length; i++)
 			{
-				Token t = ts.Next();
-				Assert.IsNotNull(t);
-				Assert.AreEqual(t.TermText(), output[i]);
+				Token nextToken = ts.Next(reusableToken);
+				Assert.IsNotNull(nextToken);
+				Assert.AreEqual(nextToken.Term(), output[i]);
 			}
-			Assert.IsNull(ts.Next());
+			Assert.IsNull(ts.Next(reusableToken));
 			ts.Close();
 		}
 		
@@ -81,16 +82,16 @@
 		
 		internal virtual void  VerifyPayload(TokenStream ts)
 		{
-			Token t = new Token();
+            Token reusableToken = new Token();
 			for (byte b = 1; ; b++)
 			{
-				t.Clear();
-				t = ts.Next(t);
-				if (t == null)
+                reusableToken.Clear();
+                Token nextToken = ts.Next(reusableToken);
+				if (nextToken == null)
 					break;
-				// System.out.println("id="+System.identityHashCode(t) + " " + t);
-				// System.out.println("payload=" + (int)t.getPayload().toByteArray()[0]);
-				Assert.AreEqual(b, t.GetPayload().ToByteArray()[0]);
+				// System.out.println("id="+System.identityHashCode(nextToken) + " " + nextToken);
+				// System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
+				Assert.AreEqual(b, nextToken.GetPayload().ToByteArray()[0]);
 			}
 		}
 		
@@ -135,21 +136,17 @@
 		{
 		}
 		
-		public override Token Next()
+		public override Token Next(Token reusableToken)
 		{
 			if (lst == null)
 			{
 				lst = new System.Collections.ArrayList();
-				for (; ; )
+                for (Token nextToken = input.Next(reusableToken); nextToken != null; nextToken = input.Next(reusableToken))
 				{
-					Token t = input.Next();
-					if (t == null)
-						break;
-					lst.Add(t);
+					lst.Add(nextToken.Clone());
 				}
 			}
-			System.Object tempObject;
-			tempObject = lst[0];
+			object tempObject = lst[0];
 			lst.RemoveAt(0);
 			return lst.Count == 0 ? null : (Token) tempObject;
 		}
@@ -169,14 +166,14 @@
 		internal byte[] data = new byte[1];
 		internal Payload p;
 		
-		public override Token Next(Token target)
+		public override Token Next(Token reusableToken)
 		{
-			target = input.Next(target);
-			if (target == null)
-				return null;
-			target.SetPayload(p); // reuse the payload / byte[]
+            System.Diagnostics.Debug.Assert(reusableToken != null);
+            Token nextToken = input.Next(reusableToken);
+			if (nextToken == null) return null;
+			nextToken.SetPayload(p); // reuse the payload / byte[]
 			data[0]++;
-			return target;
+			return nextToken;
 		}
 	}
 }
\ No newline at end of file
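
The TestAnalyzers.cs hunks show the producer side of the same contract: streams and filters now override Next(Token reusableToken), and anything a filter wants to keep past the current call is Clone()d, as the buffering filter above does with lst.Add(nextToken.Clone()). A sketch of a filter written against that contract, assuming the 2.4-era TokenFilter base class; the filter name is illustrative:

    using Lucene.Net.Analysis;

    class UpperCaseTermFilter : TokenFilter
    {
        public UpperCaseTermFilter(TokenStream input) : base(input)
        {
        }

        public override Token Next(Token reusableToken)
        {
            System.Diagnostics.Debug.Assert(reusableToken != null);
            Token nextToken = input.Next(reusableToken);
            if (nextToken == null)
                return null;
            // Mutate the reused token in place; callers must not hold on to it
            // across calls, which is exactly why retained tokens are cloned.
            nextToken.SetTermBuffer(nextToken.Term().ToUpper());
            return nextToken;
        }
    }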

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestCachingTokenFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs Wed Jul 29 18:04:12 2009
@@ -32,7 +32,7 @@
 
 namespace Lucene.Net.Analysis
 {
-	
+	[TestFixture]
 	public class TestCachingTokenFilter : LuceneTestCase
 	{
 		private class AnonymousClassTokenStream : TokenStream
@@ -56,25 +56,26 @@
 			}
 			private int index = 0;
 			
-			public override Token Next()
+			public override Token Next(Token reusableToken)
 			{
+                System.Diagnostics.Debug.Assert(reusableToken != null);
 				if (index == Enclosing_Instance.tokens.Length)
 				{
 					return null;
 				}
 				else
 				{
-					return new Token(Enclosing_Instance.tokens[index++], 0, 0);
+					return reusableToken.Reinit(Enclosing_Instance.tokens[index++], 0, 0);
 				}
 			}
 		}
 		private System.String[] tokens = new System.String[]{"term1", "term2", "term3", "term2"};
 		
-		[Test]
+		[NUnit.Framework.Test]
 		public virtual void  TestCaching()
 		{
 			Directory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer());
+			IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
 			TokenStream stream = new AnonymousClassTokenStream(this);
 			
@@ -118,11 +119,11 @@
 		private void  CheckTokens(TokenStream stream)
 		{
 			int count = 0;
-			Token token;
-			while ((token = stream.Next()) != null)
+			Token reusableToken = new Token();
+            for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 			{
 				Assert.IsTrue(count < tokens.Length);
-				Assert.AreEqual(tokens[count], token.TermText());
+				Assert.AreEqual(tokens[count], nextToken.Term());
 				count++;
 			}
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestISOLatin1AccentFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestISOLatin1AccentFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestISOLatin1AccentFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestISOLatin1AccentFilter.cs Wed Jul 29 18:04:12 2009
@@ -30,80 +30,85 @@
 		[Test]
 		public virtual void  TestU()
 		{
-			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ"));
+            TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
 			ISOLatin1AccentFilter filter = new ISOLatin1AccentFilter(stream);
-			Assert.AreEqual("Des", filter.Next().TermText());
-			Assert.AreEqual("mot", filter.Next().TermText());
-			Assert.AreEqual("cles", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("LA", filter.Next().TermText());
-			Assert.AreEqual("CHAINE", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("A", filter.Next().TermText());
-			Assert.AreEqual("AE", filter.Next().TermText());
-			Assert.AreEqual("C", filter.Next().TermText());
-			Assert.AreEqual("E", filter.Next().TermText());
-			Assert.AreEqual("E", filter.Next().TermText());
-			Assert.AreEqual("E", filter.Next().TermText());
-			Assert.AreEqual("E", filter.Next().TermText());
-			Assert.AreEqual("I", filter.Next().TermText());
-			Assert.AreEqual("I", filter.Next().TermText());
-			Assert.AreEqual("I", filter.Next().TermText());
-			Assert.AreEqual("I", filter.Next().TermText());
-			Assert.AreEqual("D", filter.Next().TermText());
-			Assert.AreEqual("N", filter.Next().TermText());
-			Assert.AreEqual("O", filter.Next().TermText());
-			Assert.AreEqual("O", filter.Next().TermText());
-			Assert.AreEqual("O", filter.Next().TermText());
-			Assert.AreEqual("O", filter.Next().TermText());
-			Assert.AreEqual("O", filter.Next().TermText());
-			Assert.AreEqual("O", filter.Next().TermText());
-			Assert.AreEqual("OE", filter.Next().TermText());
-			Assert.AreEqual("TH", filter.Next().TermText());
-			Assert.AreEqual("U", filter.Next().TermText());
-			Assert.AreEqual("U", filter.Next().TermText());
-			Assert.AreEqual("U", filter.Next().TermText());
-			Assert.AreEqual("U", filter.Next().TermText());
-			Assert.AreEqual("Y", filter.Next().TermText());
-			Assert.AreEqual("Y", filter.Next().TermText());
-			Assert.AreEqual("a", filter.Next().TermText());
-			Assert.AreEqual("a", filter.Next().TermText());
-			Assert.AreEqual("a", filter.Next().TermText());
-			Assert.AreEqual("a", filter.Next().TermText());
-			Assert.AreEqual("a", filter.Next().TermText());
-			Assert.AreEqual("a", filter.Next().TermText());
-			Assert.AreEqual("ae", filter.Next().TermText());
-			Assert.AreEqual("c", filter.Next().TermText());
-			Assert.AreEqual("e", filter.Next().TermText());
-			Assert.AreEqual("e", filter.Next().TermText());
-			Assert.AreEqual("e", filter.Next().TermText());
-			Assert.AreEqual("e", filter.Next().TermText());
-			Assert.AreEqual("i", filter.Next().TermText());
-			Assert.AreEqual("i", filter.Next().TermText());
-			Assert.AreEqual("i", filter.Next().TermText());
-			Assert.AreEqual("i", filter.Next().TermText());
-			Assert.AreEqual("d", filter.Next().TermText());
-			Assert.AreEqual("n", filter.Next().TermText());
-			Assert.AreEqual("o", filter.Next().TermText());
-			Assert.AreEqual("o", filter.Next().TermText());
-			Assert.AreEqual("o", filter.Next().TermText());
-			Assert.AreEqual("o", filter.Next().TermText());
-			Assert.AreEqual("o", filter.Next().TermText());
-			Assert.AreEqual("o", filter.Next().TermText());
-			Assert.AreEqual("oe", filter.Next().TermText());
-			Assert.AreEqual("ss", filter.Next().TermText());
-			Assert.AreEqual("th", filter.Next().TermText());
-			Assert.AreEqual("u", filter.Next().TermText());
-			Assert.AreEqual("u", filter.Next().TermText());
-			Assert.AreEqual("u", filter.Next().TermText());
-			Assert.AreEqual("u", filter.Next().TermText());
-			Assert.AreEqual("y", filter.Next().TermText());
-			Assert.AreEqual("y", filter.Next().TermText());
-			Assert.IsNull(filter.Next());
+            Token reusableToken = new Token();
+            Assert.AreEqual("Des", filter.Next(reusableToken).Term());
+			Assert.AreEqual("mot", filter.Next(reusableToken).Term());
+			Assert.AreEqual("cles", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("LA", filter.Next(reusableToken).Term());
+			Assert.AreEqual("CHAINE", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("A", filter.Next(reusableToken).Term());
+			Assert.AreEqual("AE", filter.Next(reusableToken).Term());
+			Assert.AreEqual("C", filter.Next(reusableToken).Term());
+			Assert.AreEqual("E", filter.Next(reusableToken).Term());
+			Assert.AreEqual("E", filter.Next(reusableToken).Term());
+			Assert.AreEqual("E", filter.Next(reusableToken).Term());
+			Assert.AreEqual("E", filter.Next(reusableToken).Term());
+			Assert.AreEqual("I", filter.Next(reusableToken).Term());
+			Assert.AreEqual("I", filter.Next(reusableToken).Term());
+			Assert.AreEqual("I", filter.Next(reusableToken).Term());
+            Assert.AreEqual("I", filter.Next(reusableToken).Term());
+            Assert.AreEqual("IJ", filter.Next(reusableToken).Term());
+			Assert.AreEqual("D", filter.Next(reusableToken).Term());
+			Assert.AreEqual("N", filter.Next(reusableToken).Term());
+			Assert.AreEqual("O", filter.Next(reusableToken).Term());
+			Assert.AreEqual("O", filter.Next(reusableToken).Term());
+			Assert.AreEqual("O", filter.Next(reusableToken).Term());
+			Assert.AreEqual("O", filter.Next(reusableToken).Term());
+			Assert.AreEqual("O", filter.Next(reusableToken).Term());
+			Assert.AreEqual("O", filter.Next(reusableToken).Term());
+			Assert.AreEqual("OE", filter.Next(reusableToken).Term());
+			Assert.AreEqual("TH", filter.Next(reusableToken).Term());
+			Assert.AreEqual("U", filter.Next(reusableToken).Term());
+			Assert.AreEqual("U", filter.Next(reusableToken).Term());
+			Assert.AreEqual("U", filter.Next(reusableToken).Term());
+			Assert.AreEqual("U", filter.Next(reusableToken).Term());
+			Assert.AreEqual("Y", filter.Next(reusableToken).Term());
+			Assert.AreEqual("Y", filter.Next(reusableToken).Term());
+			Assert.AreEqual("a", filter.Next(reusableToken).Term());
+			Assert.AreEqual("a", filter.Next(reusableToken).Term());
+			Assert.AreEqual("a", filter.Next(reusableToken).Term());
+			Assert.AreEqual("a", filter.Next(reusableToken).Term());
+			Assert.AreEqual("a", filter.Next(reusableToken).Term());
+			Assert.AreEqual("a", filter.Next(reusableToken).Term());
+			Assert.AreEqual("ae", filter.Next(reusableToken).Term());
+			Assert.AreEqual("c", filter.Next(reusableToken).Term());
+			Assert.AreEqual("e", filter.Next(reusableToken).Term());
+			Assert.AreEqual("e", filter.Next(reusableToken).Term());
+			Assert.AreEqual("e", filter.Next(reusableToken).Term());
+			Assert.AreEqual("e", filter.Next(reusableToken).Term());
+			Assert.AreEqual("i", filter.Next(reusableToken).Term());
+			Assert.AreEqual("i", filter.Next(reusableToken).Term());
+			Assert.AreEqual("i", filter.Next(reusableToken).Term());
+            Assert.AreEqual("i", filter.Next(reusableToken).Term());
+            Assert.AreEqual("ij", filter.Next(reusableToken).Term());
+			Assert.AreEqual("d", filter.Next(reusableToken).Term());
+			Assert.AreEqual("n", filter.Next(reusableToken).Term());
+			Assert.AreEqual("o", filter.Next(reusableToken).Term());
+			Assert.AreEqual("o", filter.Next(reusableToken).Term());
+			Assert.AreEqual("o", filter.Next(reusableToken).Term());
+			Assert.AreEqual("o", filter.Next(reusableToken).Term());
+			Assert.AreEqual("o", filter.Next(reusableToken).Term());
+			Assert.AreEqual("o", filter.Next(reusableToken).Term());
+			Assert.AreEqual("oe", filter.Next(reusableToken).Term());
+			Assert.AreEqual("ss", filter.Next(reusableToken).Term());
+			Assert.AreEqual("th", filter.Next(reusableToken).Term());
+			Assert.AreEqual("u", filter.Next(reusableToken).Term());
+			Assert.AreEqual("u", filter.Next(reusableToken).Term());
+			Assert.AreEqual("u", filter.Next(reusableToken).Term());
+			Assert.AreEqual("u", filter.Next(reusableToken).Term());
+			Assert.AreEqual("y", filter.Next(reusableToken).Term());
+            Assert.AreEqual("y", filter.Next(reusableToken).Term());
+            Assert.AreEqual("fi", filter.Next(reusableToken).Term());
+            Assert.AreEqual("fl", filter.Next(reusableToken).Term());
+			Assert.IsNull(filter.Next(reusableToken));
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestKeywordAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestKeywordAnalyzer.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestKeywordAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestKeywordAnalyzer.cs Wed Jul 29 18:04:12 2009
@@ -27,7 +27,7 @@
 using TermDocs = Lucene.Net.Index.TermDocs;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using Hits = Lucene.Net.Search.Hits;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
@@ -47,50 +47,71 @@
 		{
 			base.SetUp();
 			directory = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
-			
+			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+//writer.SetInfoStream(System.Console.Out);			
 			Document doc = new Document();
-			doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.UN_TOKENIZED));
-			doc.Add(new Field("description", "Illidium Space Modulator", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("description", "Illidium Space Modulator", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
 			writer.Close();
 			
 			searcher = new IndexSearcher(directory);
 		}
-		
-		[Test]
+
+        //[Test]
+        //public void TestSameThreadConsecutive()
+        //{
+        //    TestMultipleDocument();
+        //    TestPerFieldAnalyzer();
+        //}
+
+        //[Test]
+        //public void TestDistinctThreadConsecutive()
+        //{
+        //    SupportClass.ThreadClass thread1 = new SupportClass.ThreadClass(new System.Threading.ThreadStart(TestMultipleDocument));
+        //    thread1.Start();
+        //    System.Threading.Thread.CurrentThread.Join();
+        //    SupportClass.ThreadClass thread2 = new SupportClass.ThreadClass(new System.Threading.ThreadStart(TestPerFieldAnalyzer));
+        //    thread2.Start();
+        //    System.Threading.Thread.CurrentThread.Join();
+        //}
+
+        [Test]
 		public virtual void  TestPerFieldAnalyzer()
 		{
-			PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
+            PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
 			analyzer.AddAnalyzer("partnum", new KeywordAnalyzer());
 
 			Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser("description", analyzer);
 			Query query = queryParser.Parse("partnum:Q36 AND SPACE");
 			
-			Hits hits = searcher.Search(query);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual("+partnum:Q36 +space", query.ToString("description"), "Q36 kept as-is");
-			Assert.AreEqual(1, hits.Length(), "doc found!");
+			Assert.AreEqual(1, hits.Length, "doc found!");
 		}
 		
 		[Test]
-		public virtual void  TestMutipleDocument()
+		public virtual void  TestMultipleDocument()
 		{
 			RAMDirectory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
-			doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			doc = new Document();
-			doc.Add(new Field("partnum", "Q37", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("partnum", "Q37", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Close();
 			
-			IndexReader reader = IndexReader.Open(dir);
-			TermDocs td = reader.TermDocs(new Term("partnum", "Q36"));
-			Assert.IsTrue(td.Next());
-			td = reader.TermDocs(new Term("partnum", "Q37"));
-			Assert.IsTrue(td.Next());
+            IndexReader reader = IndexReader.Open(dir);
+            // following is the line whose inclusion causes TestPerFieldAnalyzer to fail:
+            TermDocs td = reader.TermDocs(new Term("partnum", "Q36"));
+            Assert.IsTrue(td.Next());
+            td = reader.TermDocs(new Term("partnum", "Q37"));
+            Assert.IsTrue(td.Next());
+//this fixes TestPerFieldAnalyzer:
+//((Lucene.Net.Index.SegmentReader)reader).foo();
 		}
 	}
 }
\ No newline at end of file
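
Besides the token-reuse changes, this file moves the tests to the 2.4-era index and search API: the IndexWriter constructors take an IndexWriter.MaxFieldLength, Field.Index.TOKENIZED/UN_TOKENIZED become ANALYZED/NOT_ANALYZED, and Hits is replaced by the ScoreDoc[] returned from Search(query, filter, n). A condensed sketch of those calls, assuming the 2.4-era Lucene.Net API (the helper name is illustrative):

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Store;

    class IndexApiSketch
    {
        static int CountHits(Query query)
        {
            RAMDirectory dir = new RAMDirectory();
            // The writer now requires an explicit field-length policy.
            IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(),
                                                 true, IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();
            doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);
            writer.Close();

            // Hits is gone; Search(query, filter, n) returns TopDocs with a ScoreDoc[].
            IndexSearcher searcher = new IndexSearcher(dir);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
            return hits.Length;
        }
    }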

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestLengthFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestLengthFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestLengthFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestLengthFilter.cs Wed Jul 29 18:04:12 2009
@@ -32,10 +32,11 @@
 		{
 			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader("short toolong evenmuchlongertext a ab toolong foo"));
 			LengthFilter filter = new LengthFilter(stream, 2, 6);
-			Assert.AreEqual("short", filter.Next().TermText());
-			Assert.AreEqual("ab", filter.Next().TermText());
-			Assert.AreEqual("foo", filter.Next().TermText());
-			Assert.IsNull(filter.Next());
+            Token reusableToken = new Token();
+            Assert.AreEqual("short", filter.Next(reusableToken).Term());
+            Assert.AreEqual("ab", filter.Next(reusableToken).Term());
+            Assert.AreEqual("foo", filter.Next(reusableToken).Term());
+			Assert.IsNull(filter.Next(reusableToken));
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs Wed Jul 29 18:04:12 2009
@@ -25,7 +25,7 @@
 {
 	
 	[TestFixture]	
-	public class TestPerFieldAnalzyerWrapper : LuceneTestCase
+	public class TestPerFieldAnalyzerWrapper : LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestPerField()
@@ -35,12 +35,13 @@
 			analyzer.AddAnalyzer("special", new SimpleAnalyzer());
 			
 			TokenStream tokenStream = analyzer.TokenStream("field", new System.IO.StringReader(text));
-			Token token = tokenStream.Next();
-			Assert.AreEqual("Qwerty", token.TermText(), "WhitespaceAnalyzer does not lowercase");
+            Token reusableToken = new Token();
+			Token nextToken = tokenStream.Next(reusableToken);
+			Assert.AreEqual("Qwerty", nextToken.Term(), "WhitespaceAnalyzer does not lowercase");
 			
 			tokenStream = analyzer.TokenStream("special", new System.IO.StringReader(text));
-			token = tokenStream.Next();
-			Assert.AreEqual("qwerty", token.TermText(), "SimpleAnalyzer lowercases");
+			nextToken = tokenStream.Next(reusableToken);
+			Assert.AreEqual("qwerty", nextToken.Term(), "SimpleAnalyzer lowercases");
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStandardAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestStandardAnalyzer.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStandardAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStandardAnalyzer.cs Wed Jul 29 18:04:12 2009
@@ -44,25 +44,25 @@
 		public virtual void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] expectedImages, System.String[] expectedTypes, int[] expectedPosIncrs)
 		{
 			TokenStream ts = a.TokenStream("dummy", new System.IO.StringReader(input));
+            Token reusableToken = new Token();
 			for (int i = 0; i < expectedImages.Length; i++)
 			{
-				Token t = ts.Next();
-				Assert.IsNotNull(t);
-				Assert.AreEqual(expectedImages[i], t.TermText());
+				Token nextToken = ts.Next(reusableToken);
+				Assert.IsNotNull(nextToken);
+				Assert.AreEqual(expectedImages[i], nextToken.Term());
 				if (expectedTypes != null)
 				{
-					Assert.AreEqual(expectedTypes[i], t.Type());
+					Assert.AreEqual(expectedTypes[i], nextToken.Type());
 				}
 				if (expectedPosIncrs != null)
 				{
-					Assert.AreEqual(expectedPosIncrs[i], t.GetPositionIncrement());
+					Assert.AreEqual(expectedPosIncrs[i], nextToken.GetPositionIncrement());
 				}
 			}
-			Assert.IsNull(ts.Next());
+			Assert.IsNull(ts.Next(reusableToken));
 			ts.Close();
 		}
 		
-		
 		[Test]
 		public virtual void  TestMaxTermLength()
 		{
@@ -167,14 +167,16 @@
 		[Test]
 		public virtual void  TestDomainNames()
 		{
+            // don't reuse because we alter its state (SetReplaceInvalidAcronym)
+            StandardAnalyzer a2 = new StandardAnalyzer();
 			// domain names
-			AssertAnalyzesTo(a, "www.nutch.org", new System.String[]{"www.nutch.org"});
+			AssertAnalyzesTo(a2, "www.nutch.org", new System.String[]{"www.nutch.org"});
 			//Notice the trailing .  See https://issues.apache.org/jira/browse/LUCENE-1068.
-			//TODO: Remove in 3.x
-			AssertAnalyzesTo(a, "www.nutch.org.", new System.String[]{"wwwnutchorg"}, new System.String[]{"<ACRONYM>"});
+			// the following should be recognized as HOST
+			AssertAnalyzesTo(a2, "www.nutch.org.", new System.String[]{"www.nutch.org"}, new System.String[]{"<HOST>"});
 			// the following should be recognized as HOST. The code that sets replaceDepAcronym should be removed in the next release.
-			((StandardAnalyzer) a).SetReplaceInvalidAcronym(true);
-			AssertAnalyzesTo(a, "www.nutch.org.", new System.String[]{"www.nutch.org"}, new System.String[]{"<HOST>"});
+			a2.SetReplaceInvalidAcronym(false);
+			AssertAnalyzesTo(a2, "www.nutch.org.", new System.String[]{"wwwnutchorg"}, new System.String[]{"<ACRONYM>"});
 		}
 		
 		[Test]
@@ -290,7 +292,7 @@
 		{
 			// test backward compatibility for applications that require the old behavior.
 			// this should be removed once replaceDepAcronym is removed.
-			AssertAnalyzesTo(a, "lucene.apache.org.", new System.String[]{"luceneapacheorg"}, new System.String[]{"<ACRONYM>"});
+			AssertAnalyzesTo(a, "lucene.apache.org.", new System.String[]{"lucene.apache.org"}, new System.String[]{"<HOST>"});
 		}
 	}
 }
\ No newline at end of file
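
The TestDomainNames hunk (and the backward-compatibility hunk at the end of the file) reflect a default-behaviour change in StandardAnalyzer: host names with a trailing dot ("www.nutch.org.") are now kept intact and typed <HOST> by default, while SetReplaceInvalidAcronym(false) restores the old collapsed <ACRONYM> form ("wwwnutchorg"). A small sketch of checking the emitted token type, assuming the 2.4-era API (the helper name is illustrative):

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Standard;

    class HostAcronymSketch
    {
        static string FirstTokenType(string text, bool replaceInvalidAcronym)
        {
            // A fresh analyzer per check, because SetReplaceInvalidAcronym alters
            // shared state (as the comment in the hunk above notes).
            StandardAnalyzer analyzer = new StandardAnalyzer();
            analyzer.SetReplaceInvalidAcronym(replaceInvalidAcronym);
            TokenStream ts = analyzer.TokenStream("dummy", new System.IO.StringReader(text));
            Token nextToken = ts.Next(new Token());
            return nextToken == null ? null : nextToken.Type();
        }
    }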

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestStopAnalyzer.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopAnalyzer.cs Wed Jul 29 18:04:12 2009
@@ -51,10 +51,10 @@
 			System.IO.StringReader reader = new System.IO.StringReader("This is a test of the english stop analyzer");
 			TokenStream stream = stop.TokenStream("test", reader);
 			Assert.IsTrue(stream != null);
-			Token token = null;
-			while ((token = stream.Next()) != null)
+			Token reusableToken = new Token();
+			for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 			{
-				Assert.IsFalse(inValidTokens.Contains(token.TermText()));
+				Assert.IsFalse(inValidTokens.Contains(nextToken.Term()));
 			}
 		}
 		
@@ -69,12 +69,12 @@
 			System.IO.StringReader reader = new System.IO.StringReader("This is a good test of the english stop analyzer");
 			TokenStream stream = newStop.TokenStream("test", reader);
 			Assert.IsNotNull(stream);
-			Token token = null;
-			while ((token = stream.Next()) != null)
+			Token reusableToken = new Token();
+			for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 			{
-				System.String text = token.TermText();
+                System.String text = nextToken.Term();
 				Assert.IsFalse(stopWordsSet.Contains(text));
-				Assert.AreEqual(1, token.GetPositionIncrement()); // by default stop tokenizer does not apply increments.
+                Assert.AreEqual(1, nextToken.GetPositionIncrement()); // by default stop tokenizer does not apply increments.
 			}
 		}
 
@@ -94,13 +94,13 @@
 				int[] expectedIncr = new int[]{1, 1, 1, 3, 1, 1, 1, 2, 1};
 				TokenStream stream = newStop.TokenStream("test", reader);
 				Assert.IsNotNull(stream);
-				Token token = null;
 				int i = 0;
-				while ((token = stream.Next()) != null)
+                Token reusableToken = new Token();
+                for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 				{
-					System.String text = token.TermText();
+                    System.String text = nextToken.Term();
 					Assert.IsFalse(stopWordsSet.Contains(text));
-					Assert.AreEqual(expectedIncr[i++], token.GetPositionIncrement());
+                    Assert.AreEqual(expectedIncr[i++], nextToken.GetPositionIncrement());
 				}
 			}
 			finally

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestStopFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopFilter.cs Wed Jul 29 18:04:12 2009
@@ -41,9 +41,10 @@
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
 			TokenStream stream = new StopFilter(new WhitespaceTokenizer(reader), stopWords);
-			Assert.AreEqual("Now", stream.Next().TermText());
-			Assert.AreEqual("The", stream.Next().TermText());
-			Assert.AreEqual(null, stream.Next());
+            Token reusableToken = new Token();
+			Assert.AreEqual("Now", stream.Next(reusableToken).Term());
+			Assert.AreEqual("The", stream.Next(reusableToken).Term());
+			Assert.AreEqual(null, stream.Next(reusableToken));
 		}
 		
 		[Test]
@@ -52,8 +53,9 @@
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
 			TokenStream stream = new StopFilter(new WhitespaceTokenizer(reader), stopWords, true);
-			Assert.AreEqual("Now", stream.Next().TermText());
-			Assert.AreEqual(null, stream.Next());
+            Token reusableToken = new Token();
+            Assert.AreEqual("Now", stream.Next(reusableToken).Term());
+			Assert.AreEqual(null, stream.Next(reusableToken));
 		}
 		
 		[Test]
@@ -63,9 +65,10 @@
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
 			System.Collections.Hashtable stopSet = StopFilter.MakeStopSet(stopWords);
 			TokenStream stream = new StopFilter(new WhitespaceTokenizer(reader), stopSet);
-			Assert.AreEqual("Now", stream.Next().TermText());
-			Assert.AreEqual("The", stream.Next().TermText());
-			Assert.AreEqual(null, stream.Next());
+            Token reusableToken = new Token();
+            Assert.AreEqual("Now", stream.Next(reusableToken).Term());
+			Assert.AreEqual("The", stream.Next(reusableToken).Term());
+			Assert.AreEqual(null, stream.Next(reusableToken));
 		}
 		
 		/// <summary> Test Position increments applied by StopFilter with and without enabling this option.</summary>
@@ -127,15 +130,16 @@
 		{
 			Log("---> test with enable-increments-" + (enableIcrements?"enabled":"disabled"));
 			stpf.SetEnablePositionIncrements(enableIcrements);
-			for (int i = 0; i < 20; i += 3)
+            Token reusableToken = new Token();
+            for (int i = 0; i < 20; i += 3)
 			{
-				Token t = stpf.Next();
-				Log("Token " + i + ": " + t);
+				Token nextToken = stpf.Next(reusableToken);
+				Log("Token " + i + ": " + nextToken);
 				System.String w = English.IntToEnglish(i).Trim();
-				Assert.AreEqual(w, t.TermText(), "expecting token " + i + " to be " + w);
-				Assert.AreEqual(enableIcrements ? (i == 0 ? 1 : 3) : 1, t.GetPositionIncrement(), "all but first token must have position increment of 3");
+				Assert.AreEqual(w, nextToken.Term(), "expecting token " + i + " to be " + w);
+				Assert.AreEqual(enableIcrements ? (i == 0 ? 1 : 3) : 1, nextToken.GetPositionIncrement(), "all but first token must have position increment of 3");
 			}
-			Assert.IsNull(stpf.Next());
+			Assert.IsNull(stpf.Next(reusableToken));
 		}
 		
 		// print debug info depending on VERBOSE

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestToken.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestToken.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestToken.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestToken.cs Wed Jul 29 18:04:12 2009
@@ -23,39 +23,169 @@
 
 namespace Lucene.Net.Analysis
 {
-	[TestFixture]
-	public class TestToken : LuceneTestCase
-	{
-		[Test]
-		public virtual void  TestToString()
-		{
-			char[] b = new char[]{'a', 'l', 'o', 'h', 'a'};
-			Token t = new Token("", 0, 5);
-			t.SetTermBuffer(b, 0, 5);
-			Assert.AreEqual("(aloha,0,5)", t.ToString());
-			
-			t.SetTermText("hi there");
-			Assert.AreEqual("(hi there,0,5)", t.ToString());
-		}
-		
-		[Test]
-		public virtual void  TestMixedStringArray()
-		{
-			Token t = new Token("hello", 0, 5);
-			Assert.AreEqual(t.TermText(), "hello");
-			Assert.AreEqual(t.TermLength(), 5);
-			Assert.AreEqual(new System.String(t.TermBuffer(), 0, 5), "hello");
-			t.SetTermText("hello2");
-			Assert.AreEqual(t.TermLength(), 6);
-			Assert.AreEqual(new System.String(t.TermBuffer(), 0, 6), "hello2");
-			t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
-			Assert.AreEqual(t.TermText(), "hello3");
-			
-			// Make sure if we get the buffer and change a character
-			// that termText() reflects the change
-			char[] buffer = t.TermBuffer();
-			buffer[1] = 'o';
-			Assert.AreEqual(t.TermText(), "hollo3");
-		}
-	}
+    [TestFixture]
+    public class TestToken : LuceneTestCase
+    {
+        [Test]
+        public void TestCtor()
+        {
+            Token t = new Token();
+            char[] content = "hello".ToCharArray();
+            t.SetTermBuffer(content, 0, content.Length);
+            char[] buf = t.TermBuffer();
+            Assert.AreNotSame(t.TermBuffer(), content);
+            Assert.AreEqual("hello", t.Term());
+            Assert.AreEqual("word", t.Type());
+            Assert.AreEqual(0, t.GetFlags());
+
+            t = new Token(6, 22);
+            t.SetTermBuffer(content, 0, content.Length);
+            Assert.AreEqual("hello", t.Term());
+            Assert.AreEqual("(hello,6,22)", t.ToString());
+            Assert.AreEqual("word", t.Type());
+            Assert.AreEqual(0, t.GetFlags());
+
+            t = new Token(6, 22, 7);
+            t.SetTermBuffer(content, 0, content.Length);
+            Assert.AreEqual("hello", t.Term());
+            Assert.AreEqual("(hello,6,22)", t.ToString());
+            Assert.AreEqual(7, t.GetFlags());
+
+            t = new Token(6, 22, "junk");
+            t.SetTermBuffer(content, 0, content.Length);
+            Assert.AreEqual("hello", t.Term());
+            Assert.AreEqual("(hello,6,22,type=junk)", t.ToString());
+            Assert.AreEqual(0, t.GetFlags());
+        }
+
+        [Test]
+        public void TestResize()
+        {
+            Token t = new Token();
+            char[] content = "hello".ToCharArray();
+            t.SetTermBuffer(content, 0, content.Length);
+            for (int i = 0; i < 2000; i++)
+            {
+                t.ResizeTermBuffer(i);
+                Assert.IsTrue(i <= t.TermBuffer().Length);
+                Assert.AreEqual("hello", t.Term());
+            }
+        }
+
+        [Test]
+        public void TestGrow()
+        {
+            Token t = new Token();
+            System.Text.StringBuilder buf = new System.Text.StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                char[] content = buf.ToString().ToCharArray();
+                t.SetTermBuffer(content, 0, content.Length);
+                Assert.AreEqual(buf.Length, t.TermLength());
+                Assert.AreEqual(buf.ToString(), t.Term());
+                buf.Append(buf.ToString());
+            }
+            Assert.AreEqual(1048576, t.TermLength());
+            Assert.AreEqual(1179654, t.TermBuffer().Length);
+
+            // now as a string, first variant
+            t = new Token();
+            buf = new System.Text.StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                String content = buf.ToString();
+                t.SetTermBuffer(content, 0, content.Length);
+                Assert.AreEqual(content.Length, t.TermLength());
+                Assert.AreEqual(content, t.Term());
+                buf.Append(content);
+            }
+            Assert.AreEqual(1048576, t.TermLength());
+            Assert.AreEqual(1179654, t.TermBuffer().Length);
+
+            // now as a string, second variant
+            t = new Token();
+            buf = new System.Text.StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                String content = buf.ToString();
+                t.SetTermBuffer(content);
+                Assert.AreEqual(content.Length, t.TermLength());
+                Assert.AreEqual(content, t.Term());
+                buf.Append(content);
+            }
+            Assert.AreEqual(1048576, t.TermLength());
+            Assert.AreEqual(1179654, t.TermBuffer().Length);
+
+            // Test for slow growth to a long term
+            t = new Token();
+            buf = new System.Text.StringBuilder("a");
+            for (int i = 0; i < 20000; i++)
+            {
+                String content = buf.ToString();
+                t.SetTermBuffer(content);
+                Assert.AreEqual(content.Length, t.TermLength());
+                Assert.AreEqual(content, t.Term());
+                buf.Append("a");
+            }
+            Assert.AreEqual(20000, t.TermLength());
+            Assert.AreEqual(20331, t.TermBuffer().Length);
+
+            // Test for slow growth to a long term
+            t = new Token();
+            buf = new System.Text.StringBuilder("a");
+            for (int i = 0; i < 20000; i++)
+            {
+                String content = buf.ToString();
+                t.SetTermBuffer(content);
+                Assert.AreEqual(content.Length, t.TermLength());
+                Assert.AreEqual(content, t.Term());
+                buf.Append("a");
+            }
+            Assert.AreEqual(20000, t.TermLength());
+            Assert.AreEqual(20331, t.TermBuffer().Length);
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            char[] b = new char[] { 'a', 'l', 'o', 'h', 'a' };
+            Token t = new Token("", 0, 5);
+            t.SetTermBuffer(b, 0, 5);
+            Assert.AreEqual("(aloha,0,5)", t.ToString());
+
+            t.SetTermText("hi there");
+            Assert.AreEqual("(hi there,0,5)", t.ToString());
+        }
+
+        [Test]
+        public virtual void TestMixedStringArray()
+        {
+            Token t = new Token("hello", 0, 5);
+            Assert.AreEqual(t.TermText(), "hello");
+            Assert.AreEqual(t.TermLength(), 5);
+            Assert.AreEqual(new System.String(t.TermBuffer(), 0, 5), "hello");
+            t.SetTermText("hello2");
+            Assert.AreEqual(t.TermLength(), 6);
+            Assert.AreEqual(new System.String(t.TermBuffer(), 0, 6), "hello2");
+            t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
+            Assert.AreEqual(t.TermText(), "hello3");
+
+            // Make sure if we get the buffer and change a character
+            // that termText() reflects the change
+            char[] buffer = t.TermBuffer();
+            buffer[1] = 'o';
+            Assert.AreEqual(t.TermText(), "hollo3");
+        }
+
+        [Test]
+        public void TestClone()
+        {
+            Token t = new Token(0, 5);
+            char[] content = "hello".ToCharArray();
+            t.SetTermBuffer(content, 0, 5);
+            char[] buf = t.TermBuffer();
+            Token copy = (Token)t.Clone();
+            Assert.AreNotSame(buf, copy.TermBuffer());
+        }
+    }
 }
\ No newline at end of file
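
The rewritten TestToken exercises the char[]-backed Token API: SetTermBuffer() copies characters into the token's own growable buffer, Term() materializes them as a string, and Clone() deep-copies the buffer. A short sketch of that surface, assuming the 2.4-era Token class:

    using Lucene.Net.Analysis;

    class TokenBufferSketch
    {
        static void Demo()
        {
            Token t = new Token(0, 5);
            char[] content = "hello".ToCharArray();
            // SetTermBuffer copies the characters; the token never aliases 'content'.
            t.SetTermBuffer(content, 0, content.Length);
            System.Console.Out.WriteLine(t.Term());          // "hello"

            // Clone() copies the buffer, so the two tokens can be mutated independently.
            Token copy = (Token) t.Clone();
            System.Console.Out.WriteLine(object.ReferenceEquals(t.TermBuffer(), copy.TermBuffer())); // False
        }
    }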

Modified: incubator/lucene.net/trunk/C#/src/Test/AnalysisTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/AnalysisTest.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/AnalysisTest.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/AnalysisTest.cs Wed Jul 29 18:04:12 2009
@@ -27,19 +27,26 @@
 	
 	class AnalysisTest
 	{
-		[STAThread]
+        static System.IO.FileInfo tmpFile;
+		
+        [STAThread]
 		public static void  Main(System.String[] args)
 		{
 			try
 			{
 				Test("This is a test", true);
-				// FIXME: OG: what's with this hard-coded file name??
-				Test(new System.IO.FileInfo("words.txt"), false);
+
+                tmpFile = new System.IO.FileInfo(
+                    System.IO.Path.Combine(
+                    System.IO.Path.GetDirectoryName(System.IO.Path.GetTempFileName())
+                    , "words.txt"));
+				Test(tmpFile, false);
 			}
 			catch (System.Exception e)
 			{
 				System.Console.Out.WriteLine(" caught a " + e.GetType() + "\n with message: " + e.Message);
 			}
+            tmpFile.Delete();
 		}
 		
 		internal static void  Test(System.IO.FileInfo file, bool verbose)
@@ -69,11 +76,12 @@
 			System.DateTime start = System.DateTime.Now;
 			
 			int count = 0;
-			for (Token t = stream.Next(); t != null; t = stream.Next())
+            Token reusableToken = new Token();
+			for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
 			{
 				if (verbose)
 				{
-					System.Console.Out.WriteLine("Text=" + t.TermText() + " start=" + t.StartOffset() + " end=" + t.EndOffset());
+					System.Console.Out.WriteLine("Text=" + nextToken.TermText() + " start=" + nextToken.StartOffset() + " end=" + nextToken.EndOffset());
 				}
 				count++;
 			}

Modified: incubator/lucene.net/trunk/C#/src/Test/App.config
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/App.config?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/App.config (original)
+++ incubator/lucene.net/trunk/C#/src/Test/App.config Wed Jul 29 18:04:12 2009
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="utf-8" ?>
 <configuration>
   <appSettings>
-    <add key="tempdir" value="c:\temp"/>
+    <add key="tempDir" value="c:\windows\temp\Lucene.Net-Tests"/>
   </appSettings>
   <!-- when i add this setting and run tests, i get 0 success, 0 failures, 0 tests not run
   <appSettings>

Modified: incubator/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/AssemblyInfo.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs Wed Jul 29 18:04:12 2009
@@ -11,12 +11,12 @@
 [assembly: AssemblyConfiguration("")]
 [assembly: AssemblyCompany("The Apache Software Foundation")]
 [assembly: AssemblyProduct("Lucene.Net.Test")]
-[assembly: AssemblyCopyright("Copyright 2006 - 2008 The Apache Software Foundation")]
-[assembly: AssemblyTrademark("Copyright 2006 - 2008 The Apache Software Foundation")]
+[assembly: AssemblyCopyright("Copyright 2006 - 2009 The Apache Software Foundation")]
+[assembly: AssemblyTrademark("Copyright 2006 - 2009 The Apache Software Foundation")]
 [assembly: AssemblyDefaultAlias("Lucene.Net")]
 [assembly: AssemblyCulture("")]
 
-[assembly: AssemblyInformationalVersionAttribute("2.3.2")]
+[assembly: AssemblyInformationalVersionAttribute("2.4.0")]
 
 //
 // Version information for an assembly consists of the following four values:
@@ -29,7 +29,7 @@
 // You can specify all the values or you can default the Revision and Build Numbers 
 // by using the '*' as shown below:
 
-[assembly: AssemblyVersion("2.3.2.001")]
+[assembly: AssemblyVersion("2.4.0.001")]
 
 //
 // In order to sign your assembly you must specify a key to use. Refer to the 

Modified: incubator/lucene.net/trunk/C#/src/Test/Document/TestBinaryDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Document/TestBinaryDocument.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Document/TestBinaryDocument.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Document/TestBinaryDocument.cs Wed Jul 29 18:04:12 2009
@@ -72,7 +72,7 @@
 			
 			/** add the doc to a ram index */
 			RAMDirectory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(doc);
 			writer.Close();
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Document/TestDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Document/TestDocument.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Document/TestDocument.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Document/TestDocument.cs Wed Jul 29 18:04:12 2009
@@ -19,15 +19,15 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using Searcher = Lucene.Net.Search.Searcher;
 using TermQuery = Lucene.Net.Search.TermQuery;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Documents
@@ -128,7 +128,7 @@
 		public virtual void  TestConstructorExceptions()
 		{
 			new Field("name", "value", Field.Store.YES, Field.Index.NO); // okay
-			new Field("name", "value", Field.Store.NO, Field.Index.UN_TOKENIZED); // okay
+			new Field("name", "value", Field.Store.NO, Field.Index.NOT_ANALYZED); // okay
 			try
 			{
 				new Field("name", "value", Field.Store.NO, Field.Index.NO);
@@ -170,7 +170,7 @@
 		public virtual void  TestGetValuesForIndexedDocument()
 		{
 			RAMDirectory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(MakeDocumentWithFields());
 			writer.Close();
 			
@@ -180,24 +180,24 @@
 			Query query = new TermQuery(new Term("keyword", "test1"));
 			
 			// ensure that queries return expected results without DateFilter first
-			Hits hits = searcher.Search(query);
-			Assert.AreEqual(1, hits.Length());
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
 			
-			DoAssert(hits.Doc(0), true);
+			DoAssert(searcher.Doc(hits[0].doc), true);
 			searcher.Close();
 		}
 		
 		private Lucene.Net.Documents.Document MakeDocumentWithFields()
 		{
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("keyword", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
-			doc.Add(new Field("keyword", "test2", Field.Store.YES, Field.Index.UN_TOKENIZED));
-			doc.Add(new Field("text", "test1", Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("text", "test2", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("keyword", "test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("keyword", "test2", Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("text", "test1", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("text", "test2", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("unindexed", "test1", Field.Store.YES, Field.Index.NO));
 			doc.Add(new Field("unindexed", "test2", Field.Store.YES, Field.Index.NO));
-			doc.Add(new Field("unstored", "test1", Field.Store.NO, Field.Index.TOKENIZED));
-			doc.Add(new Field("unstored", "test2", Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("unstored", "test1", Field.Store.NO, Field.Index.ANALYZED));
+			doc.Add(new Field("unstored", "test2", Field.Store.NO, Field.Index.ANALYZED));
 			return doc;
 		}
 		
@@ -237,13 +237,13 @@
 		public virtual void  TestFieldSetValue()
 		{
 			
-			Field field = new Field("id", "id1", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			Field field = new Field("id", "id1", Field.Store.YES, Field.Index.NOT_ANALYZED);
 			Document doc = new Document();
 			doc.Add(field);
-			doc.Add(new Field("keyword", "test", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
 			
 			RAMDirectory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(doc);
 			field.SetValue("id2");
 			writer.AddDocument(doc);
@@ -256,12 +256,12 @@
 			Query query = new TermQuery(new Term("keyword", "test"));
 			
 			// ensure that queries return expected results without DateFilter first
-			Hits hits = searcher.Search(query);
-			Assert.AreEqual(3, hits.Length());
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(3, hits.Length);
 			int result = 0;
 			for (int i = 0; i < 3; i++)
 			{
-				Document doc2 = hits.Doc(i);
+				Document doc2 = searcher.Doc(hits[i].doc);
 				Field f = doc2.GetField("id");
 				if (f.StringValue().Equals("id1"))
 					result |= 1;
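
The Hits-based calls removed above give way to the 2.4 TopDocs idiom used throughout this commit: search with an explicit result cap, then load stored documents through the searcher. A condensed, self-contained sketch under the same API (the RAMDirectory contents, field names and the 1000-hit cap are illustrative):

    // types from Lucene.Net.Documents, .Index, .Search, .Store and .Analysis.Standard
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.Add(new Field("keyword", "test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc);
    writer.Close();

    IndexSearcher searcher = new IndexSearcher(dir);
    Query query = new TermQuery(new Term("keyword", "test1"));
    ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;  // was: Hits hits = searcher.Search(query)
    for (int i = 0; i < hits.Length; i++)
    {
        Document hitDoc = searcher.Doc(hits[i].doc);                 // was: hits.Doc(i)
        System.Console.Out.WriteLine(hitDoc.Get("keyword"));
    }
    searcher.Close();
    dir.Close();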

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/DocHelper.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs Wed Jul 29 18:04:12 2009
@@ -171,7 +171,7 @@
 		/// <throws>  IOException </throws>
 		public static SegmentInfo WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc)
 		{
-			IndexWriter writer = new IndexWriter(dir, analyzer);
+			IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetSimilarity(similarity);
 			//writer.setUseCompoundFile(false);
 			writer.AddDocument(doc);
@@ -187,21 +187,21 @@
 		}
 		static DocHelper()
 		{
-			textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
-			textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
-			compressedTextField2 = new Field(COMPRESSED_TEXT_FIELD_2_KEY, FIELD_2_COMPRESSED_TEXT, Field.Store.COMPRESS, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
-			textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
+			textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO);
+			textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+			compressedTextField2 = new Field(COMPRESSED_TEXT_FIELD_2_KEY, FIELD_2_COMPRESSED_TEXT, Field.Store.COMPRESS, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+			textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.ANALYZED);
 			{
 				textField3.SetOmitNorms(true);
 			}
-			keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.UN_TOKENIZED);
+			keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.NOT_ANALYZED);
 			noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NO_NORMS);
 			unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, Field.Store.YES, Field.Index.NO);
-			unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO);
-			unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES);
-			lazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
-			textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
-			textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+			unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO);
+			unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES);
+			lazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.ANALYZED);
+			textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO);
+			textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
 			{
 				//Initialize the large Lazy Field
 				System.Text.StringBuilder buffer = new System.Text.StringBuilder();
@@ -219,7 +219,7 @@
 				}
 				lazyFieldBinary = new Field(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES, Field.Store.YES);
 				LARGE_LAZY_FIELD_TEXT = buffer.ToString();
-				largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
+				largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.ANALYZED);
 				fields = new Field[] { textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField };
 				for (int i = 0; i < fields.Length; i++)
 				{
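
The constant substitutions in this file (and mirrored across the rest of the test tree in this commit) follow the 2.4 renaming of the Field.Index flags; the indexing behaviour itself is unchanged. A small sketch of the mapping (field names and values are made up):

    Document doc = new Document();
    // Field.Index.TOKENIZED    -> Field.Index.ANALYZED       (run through the analyzer)
    // Field.Index.UN_TOKENIZED -> Field.Index.NOT_ANALYZED   (indexed as a single term)
    doc.Add(new Field("title", "hello world", Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field("id", "42", Field.Store.YES, Field.Index.NOT_ANALYZED));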

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/MockIndexInput.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs Wed Jul 29 18:04:12 2009
@@ -24,7 +24,8 @@
 	
 	public class MockIndexInput : BufferedIndexInput
 	{
-		private byte[] buffer;
+        // 'new' is required here: BufferedIndexInput already declares a buffer field, so this field hides the inherited one
+		new private byte[] buffer;
 		private int pointer = 0;
 		private long length;
 		

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/TestRAMDirectory.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs Wed Jul 29 18:04:12 2009
@@ -19,16 +19,18 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Directory = Lucene.Net.Store.Directory;
+using IndexInput = Lucene.Net.Store.IndexInput;
+using IndexOutput = Lucene.Net.Store.IndexOutput;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
-using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using English = Lucene.Net.Util.English;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
@@ -39,9 +41,6 @@
 	/// but not one of them uses a constructor other than the default constructor.
 	/// 
 	/// </summary>
-	/// <author>  Bernhard Messer
-	/// 
-	/// </author>
 	/// <version>  $Id: RAMDirectory.java 150537 2004-09-28 22:45:26 +0200 (Di, 28 Sep 2004) cutting $
 	/// </version>
 	[TestFixture]
@@ -77,7 +76,7 @@
 				for (int j = 1; j < Enclosing_Instance.docsPerThread; j++)
 				{
 					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-					doc.Add(new Field("sizeContent", English.IntToEnglish(num * Enclosing_Instance.docsPerThread + j).Trim(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+					doc.Add(new Field("sizeContent", English.IntToEnglish(num * Enclosing_Instance.docsPerThread + j).Trim(), Field.Store.YES, Field.Index.NOT_ANALYZED));
 					try
 					{
 						writer.AddDocument(doc);
@@ -109,13 +108,13 @@
 				throw new System.IO.IOException("java.io.tmpdir undefined, cannot run test");
 			indexDir = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "RAMDirIndex"));
 			
-			IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			// add some documents
 			Lucene.Net.Documents.Document doc = null;
 			for (int i = 0; i < docsToAdd; i++)
 			{
 				doc = new Lucene.Net.Documents.Document();
-				doc.Add(new Field("content", English.IntToEnglish(i).Trim(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+				doc.Add(new Field("content", English.IntToEnglish(i).Trim(), Field.Store.YES, Field.Index.NOT_ANALYZED));
 				writer.AddDocument(doc);
 			}
 			Assert.AreEqual(docsToAdd, writer.DocCount());
@@ -216,9 +215,8 @@
 		[Test]
 		public virtual void  TestRAMDirectorySize()
 		{
-			
 			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
-			IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false);
+			IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 			writer.Optimize();
 			
 			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
@@ -229,12 +227,12 @@
 				int num = i;
 				threads[i] = new AnonymousClassThread(num, writer, ramDir, this);
 			}
-			for (int i = 0; i < numThreads; i++)
-				threads[i].Start();
-			for (int i = 0; i < numThreads; i++)
-				threads[i].Join();
-			
-			writer.Optimize();
+            for (int i = 0; i < numThreads; i++)
+                threads[i].Start();
+            for (int i = 0; i < numThreads; i++)
+                threads[i].Join();
+
+            writer.Optimize();
 			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
 			
 			writer.Close();
@@ -253,22 +251,35 @@
 			// out_Renamed.Close();
 			Assert.IsTrue(headerSize < bos.Length, "contains more than just header");
 		}
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+            // cleanup 
+            bool tmpBool;
+            if (System.IO.File.Exists(indexDir.FullName))
+                tmpBool = true;
+            else
+                tmpBool = System.IO.Directory.Exists(indexDir.FullName);
+            if (indexDir != null && tmpBool)
+                RmDir(indexDir);
+        }
 		
-		[TearDown]
-		public override void TearDown()
-		{
-			base.TearDown();
-			// cleanup 
-			bool tmpBool;
-			if (System.IO.File.Exists(indexDir.FullName))
-				tmpBool = true;
-			else
-				tmpBool = System.IO.Directory.Exists(indexDir.FullName);
-			if (indexDir != null && tmpBool)
-			{
-				RmDir(indexDir);
-			}
-		}
+        // LUCENE-1196
+        [Test]
+        public void TestIllegalEOF()
+        {
+            RAMDirectory dir = new RAMDirectory();
+            IndexOutput o = dir.CreateOutput("out");
+            byte[] b = new byte[1024];
+            o.WriteBytes(b, 0, 1024);
+            o.Close();
+            IndexInput i = dir.OpenInput("out");
+            i.Seek(1024);
+            i.Close();
+            dir.Close();
+        }
 
 		private void  RmDir(System.IO.FileInfo dir)
 		{
@@ -305,5 +316,5 @@
 				tmpBool2 = false;
 			bool generatedAux2 = tmpBool2;
 		}
-	}
-}
\ No newline at end of file
+    }
+}

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAddIndexesNoOptimize.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs Wed Jul 29 18:04:12 2009
@@ -19,13 +19,15 @@
 
 using NUnit.Framework;
 
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using PhraseQuery = Lucene.Net.Search.PhraseQuery;
 
 namespace Lucene.Net.Index
 {
@@ -127,11 +129,127 @@
 			VerifyTermDocs(dir, new Term("content", "bbb"), 51);
 		}
 		
+		[Test]
+  public void TestWithPendingDeletes() {
+    // main directory
+    Directory dir = new RAMDirectory();
+    // auxiliary directory
+    Directory aux = new RAMDirectory();
+
+    SetUpDirs(dir, aux);
+    IndexWriter writer = NewWriter(dir, false);
+    writer.AddIndexesNoOptimize(new Directory[] {aux});
+
+    // Adds 10 docs, then replaces them with another 10
+    // docs, so 10 pending deletes:
+    for (int i = 0; i < 20; i++) {
+      Document doc = new Document();
+      doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
+      doc.Add(new Field("content", "bbb " + i, Field.Store.NO,
+                        Field.Index.ANALYZED));
+      writer.UpdateDocument(new Term("id", "" + (i%10)), doc);
+    }
+    // Deletes one of the 10 added docs, leaving 9:
+    PhraseQuery q = new PhraseQuery();
+    q.Add(new Term("content", "bbb"));
+    q.Add(new Term("content", "14"));
+    writer.DeleteDocuments(q);
+
+    writer.Optimize();
+
+    VerifyNumDocs(dir, 1039);
+    VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+    VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+
+    writer.Close();
+    dir.Close();
+    aux.Close();
+  }
+
+		[Test]
+  public void TestWithPendingDeletes2() {
+    // main directory
+    Directory dir = new RAMDirectory();
+    // auxiliary directory
+    Directory aux = new RAMDirectory();
+
+    SetUpDirs(dir, aux);
+    IndexWriter writer = NewWriter(dir, false);
+
+    // Adds 10 docs, then replaces them with another 10
+    // docs, so 10 pending deletes:
+    for (int i = 0; i < 20; i++) {
+      Document doc = new Document();
+      doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
+      doc.Add(new Field("content", "bbb " + i, Field.Store.NO,
+                        Field.Index.ANALYZED));
+      writer.UpdateDocument(new Term("id", "" + (i%10)), doc);
+    }
+
+    writer.AddIndexesNoOptimize(new Directory[] {aux});
+
+    // Deletes one of the 10 added docs, leaving 9:
+    PhraseQuery q = new PhraseQuery();
+    q.Add(new Term("content", "bbb"));
+    q.Add(new Term("content", "14"));
+    writer.DeleteDocuments(q);
+
+    writer.Optimize();
+
+    VerifyNumDocs(dir, 1039);
+    VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+    VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+
+    writer.Close();
+    dir.Close();
+    aux.Close();
+  }
+
+		[Test]
+  public void TestWithPendingDeletes3() {
+    // main directory
+    Directory dir = new RAMDirectory();
+    // auxiliary directory
+    Directory aux = new RAMDirectory();
+
+    SetUpDirs(dir, aux);
+    IndexWriter writer = NewWriter(dir, false);
+
+    // Adds 10 docs, then replaces them with another 10
+    // docs, so 10 pending deletes:
+    for (int i = 0; i < 20; i++) {
+      Document doc = new Document();
+      doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
+      doc.Add(new Field("content", "bbb " + i, Field.Store.NO,
+                        Field.Index.ANALYZED));
+      writer.UpdateDocument(new Term("id", "" + (i%10)), doc);
+    }
+
+    // Deletes one of the 10 added docs, leaving 9:
+    PhraseQuery q = new PhraseQuery();
+    q.Add(new Term("content", "bbb"));
+    q.Add(new Term("content", "14"));
+    writer.DeleteDocuments(q);
+
+    writer.AddIndexesNoOptimize(new Directory[] {aux});
+
+    writer.Optimize();
+
+    VerifyNumDocs(dir, 1039);
+    VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+    VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+
+    writer.Close();
+    dir.Close();
+    aux.Close();
+  }
+
+
 		// case 0: add self or exceed maxMergeDocs, expect exception
 		[Test]
 		public virtual void  TestAddSelf()
-		{
-			// main directory
+        {
+            // main directory
 			Directory dir = new RAMDirectory();
 			// auxiliary directory
 			Directory aux = new RAMDirectory();
@@ -241,7 +359,7 @@
 			IndexWriter writer = NewWriter(dir, false);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(4);
-			writer.AddIndexesNoOptimize(new Directory[] { aux, aux });
+			writer.AddIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
 			Assert.AreEqual(1060, writer.DocCount());
 			Assert.AreEqual(1000, writer.GetDocCount(0));
 			writer.Close();
@@ -273,7 +391,7 @@
 			writer.SetMaxBufferedDocs(4);
 			writer.SetMergeFactor(4);
 			
-			writer.AddIndexesNoOptimize(new Directory[]{aux, aux});
+			writer.AddIndexesNoOptimize(new Directory[]{aux, new RAMDirectory(aux)});
 			Assert.AreEqual(1020, writer.DocCount());
 			Assert.AreEqual(1000, writer.GetDocCount(0));
 			writer.Close();
@@ -333,7 +451,7 @@
 		
 		private IndexWriter NewWriter(Directory dir, bool create)
 		{
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create);
+			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), create);
 			writer.SetMergePolicy(new LogDocMergePolicy());
 			return writer;
 		}
@@ -343,7 +461,7 @@
 			for (int i = 0; i < numDocs; i++)
 			{
 				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-				doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+				doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
 		}
@@ -353,7 +471,7 @@
 			for (int i = 0; i < numDocs; i++)
 			{
 				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-				doc.Add(new Field("content", "bbb", Field.Store.NO, Field.Index.TOKENIZED));
+				doc.Add(new Field("content", "bbb", Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
 		}
@@ -413,7 +531,7 @@
         public void TestHangOnClose()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMergePolicy(new LogByteSizeMergePolicy());
             writer.SetMaxBufferedDocs(5);
             writer.SetUseCompoundFile(false);
@@ -434,7 +552,7 @@
             writer.Close();
 
             Directory dir2 = new MockRAMDirectory();
-            writer = new IndexWriter(dir2, false, new WhitespaceAnalyzer(), true);
+            writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
             LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
             lmp.SetMinMergeMB(0.0001);
             writer.SetMergePolicy(lmp);
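
The three TestWithPendingDeletes variants added earlier in this file exercise the same sequence and differ only in when AddIndexesNoOptimize runs relative to the buffered deletes. A condensed sketch of that sequence on its own (directory, analyzer and field values mirror the tests but are illustrative):

    // types from Lucene.Net.Documents, .Index, .Search, .Store and .Analysis
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < 20; i++)
    {
        Document doc = new Document();
        doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
        writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);  // second pass replaces the first ten docs -> ten pending deletes
    }
    PhraseQuery q = new PhraseQuery();
    q.Add(new Term("content", "bbb"));
    q.Add(new Term("content", "14"));
    writer.DeleteDocuments(q);   // queues one more delete by query
    writer.Optimize();           // merging applies the buffered deletes
    writer.Close();
    dir.Close();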

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAtomicUpdate.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs Wed Jul 29 18:04:12 2009
@@ -36,7 +36,22 @@
 	{
 		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
 		private static readonly System.Random RANDOM = new System.Random();
-		
+
+        public class MockIndexWriter : IndexWriter
+        {
+            public MockIndexWriter(Directory d, bool autoCommit, Analyzer a, bool create)
+                : base(d, autoCommit, a, create)
+            {
+            }
+
+            override protected bool TestPoint(string name)
+            {
+                if (RANDOM.Next(4) == 2)
+                    System.Threading.Thread.Sleep(1);
+                return true;
+            }
+        }
+
 		abstract public class TimedThread : SupportClass.ThreadClass
 		{
 			internal bool failed;
@@ -67,7 +82,8 @@
 				}
 				catch (System.Exception e)
 				{
-					System.Console.Out.WriteLine(e.StackTrace);
+                    System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread.Name + ": exc");
+                    System.Console.Out.WriteLine(e.StackTrace);
 					failed = true;
 				}
 			}
@@ -97,9 +113,8 @@
 				for (int i = 0; i < 100; i++)
 				{
 					Document d = new Document();
-					int n = Lucene.Net.Index.TestAtomicUpdate.RANDOM.Next();
-					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
-					d.Add(new Field("contents", English.IntToEnglish(i + 10 * count), Field.Store.NO, Field.Index.TOKENIZED));
+					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					d.Add(new Field("contents", English.IntToEnglish(i + 10 * count), Field.Store.NO, Field.Index.ANALYZED));
 					writer.UpdateDocument(new Term("id", System.Convert.ToString(i)), d);
 				}
 			}
@@ -117,14 +132,7 @@
 			public override void  DoWork()
 			{
 				IndexReader r = IndexReader.Open(directory);
-				try
-				{
-					Assert.AreEqual(100, r.NumDocs());
-				}
-				catch (System.Exception t)
-				{
-					throw t;
-				}
+				Assert.AreEqual(100, r.NumDocs());
 				r.Close();
 			}
 		}
@@ -138,18 +146,24 @@
 			
 			TimedThread[] threads = new TimedThread[4];
 			
-			IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
-			
+			IndexWriter writer = new MockIndexWriter(directory, true, ANALYZER, true);
+            writer.SetMaxBufferedDocs(7);
+            writer.SetMergeFactor(3);
+
 			// Establish a base index of 100 docs:
 			for (int i = 0; i < 100; i++)
 			{
 				Document d = new Document();
-				d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
-				d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+				d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+				d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(d);
 			}
-			writer.Flush();
-			
+			writer.Commit();
+
+            IndexReader r = IndexReader.Open(directory);
+            Assert.AreEqual(100, r.NumDocs());
+            r.Close();
+
 			IndexerThread indexerThread = new IndexerThread(writer, threads);
 			threads[0] = indexerThread;
 			indexerThread.Start();


