lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r411501 [24/30] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/DemoLib/HTML/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Docu...
Date Sun, 04 Jun 2006 02:41:25 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/DocHelper.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs Sat Jun  3 19:41:13 2006
@@ -1,5 +1,5 @@
 /*
- * Copyright 2004 The Apache Software Foundation
+ * Copyright 2005 The Apache Software Foundation
  * 
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,13 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/// <summary> Created by IntelliJ IDEA.
-/// User: Grant Ingersoll
-/// Date: Feb 2, 2004
-/// Time: 6:16:12 PM
-/// $Id: DocHelper.java,v 1.1 2004/02/20 20:14:55 cutting Exp $
-/// Copyright 2004.  Center For Natural Language Processing
-/// </summary>
+
 using System;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -27,159 +21,204 @@
 using Field = Lucene.Net.Documents.Field;
 using Similarity = Lucene.Net.Search.Similarity;
 using Directory = Lucene.Net.Store.Directory;
+
 namespace Lucene.Net.Index
 {
 	
-	/// <summary> 
-	/// 
-	/// 
-	/// </summary>
 	class DocHelper
 	{
-		public const System.String FIELD_1_TEXT = "Field one text";
+		public const System.String FIELD_1_TEXT = "field one text";
 		public const System.String TEXT_FIELD_1_KEY = "textField1";
 		public static Field textField1;
 		
-		public const System.String FIELD_2_TEXT = "Field Field Field two text";
+		public const System.String FIELD_2_TEXT = "field field field two text";
+		//Fields will be lexicographically sorted.  So, the order is: field, text, two
 		public static readonly int[] FIELD_2_FREQS = new int[]{3, 1, 1};
 		public const System.String TEXT_FIELD_2_KEY = "textField2";
 		public static Field textField2;
 		
+		public const System.String FIELD_3_TEXT = "aaaNoNorms aaaNoNorms bbbNoNorms";
+		public const System.String TEXT_FIELD_3_KEY = "textField3";
+		public static Field textField3;
+		
 		public const System.String KEYWORD_TEXT = "Keyword";
 		public const System.String KEYWORD_FIELD_KEY = "keyField";
 		public static Field keyField;
 		
-		public const System.String UNINDEXED_FIELD_TEXT = "unindexed Field text";
+		public const System.String NO_NORMS_TEXT = "omitNormsText";
+		public const System.String NO_NORMS_KEY = "omitNorms";
+		public static Field noNormsField;
+		
+		public const System.String UNINDEXED_FIELD_TEXT = "unindexed field text";
 		public const System.String UNINDEXED_FIELD_KEY = "unIndField";
 		public static Field unIndField;
 		
-		public const System.String UNSTORED_1_FIELD_TEXT = "unstored Field text";
+		
+		public const System.String UNSTORED_1_FIELD_TEXT = "unstored field text";
 		public const System.String UNSTORED_FIELD_1_KEY = "unStoredField1";
 		public static Field unStoredField1;
 		
-		public const System.String UNSTORED_2_FIELD_TEXT = "unstored Field text";
+		public const System.String UNSTORED_2_FIELD_TEXT = "unstored field text";
 		public const System.String UNSTORED_FIELD_2_KEY = "unStoredField2";
 		public static Field unStoredField2;
 		
-		//  public static Set fieldNamesSet = null;
-		//  public static Set fieldValuesSet = null;
 		public static System.Collections.IDictionary nameValues = null;
 		
+		// ordered list of all the fields...
+		// could use LinkedHashMap for this purpose if Java1.4 is OK
+		public static Field[] fields = new Field[]{textField1, textField2, textField3, keyField, noNormsField, unIndField, unStoredField1, unStoredField2};
+		
+		// Map<String fieldName, Field field>
+		public static System.Collections.IDictionary all = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary indexed = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary stored = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary unstored = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary unindexed = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary termvector = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary notermvector = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary noNorms = new System.Collections.Hashtable();
+		
+		
+		private static void  Add(System.Collections.IDictionary map, Field field)
+		{
+			map[field.Name()] = field;
+		}
+		
 		/// <summary> Adds the fields above to a document </summary>
 		/// <param name="doc">The document to write
 		/// </param>
-		public static void  SetupDoc(Document doc)
+		public static void  SetupDoc(Lucene.Net.Documents.Document doc)
 		{
-			doc.Add(textField1);
-			doc.Add(textField2);
-			doc.Add(keyField);
-			doc.Add(unIndField);
-			doc.Add(unStoredField1);
-			doc.Add(unStoredField2);
+			for (int i = 0; i < fields.Length; i++)
+			{
+				doc.Add(fields[i]);
+			}
 		}
+		
 		/// <summary> Writes the document to the directory using a segment named "test"</summary>
-		/// <param name="">dir
+		/// <param name="dir">
 		/// </param>
-		/// <param name="">doc
+		/// <param name="doc">
 		/// </param>
-		public static void  WriteDoc(Directory dir, Document doc)
+		/// <throws>  IOException </throws>
+		public static void  WriteDoc(Directory dir, Lucene.Net.Documents.Document doc)
 		{
-			
 			WriteDoc(dir, "test", doc);
 		}
+		
 		/// <summary> Writes the document to the directory in the given segment</summary>
-		/// <param name="">dir
+		/// <param name="dir">
 		/// </param>
-		/// <param name="">segment
+		/// <param name="segment">
 		/// </param>
-		/// <param name="">doc
+		/// <param name="doc">
 		/// </param>
-		public static void  WriteDoc(Directory dir, System.String segment, Document doc)
+		/// <throws>  IOException </throws>
+		public static void  WriteDoc(Directory dir, System.String segment, Lucene.Net.Documents.Document doc)
 		{
-			Analyzer analyzer = new WhitespaceAnalyzer();
 			Similarity similarity = Similarity.GetDefault();
-			WriteDoc(dir, analyzer, similarity, segment, doc);
+			WriteDoc(dir, new WhitespaceAnalyzer(), similarity, segment, doc);
 		}
+		
 		/// <summary> Writes the document to the directory segment named "test" using the specified analyzer and similarity</summary>
-		/// <param name="">dir
+		/// <param name="dir">
 		/// </param>
-		/// <param name="">analyzer
+		/// <param name="analyzer">
 		/// </param>
-		/// <param name="">similarity
+		/// <param name="similarity">
 		/// </param>
-		/// <param name="">doc
+		/// <param name="doc">
 		/// </param>
-		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc)
+		/// <throws>  IOException </throws>
+		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Lucene.Net.Documents.Document doc)
 		{
 			WriteDoc(dir, analyzer, similarity, "test", doc);
 		}
+		
 		/// <summary> Writes the document to the directory segment using the analyzer and the similarity score</summary>
-		/// <param name="">dir
+		/// <param name="dir">
 		/// </param>
-		/// <param name="">analyzer
+		/// <param name="analyzer">
 		/// </param>
-		/// <param name="">similarity
+		/// <param name="similarity">
 		/// </param>
-		/// <param name="">segment
+		/// <param name="segment">
 		/// </param>
-		/// <param name="">doc
+		/// <param name="doc">
 		/// </param>
-		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, System.String segment, Document doc)
+		/// <throws>  IOException </throws>
+		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, System.String segment, Lucene.Net.Documents.Document doc)
 		{
 			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
-			try
-			{
-				writer.AddDocument(segment, doc);
-			}
-			catch (System.IO.IOException e)
+			writer.AddDocument(segment, doc);
+		}
+		
+		public static int NumFields(Lucene.Net.Documents.Document doc)
+		{
+			System.Collections.IEnumerator fields = doc.Fields();
+			int result = 0;
+			while (fields.MoveNext())
 			{
-				System.Console.Error.WriteLine(e.StackTrace);
+				System.String name = fields.Current.ToString();
+				name += ""; // avoid compiler warning
+				result++;
 			}
+			return result;
 		}
-		
-		public static int NumFields(Document doc)
+
+        static DocHelper()
 		{
-            int result = 0;
-            foreach (Field field in doc.Fields())
+            textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
+            fields[0] = textField1;
+            textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+            fields[1] = textField2;
+            textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
+            fields[2] = textField3;
             {
-                System.Object generatedAux = field;
-                result++;
+                textField3.SetOmitNorms(true);
             }
-            return result;
-		}
-		static DocHelper()
-		{
-			textField1 = Field.Text(TEXT_FIELD_1_KEY, FIELD_1_TEXT, false);
-			textField2 = Field.Text(TEXT_FIELD_2_KEY, FIELD_2_TEXT, true);
-			keyField = Field.Keyword(KEYWORD_FIELD_KEY, KEYWORD_TEXT);
-			unIndField = Field.UnIndexed(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT);
-			unStoredField1 = Field.UnStored(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, false);
-			unStoredField2 = Field.UnStored(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, true);
+            keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.UN_TOKENIZED);
+            fields[3] = keyField;
+            noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NO_NORMS);
+            fields[4] = noNormsField;
+            unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, Field.Store.YES, Field.Index.NO);
+            fields[5] = unIndField;
+            unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO);
+            fields[6] = unStoredField1;
+            unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES);
+            fields[7] = unStoredField2;
+			{
+				for (int i = 0; i < fields.Length; i++)
+				{
+					Field f = fields[i];
+					Add(all, f);
+					if (f.IsIndexed())
+						Add(indexed, f);
+					else
+						Add(unindexed, f);
+					if (f.IsTermVectorStored())
+						Add(termvector, f);
+					if (f.IsIndexed() && !f.IsTermVectorStored())
+						Add(notermvector, f);
+					if (f.IsStored())
+						Add(stored, f);
+					else
+						Add(unstored, f);
+					if (f.GetOmitNorms())
+						Add(noNorms, f);
+				}
+			}
 			{
-				
 				nameValues = new System.Collections.Hashtable();
 				nameValues[TEXT_FIELD_1_KEY] = FIELD_1_TEXT;
 				nameValues[TEXT_FIELD_2_KEY] = FIELD_2_TEXT;
+				nameValues[TEXT_FIELD_3_KEY] = FIELD_3_TEXT;
 				nameValues[KEYWORD_FIELD_KEY] = KEYWORD_TEXT;
+				nameValues[NO_NORMS_KEY] = NO_NORMS_TEXT;
 				nameValues[UNINDEXED_FIELD_KEY] = UNINDEXED_FIELD_TEXT;
 				nameValues[UNSTORED_FIELD_1_KEY] = UNSTORED_1_FIELD_TEXT;
 				nameValues[UNSTORED_FIELD_2_KEY] = UNSTORED_2_FIELD_TEXT;
 			}
 		}
 	}
-	/*
-	fieldNamesSet = new HashSet();
-	fieldNamesSet.add(TEXT_FIELD_1_KEY);
-	fieldNamesSet.add(TEXT_FIELD_2_KEY);
-	fieldNamesSet.add(KEYWORD_FIELD_KEY);
-	fieldNamesSet.add(UNINDEXED_FIELD_KEY);
-	fieldNamesSet.add(UNSTORED_FIELD_1_KEY);
-	fieldNamesSet.add(UNSTORED_FIELD_2_KEY);
-	fieldValuesSet = new HashSet();
-	fieldValuesSet.add(FIELD_1_TEXT);
-	fieldValuesSet.add(FIELD_2_TEXT);
-	fieldValuesSet.add(KEYWORD_TEXT);
-	fieldValuesSet.add(UNINDEXED_FIELD_TEXT);
-	fieldValuesSet.add(UNSTORED_1_FIELD_TEXT);
-	fieldValuesSet.add(UNSTORED_2_FIELD_TEXT);*/
 }

Added: incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/MockIndexInput.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs Sat Jun  3 19:41:13 2006
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
+
+namespace Lucene.Net.Index
+{
+	
+	public class MockIndexInput : BufferedIndexInput
+	{
+		private byte[] buffer;
+		private int pointer = 0;
+		private long length;
+		
+		public MockIndexInput(byte[] bytes)
+		{
+			buffer = bytes;
+			length = bytes.Length;
+		}
+		
+		public override void  ReadInternal(byte[] dest, int destOffset, int len)
+		{
+			int remainder = len;
+			int start = pointer;
+			while (remainder != 0)
+			{
+				//          int bufferNumber = start / buffer.length;
+				int bufferOffset = start % buffer.Length;
+				int bytesInBuffer = buffer.Length - bufferOffset;
+				int bytesToCopy = bytesInBuffer >= remainder?remainder:bytesInBuffer;
+				Array.Copy(buffer, bufferOffset, dest, destOffset, bytesToCopy);
+				destOffset += bytesToCopy;
+				start += bytesToCopy;
+				remainder -= bytesToCopy;
+			}
+			pointer += len;
+		}
+		
+		public override void  Close()
+		{
+			// ignore
+		}
+		
+		public override void  SeekInternal(long pos)
+		{
+			pointer = (int) pos;
+		}
+		
+		public override long Length()
+		{
+			return length;
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestFSDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/TestFSDirectory.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestFSDirectory.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestFSDirectory.cs Sat Jun  3 19:41:13 2006
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+
+namespace Lucene.Net.Index.Store
+{
+	
+	/// <summary> Test to illustrate the problem found when trying to open an IndexWriter in
+	/// a situation where the property <code>Lucene.Net.lockDir</code>
+	/// was not set and the one specified by <code>java.io.tmpdir</code> had been
+	/// set to a non-existent path. What I observed is that this combination of
+	/// conditions resulted in a <code>NullPointerException</code> being thrown in
+	/// the <code>create()</code> method in <code>FSDirectory</code>, where
+	/// <code>files.length</code> is de-referenced, but <code>files</code> is
+	/// <code>null</code>.
+	/// 
+	/// </summary>
+	/// <author>  Michael Goddard
+	/// </author>
+	
+    [TestFixture]
+	public class TestFSDirectory
+	{
+		
+		/// <summary> What happens if the Lucene lockDir doesn't exist?
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+        public virtual void  TestNonExistentTmpDir()
+		{
+            orgApacheLuceneLockDir = System.Configuration.ConfigurationSettings.AppSettings.Get("Lucene.Net.lockDir");
+			//System.Configuration.ConfigurationSettings.AppSettings.Set("Lucene.Net.lockDir", NON_EXISTENT_DIRECTORY); // {{Aroush}} how do we setup an envirement variable in C#?
+			System.String exceptionClassName = OpenIndexWriter();
+			if (exceptionClassName == null || exceptionClassName.Equals("java.io.IOException"))
+				Assert.IsTrue(true);
+			else
+				Assert.Fail("Caught an unexpected Exception");
+		}
+		
+		/// <summary> What happens if the Lucene lockDir is a regular file instead of a
+		/// directory?
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+        public virtual void  TestTmpDirIsPlainFile()
+		{
+			shouldBeADirectory = new System.IO.FileInfo(NON_EXISTENT_DIRECTORY);
+            shouldBeADirectory.Create().Close();
+            System.String exceptionClassName = OpenIndexWriter();
+			if (exceptionClassName == null || exceptionClassName.Equals("java.io.IOException"))
+				Assert.IsTrue(true);
+			else
+				Assert.Fail("Caught an unexpected Exception");
+		}
+		
+		public static readonly System.String FILE_SEP = System.IO.Path.DirectorySeparatorChar.ToString();
+		
+		public static readonly System.String NON_EXISTENT_DIRECTORY = System.IO.Path.GetTempPath() + FILE_SEP + "highly_improbable_directory_name";
+		
+		public static readonly System.String TEST_INDEX_DIR = System.IO.Path.GetTempPath() + FILE_SEP + "temp_index";
+		
+		private System.String orgApacheLuceneLockDir;
+		
+		private System.IO.FileInfo shouldBeADirectory;
+		
+        [TestFixtureTearDown]
+		public virtual void  TearDown()
+		{
+			if (orgApacheLuceneLockDir != null)
+			{
+				System.Configuration.ConfigurationSettings.AppSettings.Set("Lucene.Net.lockDir", orgApacheLuceneLockDir);
+			}
+            bool tmpBool = false;
+            if ((shouldBeADirectory != null) && 
+                System.IO.File.Exists(shouldBeADirectory.FullName) && 
+                System.IO.Directory.Exists(shouldBeADirectory.FullName))
+            {
+                tmpBool = true;
+            }
+            if (shouldBeADirectory != null && tmpBool)
+			{
+				try
+				{
+					bool tmpBool2;
+					if (System.IO.File.Exists(shouldBeADirectory.FullName))
+					{
+						System.IO.File.Delete(shouldBeADirectory.FullName);
+						tmpBool2 = true;
+					}
+					else if (System.IO.Directory.Exists(shouldBeADirectory.FullName))
+					{
+						System.IO.Directory.Delete(shouldBeADirectory.FullName);
+						tmpBool2 = true;
+					}
+					else
+						tmpBool2 = false;
+					bool generatedAux = tmpBool2;
+				}
+				catch (System.Exception e)
+				{
+                    System.Console.Error.WriteLine(e.StackTrace);
+				}
+			}
+			System.IO.FileInfo deletableIndex = new System.IO.FileInfo(TEST_INDEX_DIR);
+			bool tmpBool3;
+			if (System.IO.File.Exists(deletableIndex.FullName))
+				tmpBool3 = true;
+			else
+				tmpBool3 = System.IO.Directory.Exists(deletableIndex.FullName);
+			if (tmpBool3)
+				try
+				{
+					RmDir(deletableIndex);
+				}
+				catch (System.Exception e)
+				{
+					System.Console.Error.WriteLine(e.StackTrace);
+				}
+		}
+		
+		/// <summary> Open an IndexWriter<br>
+		/// Catch any (expected) IOException<br>
+		/// Close the IndexWriter
+		/// </summary>
+		private static System.String OpenIndexWriter()
+		{
+			IndexWriter iw = null;
+			System.String ret = null;
+			try
+			{
+				iw = new IndexWriter(TEST_INDEX_DIR, new StandardAnalyzer(), true);
+			}
+			catch (System.IO.IOException e)
+			{
+				ret = e.ToString();
+				System.Console.Error.WriteLine(e.StackTrace);
+			}
+			catch (System.NullReferenceException e)
+			{
+				ret = e.ToString();
+				System.Console.Error.WriteLine(e.StackTrace);
+			}
+			finally
+			{
+				if (iw != null)
+				{
+					try
+					{
+						iw.Close();
+					}
+					catch (System.IO.IOException ioe)
+					{
+						// ignore this
+					}
+				}
+			}
+			return ret;
+		}
+		
+		private static void  RmDir(System.IO.FileInfo dirName)
+		{
+			bool tmpBool;
+			if (System.IO.File.Exists(dirName.FullName))
+				tmpBool = true;
+			else
+				tmpBool = System.IO.Directory.Exists(dirName.FullName);
+			if (tmpBool)
+			{
+				if (System.IO.Directory.Exists(dirName.FullName))
+				{
+					System.IO.FileInfo[] contents = SupportClass.FileSupport.GetFiles(dirName);
+					for (int i = 0; i < contents.Length; i++)
+						RmDir(contents[i]);
+					bool tmpBool2;
+					if (System.IO.File.Exists(dirName.FullName))
+					{
+						System.IO.File.Delete(dirName.FullName);
+						tmpBool2 = true;
+					}
+					else if (System.IO.Directory.Exists(dirName.FullName))
+					{
+						System.IO.Directory.Delete(dirName.FullName);
+						tmpBool2 = true;
+					}
+					else
+						tmpBool2 = false;
+					bool generatedAux = tmpBool2;
+				}
+				else
+				{
+					bool tmpBool3;
+					if (System.IO.File.Exists(dirName.FullName))
+					{
+						System.IO.File.Delete(dirName.FullName);
+						tmpBool3 = true;
+					}
+					else if (System.IO.Directory.Exists(dirName.FullName))
+					{
+						System.IO.Directory.Delete(dirName.FullName);
+						tmpBool3 = true;
+					}
+					else
+						tmpBool3 = false;
+					bool generatedAux2 = tmpBool3;
+				}
+			}
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/TestRAMDirectory.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs Sat Jun  3 19:41:13 2006
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using English = Lucene.Net.Util.English;
+
+namespace Lucene.Net.Index.Store
+{
+	
+	/// <summary> JUnit testcase to test RAMDirectory. RAMDirectory itself is used in many testcases,
+	/// but not one of them uses a different constructor other than the default constructor.
+	/// 
+	/// </summary>
+	/// <author>  Bernhard Messer
+	/// 
+	/// </author>
+	/// <version>  $Id: RAMDirectory.java 150537 2004-09-28 22:45:26 +0200 (Di, 28 Sep 2004) cutting $
+	/// </version>
+	[TestFixture]
+    public class TestRAMDirectory
+	{
+		
+		private System.IO.FileInfo indexDir = null;
+		
+		// add enough documents so that the index will be larger than RAMDirectory.READ_BUFFER_SIZE
+		private int docsToAdd = 500;
+		
+		// setup the index
+        [TestFixtureSetUp]
+		public virtual void  SetUp()
+		{
+			System.String tempDir = System.IO.Path.GetTempPath();
+			if (tempDir == null)
+				throw new System.IO.IOException("java.io.tmpdir undefined, cannot run test");
+			indexDir = new System.IO.FileInfo(tempDir + "\\" + "RAMDirIndex");
+			
+			IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true);
+			// add some documents
+			Lucene.Net.Documents.Document doc = null;
+			for (int i = 0; i < docsToAdd; i++)
+			{
+				doc = new Lucene.Net.Documents.Document();
+				doc.Add(new Field("content", English.IntToEnglish(i).Trim(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+				writer.AddDocument(doc);
+			}
+			Assert.AreEqual(docsToAdd, writer.DocCount());
+			writer.Optimize();
+			writer.Close();
+		}
+		
+		[Test]
+        public virtual void  TestRAMDirectory_Renamed_Method()
+		{
+			
+			Directory dir = FSDirectory.GetDirectory(indexDir, false);
+			RAMDirectory ramDir = new RAMDirectory(dir);
+			
+			// close the underlying directory and delete the index
+			dir.Close();
+			
+			// open reader to test document count
+			IndexReader reader = IndexReader.Open(ramDir);
+			Assert.AreEqual(docsToAdd, reader.NumDocs());
+			
+			// open searcher to check if all docs are there
+			IndexSearcher searcher = new IndexSearcher(reader);
+			
+			// search for all documents
+			for (int i = 0; i < docsToAdd; i++)
+			{
+				Lucene.Net.Documents.Document doc = searcher.Doc(i);
+				Assert.IsTrue(doc.GetField("content") != null);
+			}
+			
+			// cleanup
+			reader.Close();
+			searcher.Close();
+		}
+		
+        [Test]
+		public virtual void  TestRAMDirectoryFile()
+		{
+			
+			RAMDirectory ramDir = new RAMDirectory(indexDir);
+			
+			// open reader to test document count
+			IndexReader reader = IndexReader.Open(ramDir);
+			Assert.AreEqual(docsToAdd, reader.NumDocs());
+			
+			// open searcher to check if all docs are there
+			IndexSearcher searcher = new IndexSearcher(reader);
+			
+			// search for all documents
+			for (int i = 0; i < docsToAdd; i++)
+			{
+				Lucene.Net.Documents.Document doc = searcher.Doc(i);
+				Assert.IsTrue(doc.GetField("content") != null);
+			}
+			
+			// cleanup
+			reader.Close();
+			searcher.Close();
+		}
+		
+		[Test]
+        public virtual void  TestRAMDirectoryString()
+		{
+			
+			RAMDirectory ramDir = new RAMDirectory(indexDir.FullName);
+			
+			// open reader to test document count
+			IndexReader reader = IndexReader.Open(ramDir);
+			Assert.AreEqual(docsToAdd, reader.NumDocs());
+			
+			// open searcher to check if all docs are there
+			IndexSearcher searcher = new IndexSearcher(reader);
+			
+			// search for all documents
+			for (int i = 0; i < docsToAdd; i++)
+			{
+				Lucene.Net.Documents.Document doc = searcher.Doc(i);
+				Assert.IsTrue(doc.GetField("content") != null);
+			}
+			
+			// cleanup
+			reader.Close();
+			searcher.Close();
+		}
+		
+        [TestFixtureTearDown]
+		public virtual void  TearDown()
+		{
+			// cleanup 
+			bool tmpBool;
+			if (System.IO.File.Exists(indexDir.FullName))
+				tmpBool = true;
+			else
+				tmpBool = System.IO.Directory.Exists(indexDir.FullName);
+			if (indexDir != null && tmpBool)
+			{
+				RmDir(indexDir);
+			}
+		}
+
+		private void  RmDir(System.IO.FileInfo dir)
+		{
+			System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(dir);
+			for (int i = 0; i < files.Length; i++)
+			{
+				bool tmpBool;
+				if (System.IO.File.Exists(files[i].FullName))
+				{
+					System.IO.File.Delete(files[i].FullName);
+					tmpBool = true;
+				}
+				else if (System.IO.Directory.Exists(files[i].FullName))
+				{
+					System.IO.Directory.Delete(files[i].FullName);
+					tmpBool = true;
+				}
+				else
+					tmpBool = false;
+				bool generatedAux = tmpBool;
+			}
+			bool tmpBool2;
+			if (System.IO.File.Exists(dir.FullName))
+			{
+				System.IO.File.Delete(dir.FullName);
+				tmpBool2 = true;
+			}
+			else if (System.IO.Directory.Exists(dir.FullName))
+			{
+				System.IO.Directory.Delete(dir.FullName);
+				tmpBool2 = true;
+			}
+			else
+				tmpBool2 = false;
+			bool generatedAux2 = tmpBool2;
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCompoundFile.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs Sat Jun  3 19:41:13 2006
@@ -13,31 +13,31 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
-using InputStream = Lucene.Net.Store.InputStream;
-using OutputStream = Lucene.Net.Store.OutputStream;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using _TestHelper = Lucene.Net.Store.TestHelper;
+using IndexInput = Lucene.Net.Store.IndexInput;
+using IndexOutput = Lucene.Net.Store.IndexOutput;
+using _TestHelper = Lucene.Net.Store._TestHelper;
+
 namespace Lucene.Net.Index
 {
 	
 	
 	/// <author>  dmitrys@earthlink.net
 	/// </author>
-	/// <version>  $Id: TestCompoundFile.java,v 1.5 2004/03/29 22:48:06 cutting Exp $
+	/// <version>  $Id: TestCompoundFile.java 208807 2005-07-01 22:13:53Z dnaber $
 	/// </version>
-    [TestFixture]
+	[TestFixture]
     public class TestCompoundFile
 	{
 		/// <summary>Main for running test case by itself. </summary>
 		[STAThread]
 		public static void  Main(System.String[] args)
 		{
-            /*
-			TestRunner.run(new NUnit.Framework.TestSuite(typeof(TestCompoundFile)));
+			// NUnit.Util.TestRunner.Run(new NUnit.Core.TestSuite(typeof(TestCompoundFile)));   // {{Aroush}} where is 'TestRunner' in NUnit?
 			//        TestRunner.run (new TestCompoundFile("testSingleFile"));
 			//        TestRunner.run (new TestCompoundFile("testTwoFiles"));
 			//        TestRunner.run (new TestCompoundFile("testRandomFiles"));
@@ -49,7 +49,6 @@
 			//        TestRunner.run (new TestCompoundFile("testReadPastEOF"));
 			
 			//        TestRunner.run (new TestCompoundFile("testIWCreate"));
-            */
 		}
 		
 		
@@ -59,17 +58,17 @@
 		public virtual void  SetUp()
 		{
 			//dir = new RAMDirectory();
-			dir = FSDirectory.GetDirectory(new System.IO.FileInfo(SupportClass.AppSettings.Get("tempDir", "testIndex")), true);
+			dir = FSDirectory.GetDirectory(new System.IO.FileInfo(System.Configuration.ConfigurationSettings.AppSettings.Get("tempDir") + "\\" + "testIndex"), true);
 		}
 		
 		
 		/// <summary>Creates a file of the specified size with random data. </summary>
 		private void  CreateRandomFile(Directory dir, System.String name, int size)
 		{
-			OutputStream os = dir.CreateFile(name);
+			IndexOutput os = dir.CreateOutput(name);
 			for (int i = 0; i < size; i++)
 			{
-				byte b = (byte) (((new System.Random()).NextDouble()) * 256);
+				byte b = (byte) ((new System.Random().NextDouble()) * 256);
 				os.WriteByte(b);
 			}
 			os.Close();
@@ -81,7 +80,7 @@
 		/// </summary>
 		private void  CreateSequenceFile(Directory dir, System.String name, byte start, int size)
 		{
-			OutputStream os = dir.CreateFile(name);
+			IndexOutput os = dir.CreateOutput(name);
 			for (int i = 0; i < size; i++)
 			{
 				os.WriteByte(start);
@@ -91,12 +90,12 @@
 		}
 		
 		
-		private void  AssertSameStreams(System.String msg, InputStream expected, InputStream test)
+		private void  AssertSameStreams(System.String msg, IndexInput expected, IndexInput test)
 		{
 			Assert.IsNotNull(expected, msg + " null expected");
 			Assert.IsNotNull(test, msg + " null test");
-			Assert.AreEqual(expected.Length(), test.Length(), msg + " length");
-			Assert.AreEqual(expected.GetFilePointer(), test.GetFilePointer(), msg + " position");
+			Assert.AreEqual(test.Length(), expected.Length(), msg + " length");
+			Assert.AreEqual(test.GetFilePointer(), expected.GetFilePointer(), msg + " position");
 			
 			byte[] expectedBuffer = new byte[512];
 			byte[] testBuffer = new byte[expectedBuffer.Length];
@@ -113,7 +112,7 @@
 		}
 		
 		
-		private void  AssertSameStreams(System.String msg, InputStream expected, InputStream actual, long seekTo)
+		private void  AssertSameStreams(System.String msg, IndexInput expected, IndexInput actual, long seekTo)
 		{
 			if (seekTo >= 0 && seekTo < expected.Length())
 			{
@@ -125,7 +124,7 @@
 		
 		
 		
-		private void  AssertSameSeekBehavior(System.String msg, InputStream expected, InputStream actual)
+		private void  AssertSameSeekBehavior(System.String msg, IndexInput expected, IndexInput actual)
 		{
 			// seek to 0
 			long point = 0;
@@ -174,7 +173,7 @@
 		/// Files of different sizes are tested: 0, 1, 10, 100 bytes.
 		/// </summary>
 		[Test]
-		public virtual void  TestSingleFile()
+        public virtual void  TestSingleFile()
 		{
 			int[] data = new int[]{0, 1, 10, 100};
 			for (int i = 0; i < data.Length; i++)
@@ -186,8 +185,8 @@
 				csw.Close();
 				
 				CompoundFileReader csr = new CompoundFileReader(dir, name + ".cfs");
-				InputStream expected = dir.OpenFile(name);
-				InputStream actual = csr.OpenFile(name);
+				IndexInput expected = dir.OpenInput(name);
+				IndexInput actual = csr.OpenInput(name);
 				AssertSameStreams(name, expected, actual);
 				AssertSameSeekBehavior(name, expected, actual);
 				expected.Close();
@@ -212,15 +211,15 @@
 			csw.Close();
 			
 			CompoundFileReader csr = new CompoundFileReader(dir, "d.csf");
-			InputStream expected = dir.OpenFile("d1");
-			InputStream actual = csr.OpenFile("d1");
+			IndexInput expected = dir.OpenInput("d1");
+			IndexInput actual = csr.OpenInput("d1");
 			AssertSameStreams("d1", expected, actual);
 			AssertSameSeekBehavior("d1", expected, actual);
 			expected.Close();
 			actual.Close();
 			
-			expected = dir.OpenFile("d2");
-			actual = csr.OpenFile("d2");
+			expected = dir.OpenInput("d2");
+			actual = csr.OpenInput("d2");
 			AssertSameStreams("d2", expected, actual);
 			AssertSameSeekBehavior("d2", expected, actual);
 			expected.Close();
@@ -269,8 +268,8 @@
 			CompoundFileReader csr = new CompoundFileReader(dir, "test.cfs");
 			for (int i = 0; i < data.Length; i++)
 			{
-				InputStream check = dir.OpenFile(segment + data[i]);
-				InputStream test = csr.OpenFile(segment + data[i]);
+				IndexInput check = dir.OpenInput(segment + data[i]);
+				IndexInput test = csr.OpenInput(segment + data[i]);
 				AssertSameStreams(data[i], check, test);
 				AssertSameSeekBehavior(data[i], check, test);
 				test.Close();
@@ -299,22 +298,22 @@
 		[Test]
 		public virtual void  TestReadAfterClose()
 		{
-			Demo_FSInputStreamBug((FSDirectory) dir, "test");
+			Demo_FSIndexInputBug((FSDirectory) dir, "test");
 		}
 		
-		private void  Demo_FSInputStreamBug(FSDirectory fsdir, System.String file)
+		private void  Demo_FSIndexInputBug(FSDirectory fsdir, System.String file)
 		{
 			// Setup the test file - we need more than 1024 bytes
-			OutputStream os = fsdir.CreateFile(file);
+			IndexOutput os = fsdir.CreateOutput(file);
 			for (int i = 0; i < 2000; i++)
 			{
 				os.WriteByte((byte) i);
 			}
 			os.Close();
 			
-			InputStream in_Renamed = fsdir.OpenFile(file);
+			IndexInput in_Renamed = fsdir.OpenInput(file);
 			
-			// This read primes the buffer in InputStream
+			// This read primes the buffer in IndexInput
 			byte b = in_Renamed.ReadByte();
 			
 			// Close the file
@@ -327,45 +326,37 @@
 			// ERROR: this call should fail, but succeeds for some reason as well
 			in_Renamed.Seek(1099);
 			
-            try
-            {
-                // OK: this call correctly fails. We are now past the 1024 internal
-                // buffer, so an actual IO is attempted, which fails
-                b = in_Renamed.ReadByte();
-            }
-            catch (System.IO.IOException e)
-            {
-            }
-            catch (System.Exception)
-            {
-            }
+			try
+			{
+				// OK: this call correctly fails. We are now past the 1024 internal
+				// buffer, so an actual IO is attempted, which fails
+				b = in_Renamed.ReadByte();
+				Assert.Fail("expected readByte() to throw exception");
+			}
+			catch (System.Exception e)
+			{
+				// expected exception
+			}
 		}
 		
 		
-		internal static bool IsCSInputStream(InputStream is_Renamed)
-		{
-			return is_Renamed is CompoundFileReader.CSInputStream;
-		}
-		
-		internal static bool IsCSInputStreamOpen(InputStream is_Renamed)
-		{
-            try
-            {
-                if (IsCSInputStream(is_Renamed))
-                {
-                    CompoundFileReader.CSInputStream cis = (CompoundFileReader.CSInputStream) is_Renamed;
-				
-                    return _TestHelper.IsFSInputStreamOpen(cis.base_Renamed);
-                }
-                else
-                {
-                    return false;
-                }
-            }
-            catch
-            {
-                return false;
-            }
+		internal static bool IsCSIndexInput(IndexInput is_Renamed)
+		{
+			return is_Renamed is CompoundFileReader.CSIndexInput;
+		}
+		
+		internal static bool IsCSIndexInputOpen(IndexInput is_Renamed)
+		{
+			if (IsCSIndexInput(is_Renamed))
+			{
+				CompoundFileReader.CSIndexInput cis = (CompoundFileReader.CSIndexInput) is_Renamed;
+				
+				return _TestHelper.IsFSIndexInputOpen(cis.base_Renamed);
+			}
+			else
+			{
+				return false;
+			}
 		}
 		
 		[Test]
@@ -375,47 +366,53 @@
 			CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
 			
 			// basic clone
-			InputStream expected = dir.OpenFile("f11");
-			Assert.IsTrue(_TestHelper.IsFSInputStreamOpen(expected));
-			
-			InputStream one = cr.OpenFile("f11");
-			Assert.IsTrue(IsCSInputStreamOpen(one));
-			
-			InputStream two = (InputStream) one.Clone();
-			Assert.IsTrue(IsCSInputStreamOpen(two));
-			
-			AssertSameStreams("basic clone one", expected, one);
-			expected.Seek(0);
-			AssertSameStreams("basic clone two", expected, two);
-			
-			// Now close the first stream
-			one.Close();
-			Assert.IsTrue(IsCSInputStreamOpen(one), "Only close when cr is closed");
-			
-			// The following should really fail since we couldn't expect to
-			// access a file once close has been called on it (regardless of
-			// buffering and/or clone magic)
-			expected.Seek(0);
-			two.Seek(0);
-			AssertSameStreams("basic clone two/2", expected, two);
+			IndexInput expected = dir.OpenInput("f11");
 			
-			
-			// Now close the compound reader
-			cr.Close();
-			Assert.IsFalse(IsCSInputStreamOpen(one), "Now closed one");
-			Assert.IsFalse(IsCSInputStreamOpen(two), "Now closed two");
-			
-			// The following may also fail since the compound stream is closed
-			expected.Seek(0);
-			two.Seek(0);
-			//AssertSameStreams("basic clone two/3", expected, two);
-			
-			
-			// Now close the second clone
-			two.Close();
-			expected.Seek(0);
-			two.Seek(0);
-			//AssertSameStreams("basic clone two/4", expected, two);
+			// this test only works for FSIndexInput
+			if (_TestHelper.IsFSIndexInput(expected))
+			{
+				
+				Assert.IsTrue(_TestHelper.IsFSIndexInputOpen(expected));
+				
+				IndexInput one = cr.OpenInput("f11");
+				Assert.IsTrue(IsCSIndexInputOpen(one));
+				
+				IndexInput two = (IndexInput) one.Clone();
+				Assert.IsTrue(IsCSIndexInputOpen(two));
+				
+				AssertSameStreams("basic clone one", expected, one);
+				expected.Seek(0);
+				AssertSameStreams("basic clone two", expected, two);
+				
+				// Now close the first stream
+				one.Close();
+				Assert.IsTrue(IsCSIndexInputOpen(one), "Only close when cr is closed");
+				
+				// The following should really fail since we couldn't expect to
+				// access a file once close has been called on it (regardless of
+				// buffering and/or clone magic)
+				expected.Seek(0);
+				two.Seek(0);
+				AssertSameStreams("basic clone two/2", expected, two);
+				
+				
+				// Now close the compound reader
+				cr.Close();
+				Assert.IsFalse(IsCSIndexInputOpen(one), "Now closed one");
+				Assert.IsFalse(IsCSIndexInputOpen(two), "Now closed two");
+				
+				// The following may also fail since the compound stream is closed
+				expected.Seek(0);
+				two.Seek(0);
+				//assertSameStreams("basic clone two/3", expected, two);
+				
+				
+				// Now close the second clone
+				two.Close();
+				expected.Seek(0);
+				two.Seek(0);
+				//assertSameStreams("basic clone two/4", expected, two);
+			}
 			
 			expected.Close();
 		}
@@ -431,11 +428,11 @@
 			CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
 			
 			// Open two files
-			InputStream e1 = dir.OpenFile("f11");
-			InputStream e2 = dir.OpenFile("f3");
+			IndexInput e1 = dir.OpenInput("f11");
+			IndexInput e2 = dir.OpenInput("f3");
 			
-			InputStream a1 = cr.OpenFile("f11");
-			InputStream a2 = dir.OpenFile("f3");
+			IndexInput a1 = cr.OpenInput("f11");
+			IndexInput a2 = dir.OpenInput("f3");
 			
 			// Seek the first pair
 			e1.Seek(100);
@@ -512,11 +509,11 @@
 			CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
 			
 			// Open two files
-			InputStream e1 = cr.OpenFile("f11");
-			InputStream e2 = cr.OpenFile("f3");
+			IndexInput e1 = cr.OpenInput("f11");
+			IndexInput e2 = cr.OpenInput("f3");
 			
-			InputStream a1 = (InputStream) e1.Clone();
-			InputStream a2 = (InputStream) e2.Clone();
+			IndexInput a1 = (IndexInput) e1.Clone();
+			IndexInput a2 = (IndexInput) e2.Clone();
 			
 			// Seek the first pair
 			e1.Seek(100);
@@ -592,7 +589,7 @@
 			// Open two files
 			try
 			{
-				InputStream e1 = cr.OpenFile("bogus");
+				IndexInput e1 = cr.OpenInput("bogus");
 				Assert.Fail("File not found");
 			}
 			catch (System.IO.IOException e)
@@ -609,7 +606,7 @@
 		{
 			SetUp_2();
 			CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
-			InputStream is_Renamed = cr.OpenFile("f2");
+			IndexInput is_Renamed = cr.OpenInput("f2");
 			is_Renamed.Seek(is_Renamed.Length() - 10);
 			byte[] b = new byte[100];
 			is_Renamed.ReadBytes(b, 0, 10);

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDoc.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs Sat Jun  3 19:41:13 2006
@@ -13,10 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
-////using NUnit.Framework.TestSuite;
-////using NUnit.Framework.TestRunner;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using FileDocument = Lucene.Net.Demo.FileDocument;
@@ -24,6 +23,7 @@
 using Similarity = Lucene.Net.Search.Similarity;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
+
 namespace Lucene.Net.Index
 {
 	
@@ -31,7 +31,7 @@
 	/// <summary>JUnit adaptation of an older test case DocTest.</summary>
 	/// <author>  dmitrys@earthlink.net
 	/// </author>
-	/// <version>  $Id: TestDoc.java,v 1.5 2004/04/20 19:33:35 goller Exp $
+	/// <version>  $Id: TestDoc.java 150536 2004-09-28 18:15:52Z cutting $
 	/// </version>
 	[TestFixture]
     public class TestDoc
@@ -41,6 +41,7 @@
 		[STAThread]
 		public static void  Main(System.String[] args)
 		{
+			// NUnit.Core.TestRunner.Run(new NUnit.Core.TestSuite(typeof(TestDoc)));    // {{Aroush}} where is 'TestRunner' in NUnit
 		}
 		
 		
@@ -55,29 +56,29 @@
 		[TestFixtureSetUp]
         public virtual void  SetUp()
 		{
-			workDir = new System.IO.FileInfo(System.Configuration.ConfigurationSettings.AppSettings.Get("tempDir") + "\\" + "TestDoc");
+			workDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", "tempDir"), "TestDoc"));
 			System.IO.Directory.CreateDirectory(workDir.FullName);
 			
-			indexDir = new System.IO.FileInfo(workDir.FullName + "\\" + "testIndex");
+			indexDir = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, "testIndex"));
 			System.IO.Directory.CreateDirectory(indexDir.FullName);
 			
 			Directory directory = FSDirectory.GetDirectory(indexDir, true);
 			directory.Close();
 			
 			files = new System.Collections.ArrayList();
-			files.Add(CreateFile("test.txt", "This is the first test file"));
+			files.Add(CreateOutput("test.txt", "This is the first test file"));
 			
-			files.Add(CreateFile("test2.txt", "This is the second test file"));
+			files.Add(CreateOutput("test2.txt", "This is the second test file"));
 		}
 		
-		private System.IO.FileInfo CreateFile(System.String name, System.String text)
+		private System.IO.FileInfo CreateOutput(System.String name, System.String text)
 		{
 			System.IO.StreamWriter fw = null;
 			System.IO.StreamWriter pw = null;
 			
 			try
 			{
-				System.IO.FileInfo f = new System.IO.FileInfo(workDir.FullName + "\\" + name);
+				System.IO.FileInfo f = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, name));
 				bool tmpBool;
 				if (System.IO.File.Exists(f.FullName))
 					tmpBool = true;
@@ -112,8 +113,8 @@
 				{
 					pw.Close();
 				}
-				if ((fw != null) && (fw.BaseStream.CanWrite))
-					fw.Close();
+				//if (fw != null)
+				//	fw.Close();
 			}
 		}
 		
@@ -127,9 +128,10 @@
 		/// assert various things about the segment.
 		/// </summary>
 		[Test]
-		public virtual void  TestIndexAndMerge()
+        public virtual void  TestIndexAndMerge()
 		{
-			System.IO.StringWriter out_Renamed = new System.IO.StringWriter();
+            System.IO.MemoryStream sw = new System.IO.MemoryStream();
+            System.IO.StreamWriter out_Renamed = new System.IO.StreamWriter(sw);
 			
 			Directory directory = FSDirectory.GetDirectory(indexDir, true);
 			directory.Close();
@@ -150,9 +152,12 @@
 			PrintSegment(out_Renamed, "merge3");
 			
 			out_Renamed.Close();
-			System.String multiFileOutput = out_Renamed.GetStringBuilder().ToString();
+			sw.Close();
+            System.String multiFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
+            //System.out.println(multiFileOutput);
 			
-			out_Renamed = new System.IO.StringWriter();
+            sw = new System.IO.MemoryStream();
+            out_Renamed = new System.IO.StreamWriter(sw);
 			
 			directory = FSDirectory.GetDirectory(indexDir, true);
 			directory.Close();
@@ -173,7 +178,8 @@
 			PrintSegment(out_Renamed, "merge3");
 			
 			out_Renamed.Close();
-			System.String singleFileOutput = out_Renamed.GetStringBuilder().ToString();
+			sw.Close();
+            System.String singleFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
 			
 			Assert.AreEqual(multiFileOutput, singleFileOutput);
 		}
@@ -186,7 +192,7 @@
 			DocumentWriter writer = new DocumentWriter(directory, analyzer, Similarity.GetDefault(), 1000);
 			
 			System.IO.FileInfo file = new System.IO.FileInfo(workDir.FullName + "\\" + fileName);
-			Document doc = FileDocument.Document(file);
+			Lucene.Net.Documents.Document doc = FileDocument.Document(file);
 			
 			writer.AddDocument(segment, doc);
 			
@@ -198,24 +204,33 @@
 		{
 			Directory directory = FSDirectory.GetDirectory(indexDir, false);
 			
-			SegmentReader r1 = new SegmentReader(new SegmentInfo(seg1, 1, directory));
-			SegmentReader r2 = new SegmentReader(new SegmentInfo(seg2, 1, directory));
+			SegmentReader r1 = SegmentReader.Get(new SegmentInfo(seg1, 1, directory));
+			SegmentReader r2 = SegmentReader.Get(new SegmentInfo(seg2, 1, directory));
 			
-			SegmentMerger merger = new SegmentMerger(directory, merged, useCompoundFile);
+			SegmentMerger merger = new SegmentMerger(directory, merged);
 			
 			merger.Add(r1);
 			merger.Add(r2);
 			merger.Merge();
 			merger.CloseReaders();
 			
+			if (useCompoundFile)
+			{
+				System.Collections.ArrayList filesToDelete = merger.CreateCompoundFile(merged + ".cfs");
+				for (System.Collections.IEnumerator iter = filesToDelete.GetEnumerator(); iter.MoveNext(); )
+				{
+					directory.DeleteFile((System.String) iter.Current);
+				}
+			}
+			
 			directory.Close();
 		}
 		
 		
-		private void  PrintSegment(System.IO.StringWriter out_Renamed, System.String segment)
+		private void  PrintSegment(System.IO.StreamWriter out_Renamed, System.String segment)
 		{
 			Directory directory = FSDirectory.GetDirectory(indexDir, false);
-			SegmentReader reader = new SegmentReader(new SegmentInfo(segment, 1, directory));
+			SegmentReader reader = SegmentReader.Get(new SegmentInfo(segment, 1, directory));
 			
 			for (int i = 0; i < reader.NumDocs(); i++)
 			{
@@ -235,8 +250,8 @@
 					{
 						out_Renamed.Write(" doc=" + positions.Doc());
 						out_Renamed.Write(" TF=" + positions.Freq());
-                        out_Renamed.Write(" pos=");
-                        out_Renamed.Write(positions.NextPosition());
+						out_Renamed.Write(" pos=");
+						out_Renamed.Write(positions.NextPosition());
 						for (int j = 1; j < positions.Freq(); j++)
 							out_Renamed.Write("," + positions.NextPosition());
 						out_Renamed.WriteLine("");

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDocumentWriter.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs Sat Jun  3 19:41:13 2006
@@ -13,76 +13,146 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Similarity = Lucene.Net.Search.Similarity;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestDocumentWriter
 	{
-		private RAMDirectory dir = new RAMDirectory();
-		private Document testDoc = new Document();
-		
+		private class AnonymousClassAnalyzer : Analyzer
+		{
+			public AnonymousClassAnalyzer(TestDocumentWriter enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestDocumentWriter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDocumentWriter enclosingInstance;
+			public TestDocumentWriter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+
+            public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				return new WhitespaceTokenizer(reader);
+			}
+			
+			public override int GetPositionIncrementGap(System.String fieldName)
+			{
+				return 500;
+			}
+		}
+		private RAMDirectory dir;
+
+
 		[TestFixtureSetUp]
-		protected virtual void  SetUp()
+        public virtual void  SetUp()
 		{
-			DocHelper.SetupDoc(testDoc);
+			dir = new RAMDirectory();
 		}
 		
-        [TestFixtureTearDown]
-		protected virtual void  TearDown()
+		[TestFixtureTearDown]
+        public virtual void  TearDown()
 		{
 			
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			Assert.IsTrue(dir != null);
 		}
 		
-        [Test]
-		public virtual void  TestAddDocument()
+		[Test]
+        public virtual void  TestAddDocument()
 		{
+			Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
+			DocHelper.SetupDoc(testDoc);
 			Analyzer analyzer = new WhitespaceAnalyzer();
-			Similarity similarity = Similarity.GetDefault();
+			Lucene.Net.Search.Similarity similarity = Lucene.Net.Search.Similarity.GetDefault();
 			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
-			Assert.IsTrue(writer != null);
-			try
-			{
-				writer.AddDocument("test", testDoc);
-				//After adding the document, we should be able to read it back in
-				SegmentReader reader = new SegmentReader(new SegmentInfo("test", 1, dir));
-				Assert.IsTrue(reader != null);
-				Document doc = reader.Document(0);
-				Assert.IsTrue(doc != null);
-				
-				//System.out.println("Document: " + doc);
-				Field[] fields = doc.GetFields("textField2");
-				Assert.IsTrue(fields != null && fields.Length == 1);
-				Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_2_TEXT));
-				Assert.IsTrue(fields[0].IsTermVectorStored() == true);
-				
-				fields = doc.GetFields("textField1");
-				Assert.IsTrue(fields != null && fields.Length == 1);
-				Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_1_TEXT));
-				Assert.IsTrue(fields[0].IsTermVectorStored() == false);
-				
-				fields = doc.GetFields("keyField");
-				Assert.IsTrue(fields != null && fields.Length == 1);
-				Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.KEYWORD_TEXT));
-			}
-			catch (System.IO.IOException e)
+			System.String segName = "test";
+			writer.AddDocument(segName, testDoc);
+			//After adding the document, we should be able to read it back in
+			SegmentReader reader = SegmentReader.Get(new SegmentInfo(segName, 1, dir));
+			Assert.IsTrue(reader != null);
+			Lucene.Net.Documents.Document doc = reader.Document(0);
+			Assert.IsTrue(doc != null);
+			
+			//System.out.println("Document: " + doc);
+			Field[] fields = doc.GetFields("textField2");
+			Assert.IsTrue(fields != null && fields.Length == 1);
+			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_2_TEXT));
+			Assert.IsTrue(fields[0].IsTermVectorStored());
+			
+			fields = doc.GetFields("textField1");
+			Assert.IsTrue(fields != null && fields.Length == 1);
+			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_1_TEXT));
+			Assert.IsFalse(fields[0].IsTermVectorStored());
+			
+			fields = doc.GetFields("keyField");
+			Assert.IsTrue(fields != null && fields.Length == 1);
+			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.KEYWORD_TEXT));
+			
+			fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
+			Assert.IsTrue(fields != null && fields.Length == 1);
+			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.NO_NORMS_TEXT));
+			
+			fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
+			Assert.IsTrue(fields != null && fields.Length == 1);
+			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_3_TEXT));
+			
+			// test that the norm file is not present if omitNorms is true
+			for (int i = 0; i < reader.FieldInfos.Size(); i++)
 			{
-                System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				FieldInfo fi = reader.FieldInfos.FieldInfo(i);
+				if (fi.IsIndexed)
+				{
+					Assert.IsTrue(fi.omitNorms == !dir.FileExists(segName + ".f" + i));
+				}
 			}
+		}
+		
+		[Test]
+        public virtual void  TestPositionIncrementGap()
+		{
+			Analyzer analyzer = new AnonymousClassAnalyzer(this);
+			
+			Lucene.Net.Search.Similarity similarity = Lucene.Net.Search.Similarity.GetDefault();
+			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.TOKENIZED));
+			
+			System.String segName = "test";
+			writer.AddDocument(segName, doc);
+			SegmentReader reader = SegmentReader.Get(new SegmentInfo(segName, 1, dir));
+			
+			TermPositions termPositions = reader.TermPositions(new Term("repeated", "repeated"));
+			Assert.IsTrue(termPositions.Next());
+			int freq = termPositions.Freq();
+			Assert.AreEqual(2, freq);
+			Assert.AreEqual(0, termPositions.NextPosition());
+			Assert.AreEqual(502, termPositions.NextPosition());
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldInfos.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs Sat Jun  3 19:41:13 2006
@@ -13,46 +13,47 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
-using OutputStream = Lucene.Net.Store.OutputStream;
+using IndexOutput = Lucene.Net.Store.IndexOutput;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using RAMOutputStream = Lucene.Net.Store.RAMOutputStream;
+
 namespace Lucene.Net.Index
 {
 	
+	
 	//import org.cnlp.utils.properties.ResourceBundleHelper;
 	[TestFixture]
 	public class TestFieldInfos
 	{
 		
-		private Document testDoc = new Document();
+		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		
-        [TestFixtureSetUp]
-		protected virtual void  SetUp()
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
 			DocHelper.SetupDoc(testDoc);
 		}
 		
-        [TestFixtureTearDown]
-		protected virtual void  TearDown()
+		[TestFixtureTearDown]
+        public virtual void  TearDown()
 		{
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			//Positive test of FieldInfos
 			Assert.IsTrue(testDoc != null);
 			FieldInfos fieldInfos = new FieldInfos();
 			fieldInfos.Add(testDoc);
 			//Since the complement is stored as well in the fields map
-			Assert.IsTrue(fieldInfos.Size() == 7); //this is 7 b/c we are using the no-arg constructor
+			Assert.IsTrue(fieldInfos.Size() == ((System.Collections.Hashtable) DocHelper.all).Count); //this is all b/c we are using the no-arg constructor
 			RAMDirectory dir = new RAMDirectory();
 			System.String name = "testFile";
-			OutputStream output = dir.CreateFile(name);
+			IndexOutput output = dir.CreateOutput(name);
 			Assert.IsTrue(output != null);
 			//Use a RAMOutputStream
 			
@@ -66,10 +67,22 @@
 				FieldInfo info = readIn.FieldInfo("textField1");
 				Assert.IsTrue(info != null);
 				Assert.IsTrue(info.storeTermVector == false);
+				Assert.IsTrue(info.omitNorms == false);
 				
 				info = readIn.FieldInfo("textField2");
 				Assert.IsTrue(info != null);
 				Assert.IsTrue(info.storeTermVector == true);
+				Assert.IsTrue(info.omitNorms == false);
+				
+				info = readIn.FieldInfo("textField3");
+				Assert.IsTrue(info != null);
+				Assert.IsTrue(info.storeTermVector == false);
+				Assert.IsTrue(info.omitNorms == true);
+				
+				info = readIn.FieldInfo("omitNorms");
+				Assert.IsTrue(info != null);
+				Assert.IsTrue(info.storeTermVector == false);
+				Assert.IsTrue(info.omitNorms == true);
 				
 				dir.Close();
 			}

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldsReader.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs Sat Jun  3 19:41:13 2006
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -20,62 +21,58 @@
 using Field = Lucene.Net.Documents.Field;
 using Similarity = Lucene.Net.Search.Similarity;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestFieldsReader
 	{
 		private RAMDirectory dir = new RAMDirectory();
-		private Document testDoc = new Document();
+		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		private FieldInfos fieldInfos = null;
 		
-        [SetUp]
-		protected virtual void  SetUp()
+		
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
 			fieldInfos = new FieldInfos();
 			DocHelper.SetupDoc(testDoc);
 			fieldInfos.Add(testDoc);
 			DocumentWriter writer = new DocumentWriter(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), 50);
 			Assert.IsTrue(writer != null);
-			try
-			{
-				writer.AddDocument("test", testDoc);
-			}
-			catch (System.IO.IOException e)
-			{
-				
-			}
-		}
-		
-        [TearDown]
-		protected virtual void  TearDown()
-		{
-			
+			writer.AddDocument("test", testDoc);
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			Assert.IsTrue(dir != null);
 			Assert.IsTrue(fieldInfos != null);
-			try
-			{
-				FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
-				Assert.IsTrue(reader != null);
-				Assert.IsTrue(reader.Size() == 1);
-				Document doc = reader.Doc(0);
-				Assert.IsTrue(doc != null);
-				Assert.IsTrue(doc.GetField("textField1") != null);
-				Field field = doc.GetField("textField2");
-				Assert.IsTrue(field != null);
-				Assert.IsTrue(field.IsTermVectorStored() == true);
-				reader.Close();
-			}
-			catch (System.IO.IOException e)
-			{
-                System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
-			}
+			FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
+			Assert.IsTrue(reader != null);
+			Assert.IsTrue(reader.Size() == 1);
+			Lucene.Net.Documents.Document doc = reader.Doc(0);
+			Assert.IsTrue(doc != null);
+			Assert.IsTrue(doc.GetField("textField1") != null);
+			
+			Field field = doc.GetField("textField2");
+			Assert.IsTrue(field != null);
+			Assert.IsTrue(field.IsTermVectorStored() == true);
+			
+			Assert.IsTrue(field.IsStoreOffsetWithTermVector() == true);
+			Assert.IsTrue(field.IsStorePositionWithTermVector() == true);
+			Assert.IsTrue(field.GetOmitNorms() == false);
+			
+			field = doc.GetField("textField3");
+			Assert.IsTrue(field != null);
+			Assert.IsTrue(field.IsTermVectorStored() == false);
+			Assert.IsTrue(field.IsStoreOffsetWithTermVector() == false);
+			Assert.IsTrue(field.IsStorePositionWithTermVector() == false);
+			Assert.IsTrue(field.GetOmitNorms() == true);
+			
+			
+			reader.Close();
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFilterIndexReader.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs Sat Jun  3 19:41:13 2006
@@ -13,31 +13,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Hits = Lucene.Net.Search.Hits;
-using IndexSearcher = Lucene.Net.Search.IndexSearcher;
-using Searcher = Lucene.Net.Search.Searcher;
-using TermQuery = Lucene.Net.Search.TermQuery;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestFilterIndexReader
 	{
+		
 		private class TestReader : FilterIndexReader
 		{
 			
 			/// <summary>Filter that only permits terms containing 'e'.</summary>
 			private class TestTermEnum : FilterTermEnum
 			{
-				public TestTermEnum(TermEnum termEnum) : base(termEnum)
+				public TestTermEnum(TermEnum termEnum):base(termEnum)
 				{
 				}
 				
@@ -46,7 +43,7 @@
 				{
 					while (in_Renamed.Next())
 					{
-						if (in_Renamed.Term().Text().IndexOf((System.Char) 'e') != - 1)
+						if (in_Renamed.Term().Text().IndexOf('e') != - 1)
 							return true;
 					}
 					return false;
@@ -56,7 +53,7 @@
 			/// <summary>Filter that only returns odd numbered documents. </summary>
 			private class TestTermPositions : FilterTermPositions
 			{
-				public TestTermPositions(TermPositions in_Renamed):base(in_Renamed)
+                public TestTermPositions(TermPositions in_Renamed) : base(in_Renamed)
 				{
 				}
 				
@@ -94,26 +91,27 @@
 		[STAThread]
 		public static void  Main(System.String[] args)
 		{
+			// NUnit.Core.TestRunner.Run(new NUnit.Core.TestSuite(typeof(TestIndexReader)));   // {{Aroush}} where is 'Run' in NUnit?
 		}
 		
 		/// <summary> Tests the IndexReader.getFieldNames implementation</summary>
 		/// <throws>  Exception on error </throws>
 		[Test]
-		public virtual void  TestFilterIndexReader_()
+        public virtual void  TestFilterIndexReader_Renamed_Method()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
 			
-			Document d1 = new Document();
-			d1.Add(Field.Text("default", "one two"));
+			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			d1.Add(new Field("default", "one two", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(d1);
 			
-			Document d2 = new Document();
-			d2.Add(Field.Text("default", "one three"));
+			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+			d2.Add(new Field("default", "one three", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(d2);
 			
-			Document d3 = new Document();
-			d3.Add(Field.Text("default", "two four"));
+			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+			d3.Add(new Field("default", "two four", Field.Store.YES, Field.Index.TOKENIZED));
 			writer.AddDocument(d3);
 			
 			writer.Close();
@@ -123,7 +121,7 @@
 			TermEnum terms = reader.Terms();
 			while (terms.Next())
 			{
-				Assert.IsTrue(terms.Term().Text().IndexOf((System.Char) 'e') != - 1);
+				Assert.IsTrue(terms.Term().Text().IndexOf('e') != - 1);
 			}
 			terms.Close();
 			

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexInput.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs Sat Jun  3 19:41:13 2006
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using IndexInput = Lucene.Net.Store.IndexInput;
+
+namespace Lucene.Net.Index
+{
+	[TestFixture]
+	public class TestIndexInput
+	{
+		[Test]
+        public virtual void  TestRead()
+		{
+			IndexInput is_Renamed = new MockIndexInput(new byte[]{(byte) (0x80), (byte) (0x01), (byte) (0xFF), (byte) (0x7F), (byte) (0x80), (byte) (0x80), (byte) (0x01), (byte) (0x81), (byte) (0x80), (byte) (0x01), (byte) (0x06), (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e'});
+			Assert.AreEqual(128, is_Renamed.ReadVInt());
+			Assert.AreEqual(16383, is_Renamed.ReadVInt());
+			Assert.AreEqual(16384, is_Renamed.ReadVInt());
+			Assert.AreEqual(16385, is_Renamed.ReadVInt());
+			Assert.AreEqual("Lucene", is_Renamed.ReadString());
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexModifier.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs Sat Jun  3 19:41:13 2006
@@ -0,0 +1,360 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Index
+{
+	
+	/// <summary> Tests for the "IndexModifier" class, including accesses from two threads at the
+	/// same time.
+	/// 
+	/// </summary>
+	/// <author>  Daniel Naber
+	/// </author>
+	[TestFixture]
+    public class TestIndexModifier
+	{
+		
+		private int docCount = 0;
+		
+		private Term allDocTerm = new Term("all", "x");
+		
+		[Test]
+        public virtual void  TestIndex()
+		{
+			Directory ramDir = new RAMDirectory();
+			IndexModifier i = new IndexModifier(ramDir, new StandardAnalyzer(), true);
+			i.AddDocument(GetDoc());
+			Assert.AreEqual(1, i.DocCount());
+			i.Flush();
+			i.AddDocument(GetDoc(), new SimpleAnalyzer());
+			Assert.AreEqual(2, i.DocCount());
+			i.Optimize();
+			Assert.AreEqual(2, i.DocCount());
+			i.Flush();
+			i.Delete(0);
+			Assert.AreEqual(1, i.DocCount());
+			i.Flush();
+			Assert.AreEqual(1, i.DocCount());
+			i.AddDocument(GetDoc());
+			i.AddDocument(GetDoc());
+			i.Flush();
+			Assert.AreEqual(3, i.DocCount());
+			i.Delete(allDocTerm);
+			Assert.AreEqual(0, i.DocCount());
+			i.Optimize();
+			Assert.AreEqual(0, i.DocCount());
+			
+			//  Lucene defaults:
+			Assert.IsNull(i.GetInfoStream());
+			Assert.IsTrue(i.GetUseCompoundFile());
+			Assert.AreEqual(10, i.GetMaxBufferedDocs());
+			Assert.AreEqual(10000, i.GetMaxFieldLength());
+			Assert.AreEqual(10, i.GetMergeFactor());
+			// test setting properties:
+			i.SetMaxBufferedDocs(100);
+			i.SetMergeFactor(25);
+			i.SetMaxFieldLength(250000);
+			i.AddDocument(GetDoc());
+			i.SetUseCompoundFile(false);
+			i.Flush();
+			Assert.AreEqual(100, i.GetMaxBufferedDocs());
+			Assert.AreEqual(25, i.GetMergeFactor());
+			Assert.AreEqual(250000, i.GetMaxFieldLength());
+			Assert.IsFalse(i.GetUseCompoundFile());
+			
+			// test setting properties when internally the reader is opened:
+			i.Delete(allDocTerm);
+			i.SetMaxBufferedDocs(100);
+			i.SetMergeFactor(25);
+			i.SetMaxFieldLength(250000);
+			i.AddDocument(GetDoc());
+			i.SetUseCompoundFile(false);
+			i.Optimize();
+			Assert.AreEqual(100, i.GetMaxBufferedDocs());
+			Assert.AreEqual(25, i.GetMergeFactor());
+			Assert.AreEqual(250000, i.GetMaxFieldLength());
+			Assert.IsFalse(i.GetUseCompoundFile());
+			
+			i.Close();
+			try
+			{
+				i.DocCount();
+				Assert.Fail();
+			}
+			catch (System.SystemException e)
+			{
+				// expected exception
+			}
+		}
+		
+		[Test]
+        public virtual void  TestExtendedIndex()
+		{
+			Directory ramDir = new RAMDirectory();
+			PowerIndex powerIndex = new PowerIndex(this, ramDir, new StandardAnalyzer(), true);
+			powerIndex.AddDocument(GetDoc());
+			powerIndex.AddDocument(GetDoc());
+			powerIndex.AddDocument(GetDoc());
+			powerIndex.AddDocument(GetDoc());
+			powerIndex.AddDocument(GetDoc());
+			powerIndex.Flush();
+			Assert.AreEqual(5, powerIndex.DocFreq(allDocTerm));
+			powerIndex.Close();
+		}
+		
+		private Lucene.Net.Documents.Document GetDoc()
+		{
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("body", ((System.Int32) docCount).ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			docCount++;
+			return doc;
+		}
+		
+		[Test]
+        public virtual void  TestIndexWithThreads()
+		{
+			_TestIndexInternal(0);
+			_TestIndexInternal(10);
+			_TestIndexInternal(50);
+		}
+		
+        private void  _TestIndexInternal(int maxWait)
+		{
+			bool create = true;
+			//Directory rd = new RAMDirectory();
+			// work on disk to make sure potential lock problems are tested:
+			System.String tempDir = System.IO.Path.GetTempPath();
+			if (tempDir == null)
+				throw new System.IO.IOException("java.io.tmpdir undefined, cannot run test");
+			System.IO.FileInfo indexDir = new System.IO.FileInfo(tempDir + "\\" + "lucenetestindex");
+			Directory rd = FSDirectory.GetDirectory(indexDir, create);
+			IndexThread.id = 0;
+			IndexThread.idStack.Clear();
+			IndexModifier index = new IndexModifier(rd, new StandardAnalyzer(), create);
+			IndexThread thread1 = new IndexThread(index, maxWait, 1);
+			thread1.Start();
+			IndexThread thread2 = new IndexThread(index, maxWait, 2);
+			thread2.Start();
+			while (thread1.IsAlive || thread2.IsAlive)
+			{
+				try
+				{
+					System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 100));
+				}
+				catch (System.Threading.ThreadInterruptedException e)
+				{
+					throw new System.SystemException(e.Message);
+				}
+			}
+			index.Optimize();
+			int added = thread1.added + thread2.added;
+			int deleted = thread1.deleted + thread2.deleted;
+			Assert.AreEqual(added - deleted, index.DocCount());
+			index.Close();
+			
+			try
+			{
+				index.Close();
+				Assert.Fail();
+			}
+			catch (System.SystemException e)
+			{
+				// expected exception
+			}
+			RmDir(indexDir);
+		}
+		
+		private void  RmDir(System.IO.FileInfo dir)
+		{
+			System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(dir);
+			for (int i = 0; i < files.Length; i++)
+			{
+				bool tmpBool;
+				if (System.IO.File.Exists(files[i].FullName))
+				{
+					System.IO.File.Delete(files[i].FullName);
+					tmpBool = true;
+				}
+				else if (System.IO.Directory.Exists(files[i].FullName))
+				{
+					System.IO.Directory.Delete(files[i].FullName);
+					tmpBool = true;
+				}
+				else
+					tmpBool = false;
+				bool generatedAux = tmpBool;
+			}
+			bool tmpBool2;
+			if (System.IO.File.Exists(dir.FullName))
+			{
+				System.IO.File.Delete(dir.FullName);
+				tmpBool2 = true;
+			}
+			else if (System.IO.Directory.Exists(dir.FullName))
+			{
+				System.IO.Directory.Delete(dir.FullName);
+				tmpBool2 = true;
+			}
+			else
+				tmpBool2 = false;
+			bool generatedAux2 = tmpBool2;
+		}
+		
+		private class PowerIndex : IndexModifier
+		{
+			private void  InitBlock(TestIndexModifier enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestIndexModifier enclosingInstance;
+			public TestIndexModifier Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public PowerIndex(TestIndexModifier enclosingInstance, Directory dir, Analyzer analyzer, bool create):base(dir, analyzer, create)
+			{
+				InitBlock(enclosingInstance);
+			}
+			public virtual int DocFreq(Term term)
+			{
+				lock (directory)
+				{
+					AssureOpen();
+					CreateIndexReader();
+					return indexReader.DocFreq(term);
+				}
+			}
+		}
+	}
+	
+	class IndexThread : SupportClass.ThreadClass
+	{
+		
+		private const int ITERATIONS = 500; // iterations of thread test
+		
+		internal static int id = 0;
+		internal static System.Collections.ArrayList idStack = new System.Collections.ArrayList();
+		
+		internal int added = 0;
+		internal int deleted = 0;
+		
+		private int maxWait = 10;
+		private IndexModifier index;
+		private int threadNumber;
+		private System.Random random;
+		
+		internal IndexThread(IndexModifier index, int maxWait, int threadNumber)
+		{
+			this.index = index;
+			this.maxWait = maxWait;
+			this.threadNumber = threadNumber;
+			// TODO: test case is not reproducible despite pseudo-random numbers:
+			random = new System.Random((System.Int32) (101 + threadNumber)); // constant seed for better reproducability
+		}
+		
+		override public void  Run()
+		{
+			try
+			{
+				for (int i = 0; i < ITERATIONS; i++)
+				{
+					int rand = random.Next(101);
+					if (rand < 5)
+					{
+						index.Optimize();
+					}
+					else if (rand < 60)
+					{
+						Lucene.Net.Documents.Document doc = GetDocument();
+						index.AddDocument(doc);
+						idStack.Add(doc.Get("id"));
+						added++;
+					}
+					else
+					{
+						// we just delete the last document added and remove it
+						// from the id stack so that it won't be removed twice:
+						System.String delId = null;
+						try
+						{
+                            delId = idStack[idStack.Count - 1] as System.String;
+                            idStack.RemoveAt(idStack.Count - 1);
+						}
+						catch (System.ArgumentOutOfRangeException e)
+						{
+							continue;
+						}
+						Term delTerm = new Term("id", System.Int32.Parse(delId).ToString());
+						int delCount = index.Delete(delTerm);
+						if (delCount != 1)
+						{
+							throw new System.SystemException("Internal error: " + threadNumber + " deleted " + delCount + " documents, term=" + delTerm);
+						}
+						deleted++;
+					}
+					if (maxWait > 0)
+					{
+						try
+						{
+							rand = random.Next(maxWait);
+							//System.out.println("waiting " + rand + "ms");
+							System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * rand));
+						}
+						catch (System.Threading.ThreadInterruptedException e)
+						{
+							throw new System.SystemException(e.Message);
+						}
+					}
+				}
+			}
+			catch (System.IO.IOException e)
+			{
+				throw new System.SystemException(e.Message);
+			}
+		}
+		
+		private Lucene.Net.Documents.Document GetDocument()
+		{
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			lock (GetType())
+			{
+				doc.Add(new Field("id", ((System.Int32) id).ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+				id++;
+			}
+			// add random stuff:
+			doc.Add(new Field("content", ((System.Int32) random.Next(1000)).ToString(), Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("content", ((System.Int32) random.Next(1000)).ToString(), Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.TOKENIZED));
+			return doc;
+		}
+	}
+}
\ No newline at end of file



Mime
View raw message