lucenenet-commits mailing list archives

From ccurr...@apache.org
Subject svn commit: r1299911 [5/14] - in /incubator/lucene.net/trunk: src/core/ src/core/Analysis/ src/core/Analysis/Standard/ src/core/Analysis/Tokenattributes/ src/core/Document/ src/core/Index/ src/core/Messages/ src/core/QueryParser/ src/core/Search/ src/c...
Date Mon, 12 Mar 2012 22:29:37 GMT
Modified: incubator/lucene.net/trunk/src/core/Index/TermVectorOffsetInfo.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/TermVectorOffsetInfo.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/TermVectorOffsetInfo.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/TermVectorOffsetInfo.cs Mon Mar 12 22:29:26 2012
@@ -44,36 +44,26 @@ namespace Lucene.Net.Index
 			this.endOffset = endOffset;
 			this.startOffset = startOffset;
 		}
-		
-		/// <summary> The accessor for the ending offset for the term</summary>
-		/// <returns> The offset
-		/// </returns>
-		public virtual int GetEndOffset()
-		{
-			return endOffset;
-		}
-		
-		public virtual void  SetEndOffset(int endOffset)
-		{
-			this.endOffset = endOffset;
-		}
-		
-		/// <summary> The accessor for the starting offset of the term.
-		/// 
-		/// </summary>
-		/// <returns> The offset
-		/// </returns>
-		public virtual int GetStartOffset()
-		{
-			return startOffset;
-		}
-		
-		public virtual void  SetStartOffset(int startOffset)
-		{
-			this.startOffset = startOffset;
-		}
-		
-		/// <summary> Two TermVectorOffsetInfos are equals if both the start and end offsets are the same</summary>
+
+	    /// <summary> The accessor for the ending offset for the term</summary>
+	    /// <value> The offset </value>
+	    public virtual int EndOffset
+	    {
+	        get { return endOffset; }
+	        set { this.endOffset = value; }
+	    }
+
+	    /// <summary> The accessor for the starting offset of the term.
+	    /// 
+	    /// </summary>
+	    /// <value> The offset </value>
+	    public virtual int StartOffset
+	    {
+	        get { return startOffset; }
+	        set { this.startOffset = value; }
+	    }
+
+	    /// <summary> Two TermVectorOffsetInfos are equals if both the start and end offsets are the same</summary>
 		/// <param name="o">The comparison Object
 		/// </param>
 		/// <returns> true if both <see cref="GetStartOffset()" /> and <see cref="GetEndOffset()" /> are the same for both objects.

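The hunk above shows the conversion applied throughout this commit: Java-style GetX()/SetX() accessor pairs become .NET properties, and call sites switch from method calls to property reads and assignments. The FilePointer, SizeInBytes and DocStoreOffset changes in the writer classes below are the same change seen from the calling side. A minimal caller-side sketch; the variable and offset values are illustrative, not taken from the patch:

    using Lucene.Net.Index;

    // before this commit:
    //   int length = info.GetEndOffset() - info.GetStartOffset();
    //   info.SetEndOffset(7);

    // after this commit:
    TermVectorOffsetInfo info = new TermVectorOffsetInfo(0, 5);
    int length = info.EndOffset - info.StartOffset;
    info.EndOffset = 7;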
Modified: incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriter.cs Mon Mar 12 22:29:26 2012
@@ -74,7 +74,7 @@ namespace Lucene.Net.Index
 					if (state.numDocsInStore > 0)
 					// In case there are some final documents that we
 					// didn't see (because they hit a non-aborting exception):
-						Fill(state.numDocsInStore - docWriter.GetDocStoreOffset());
+						Fill(state.numDocsInStore - docWriter.DocStoreOffset);
 					
 					tvx.Flush();
 					tvd.Flush();
@@ -104,7 +104,7 @@ namespace Lucene.Net.Index
 				{
 					// At least one doc in this run had term vectors
 					// enabled
-					Fill(state.numDocsInStore - docWriter.GetDocStoreOffset());
+					Fill(state.numDocsInStore - docWriter.DocStoreOffset);
 					tvx.Close();
 					tvf.Close();
 					tvd.Close();
@@ -156,14 +156,14 @@ namespace Lucene.Net.Index
 		/// </summary>
 		internal void  Fill(int docID)
 		{
-			int docStoreOffset = docWriter.GetDocStoreOffset();
+			int docStoreOffset = docWriter.DocStoreOffset;
 			int end = docID + docStoreOffset;
 			if (lastDocID < end)
 			{
-				long tvfPosition = tvf.GetFilePointer();
+				long tvfPosition = tvf.FilePointer;
 				while (lastDocID < end)
 				{
-					tvx.WriteLong(tvd.GetFilePointer());
+					tvx.WriteLong(tvd.FilePointer);
 					tvd.WriteVInt(0);
 					tvx.WriteLong(tvfPosition);
 					lastDocID++;
@@ -178,7 +178,7 @@ namespace Lucene.Net.Index
 				if (tvx == null)
 				{
 					
-					System.String docStoreSegment = docWriter.GetDocStoreSegment();
+					System.String docStoreSegment = docWriter.DocStoreSegment;
 					
 					if (docStoreSegment == null)
 						return ;
@@ -218,8 +218,8 @@ namespace Lucene.Net.Index
 				Fill(perDoc.docID);
 				
 				// Append term vectors to the real outputs:
-				tvx.WriteLong(tvd.GetFilePointer());
-				tvx.WriteLong(tvf.GetFilePointer());
+				tvx.WriteLong(tvd.FilePointer);
+				tvx.WriteLong(tvf.FilePointer);
 				tvd.WriteVInt(perDoc.numVectorFields);
 				if (perDoc.numVectorFields > 0)
 				{
@@ -237,7 +237,7 @@ namespace Lucene.Net.Index
 					perDoc.numVectorFields = 0;
 				}
 				
-				System.Diagnostics.Debug.Assert(lastDocID == perDoc.docID + docWriter.GetDocStoreOffset());
+				System.Diagnostics.Debug.Assert(lastDocID == perDoc.docID + docWriter.DocStoreOffset);
 				
 				lastDocID++;
                 perDoc.Reset();
@@ -350,13 +350,13 @@ namespace Lucene.Net.Index
 					fieldPointers = ArrayUtil.Grow(fieldPointers);
 				}
 				fieldNumbers[numVectorFields] = fieldNumber;
-                fieldPointers[numVectorFields] = perDocTvf.GetFilePointer();
+                fieldPointers[numVectorFields] = perDocTvf.FilePointer;
 				numVectorFields++;
 			}
 			
 			public override long SizeInBytes()
 			{
-                return buffer.GetSizeInBytes();
+                return buffer.SizeInBytes;
 			}
 			
 			public override void  Finish()

Modified: incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriterPerField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriterPerField.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriterPerField.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriterPerField.cs Mon Mar 12 22:29:26 2012
@@ -16,9 +16,8 @@
  */
 
 using System;
-
+using Lucene.Net.Documents;
 using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
-using Fieldable = Lucene.Net.Documents.Fieldable;
 using IndexOutput = Lucene.Net.Store.IndexOutput;
 using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
 
@@ -57,7 +56,7 @@ namespace Lucene.Net.Index
 			return 2;
 		}
 		
-		internal override bool Start(Fieldable[] fields, int count)
+		internal override bool Start(IFieldable[] fields, int count)
 		{
 			doVectors = false;
 			doVectorPositions = false;
@@ -65,12 +64,12 @@ namespace Lucene.Net.Index
 			
 			for (int i = 0; i < count; i++)
 			{
-				Fieldable field = fields[i];
-				if (field.IsIndexed() && field.IsTermVectorStored())
+				IFieldable field = fields[i];
+				if (field.IsIndexed && field.IsTermVectorStored)
 				{
 					doVectors = true;
-					doVectorPositions |= field.IsStorePositionWithTermVector();
-					doVectorOffsets |= field.IsStoreOffsetWithTermVector();
+					doVectorPositions |= field.IsStorePositionWithTermVector;
+					doVectorOffsets |= field.IsStoreOffsetWithTermVector;
 				}
 			}
 			
@@ -81,8 +80,8 @@ namespace Lucene.Net.Index
 					perThread.doc = termsWriter.GetPerDoc();
 					perThread.doc.docID = docState.docID;
 					System.Diagnostics.Debug.Assert(perThread.doc.numVectorFields == 0);
-					System.Diagnostics.Debug.Assert(0 == perThread.doc.perDocTvf.Length());
-					System.Diagnostics.Debug.Assert(0 == perThread.doc.perDocTvf.GetFilePointer());
+					System.Diagnostics.Debug.Assert(0 == perThread.doc.perDocTvf.Length);
+					System.Diagnostics.Debug.Assert(0 == perThread.doc.perDocTvf.FilePointer);
 				}
 
                 System.Diagnostics.Debug.Assert(perThread.doc.docID == docState.docID);
@@ -221,7 +220,7 @@ namespace Lucene.Net.Index
 			maxNumPostings = 0;
 		}
 		
-		internal override void  Start(Fieldable f)
+		internal override void  Start(IFieldable f)
 		{
 			if (doVectorOffsets)
 			{

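Alongside the property conversion, this commit renames the Lucene.Net.Documents.Fieldable interface to IFieldable (the .NET I-prefix convention) and turns its boolean accessors into properties, so every consumer of the term-vector flags changes shape. A hedged sketch of reading those flags from a document field; the Field construction below is a placeholder and not part of this patch:

    using Lucene.Net.Documents;

    // a concrete Field implements the renamed IFieldable interface
    IFieldable f = new Field("title", "lucene in action",
                             Field.Store.YES, Field.Index.ANALYZED,
                             Field.TermVector.WITH_POSITIONS_OFFSETS);
    if (f.IsIndexed && f.IsTermVectorStored)               // were IsIndexed()/IsTermVectorStored()
    {
        bool positions = f.IsStorePositionWithTermVector;  // also methods before this commit
        bool offsets   = f.IsStoreOffsetWithTermVector;
    }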
Modified: incubator/lucene.net/trunk/src/core/Index/TermVectorsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/TermVectorsWriter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/TermVectorsWriter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/TermVectorsWriter.cs Mon Mar 12 22:29:26 2012
@@ -54,8 +54,8 @@ namespace Lucene.Net.Index
 		public void  AddAllDocVectors(TermFreqVector[] vectors)
 		{
 			
-			tvx.WriteLong(tvd.GetFilePointer());
-			tvx.WriteLong(tvf.GetFilePointer());
+			tvx.WriteLong(tvd.FilePointer);
+			tvx.WriteLong(tvf.FilePointer);
 			
 			if (vectors != null)
 			{
@@ -66,7 +66,7 @@ namespace Lucene.Net.Index
 				
 				for (int i = 0; i < numFields; i++)
 				{
-					fieldPointers[i] = tvf.GetFilePointer();
+					fieldPointers[i] = tvf.FilePointer;
 					
 					int fieldNumber = fieldInfos.FieldNumber(vectors[i].GetField());
 					
@@ -150,8 +150,8 @@ namespace Lucene.Net.Index
 							int lastEndOffset = 0;
 							for (int k = 0; k < offsets.Length; k++)
 							{
-								int startOffset = offsets[k].GetStartOffset();
-								int endOffset = offsets[k].GetEndOffset();
+								int startOffset = offsets[k].StartOffset;
+								int endOffset = offsets[k].EndOffset;
 								tvf.WriteVInt(startOffset - lastEndOffset);
 								tvf.WriteVInt(endOffset - startOffset);
 								lastEndOffset = endOffset;
@@ -182,8 +182,8 @@ namespace Lucene.Net.Index
 		/// </summary>
 		internal void  AddRawDocuments(TermVectorsReader reader, int[] tvdLengths, int[] tvfLengths, int numDocs)
 		{
-			long tvdPosition = tvd.GetFilePointer();
-			long tvfPosition = tvf.GetFilePointer();
+			long tvdPosition = tvd.FilePointer;
+			long tvfPosition = tvf.FilePointer;
 			long tvdStart = tvdPosition;
 			long tvfStart = tvfPosition;
 			for (int i = 0; i < numDocs; i++)
@@ -195,8 +195,8 @@ namespace Lucene.Net.Index
 			}
 			tvd.CopyBytes(reader.GetTvdStream(), tvdPosition - tvdStart);
 			tvf.CopyBytes(reader.GetTvfStream(), tvfPosition - tvfStart);
-			System.Diagnostics.Debug.Assert(tvd.GetFilePointer() == tvdPosition);
-			System.Diagnostics.Debug.Assert(tvf.GetFilePointer() == tvfPosition);
+			System.Diagnostics.Debug.Assert(tvd.FilePointer == tvdPosition);
+			System.Diagnostics.Debug.Assert(tvf.FilePointer == tvfPosition);
 		}
 		
 		/// <summary>Close all streams. </summary>

Modified: incubator/lucene.net/trunk/src/core/Index/TermsHashConsumerPerField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/TermsHashConsumerPerField.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/TermsHashConsumerPerField.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/TermsHashConsumerPerField.cs Mon Mar 12 22:29:26 2012
@@ -16,8 +16,7 @@
  */
 
 using System;
-
-using Fieldable = Lucene.Net.Documents.Fieldable;
+using Lucene.Net.Documents;
 
 namespace Lucene.Net.Index
 {
@@ -28,10 +27,10 @@ namespace Lucene.Net.Index
     /// </summary>
 	abstract class TermsHashConsumerPerField
 	{
-		internal abstract bool Start(Fieldable[] fields, int count);
+		internal abstract bool Start(IFieldable[] fields, int count);
 		internal abstract void  Finish();
 		internal abstract void  SkippingLongTerm();
-		internal abstract void  Start(Fieldable field);
+		internal abstract void  Start(IFieldable field);
 		internal abstract void  NewTerm(RawPostingList p);
 		internal abstract void  AddTerm(RawPostingList p);
 		internal abstract int GetStreamCount();

Modified: incubator/lucene.net/trunk/src/core/Index/TermsHashPerField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/TermsHashPerField.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/TermsHashPerField.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/TermsHashPerField.cs Mon Mar 12 22:29:26 2012
@@ -16,9 +16,9 @@
  */
 
 using System;
+using Lucene.Net.Documents;
 using Lucene.Net.Support;
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
-using Fieldable = Lucene.Net.Documents.Fieldable;
 using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
 
 namespace Lucene.Net.Index
@@ -285,7 +285,7 @@ namespace Lucene.Net.Index
 		private bool doCall;
 		private bool doNextCall;
 		
-		internal override void  Start(Fieldable f)
+		internal override void  Start(IFieldable f)
 		{
 			termAtt = fieldState.attributeSource.AddAttribute<TermAttribute>();
 			consumer.Start(f);
@@ -295,7 +295,7 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		internal override bool Start(Fieldable[] fields, int count)
+		internal override bool Start(IFieldable[] fields, int count)
 		{
 			doCall = consumer.Start(fields, count);
 			if (nextPerField != null)

Modified: incubator/lucene.net/trunk/src/core/Lucene.Net.csproj
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Lucene.Net.csproj?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Lucene.Net.csproj (original)
+++ incubator/lucene.net/trunk/src/core/Lucene.Net.csproj Mon Mar 12 22:29:26 2012
@@ -479,7 +479,7 @@
     <Compile Include="Messages\Message.cs" />
     <Compile Include="Messages\MessageImpl.cs" />
     <Compile Include="Messages\NLS.cs" />
-    <Compile Include="Messages\NLSException.cs" />
+    <Compile Include="Messages\INLSException.cs" />
     <Compile Include="QueryParser\CharStream.cs">
       <SubType>Code</SubType>
     </Compile>

Added: incubator/lucene.net/trunk/src/core/Messages/INLSException.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Messages/INLSException.cs?rev=1299911&view=auto
==============================================================================
--- incubator/lucene.net/trunk/src/core/Messages/INLSException.cs (added)
+++ incubator/lucene.net/trunk/src/core/Messages/INLSException.cs Mon Mar 12 22:29:26 2012
@@ -0,0 +1,36 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+namespace Lucene.Net.Messages
+{
+	
+	/// <summary> Interface that exceptions should implement to support lazy loading of messages.
+	/// 
+	/// For Native Language Support (NLS), system of software internationalization.
+	/// 
+	/// This Interface should be implemented by all exceptions that require
+	/// translation
+	/// 
+	/// </summary>
+	public interface INLSException
+	{
+	    /// <value> an instance of a class that implements the Message interface </value>
+	    Message MessageObject { get; }
+	}
+}
\ No newline at end of file

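The new INLSException interface replaces the NLSException interface (see the Lucene.Net.csproj hunk above) and exposes the lazily translated message as a read-only property. A minimal sketch of an exception type that could implement it; MyNlsException is hypothetical and not part of the commit:

    using System;
    using Lucene.Net.Messages;

    [Serializable]
    public class MyNlsException : Exception, INLSException
    {
        private readonly Message msg;

        public MyNlsException(Message msg)
        {
            this.msg = msg;
        }

        // translation is deferred until the caller asks for the message object
        public Message MessageObject
        {
            get { return msg; }
        }
    }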
Modified: incubator/lucene.net/trunk/src/core/Messages/Message.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Messages/Message.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Messages/Message.cs (original)
+++ incubator/lucene.net/trunk/src/core/Messages/Message.cs Mon Mar 12 22:29:26 2012
@@ -25,12 +25,11 @@ namespace Lucene.Net.Messages
 	/// </summary>
 	public interface Message
 	{
-		
-		System.String GetKey();
-		
-		System.Object[] GetArguments();
-		
-		System.String GetLocalizedMessage();
+	    string Key { get; }
+
+	    object[] Arguments { get; }
+
+	    System.String GetLocalizedMessage();
 		
 		System.String GetLocalizedMessage(System.Globalization.CultureInfo locale);
 	}

Modified: incubator/lucene.net/trunk/src/core/Messages/MessageImpl.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Messages/MessageImpl.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Messages/MessageImpl.cs (original)
+++ incubator/lucene.net/trunk/src/core/Messages/MessageImpl.cs Mon Mar 12 22:29:26 2012
@@ -43,30 +43,30 @@ namespace Lucene.Net.Messages
 		{
 			this.arguments = args;
 		}
-		
-		public virtual System.Object[] GetArguments()
-		{
-			return this.arguments;
-		}
-		
-		public virtual System.String GetKey()
-		{
-			return this.key;
-		}
-		
-		public virtual System.String GetLocalizedMessage()
+
+	    public virtual object[] Arguments
+	    {
+	        get { return this.arguments; }
+	    }
+
+	    public virtual string Key
+	    {
+	        get { return this.key; }
+	    }
+
+	    public virtual System.String GetLocalizedMessage()
 		{
 			return GetLocalizedMessage(System.Threading.Thread.CurrentThread.CurrentCulture);
 		}
 		
 		public virtual System.String GetLocalizedMessage(System.Globalization.CultureInfo locale)
 		{
-			return NLS.GetLocalizedMessage(GetKey(), locale, GetArguments());
+			return NLS.GetLocalizedMessage(Key, locale, Arguments);
 		}
 		
 		public override System.String ToString()
 		{
-			System.Object[] args = GetArguments();
+			System.Object[] args = Arguments;
 			StringBuilder argsString = new StringBuilder();
 			if (args != null)
 			{

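In the Message interface, GetKey() and GetArguments() become the Key and Arguments properties, while the two GetLocalizedMessage overloads remain methods because one of them takes a culture. A minimal caller-side sketch; the helper name is illustrative and 'msg' can be any Message implementation, such as a MessageImpl:

    using System.Globalization;
    using Lucene.Net.Messages;

    static string Describe(Message msg)
    {
        string key = msg.Key;              // was msg.GetKey()
        object[] args = msg.Arguments;     // was msg.GetArguments()
        int count = args == null ? 0 : args.Length;
        return key + " (" + count + " args): "
             + msg.GetLocalizedMessage(CultureInfo.InvariantCulture);   // still a method
    }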
Modified: incubator/lucene.net/trunk/src/core/QueryParser/CharStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/QueryParser/CharStream.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/QueryParser/CharStream.cs (original)
+++ incubator/lucene.net/trunk/src/core/QueryParser/CharStream.cs Mon Mar 12 22:29:26 2012
@@ -37,51 +37,51 @@ namespace Lucene.Net.QueryParsers
 	/// operation.
 	/// </summary>
 	
-	public interface CharStream
+	public interface ICharStream
 	{
 		/// <summary> Returns the next character from the selected input.  The method
 		/// of selecting the input is the responsibility of the class
 		/// implementing this interface.  Can throw any java.io.IOException.
 		/// </summary>
 		char ReadChar();
-		
-		/// <summary> Returns the column position of the character last read.</summary>
-		/// <deprecated>
-		/// </deprecated>
-		/// <seealso cref="GetEndColumn">
-		/// </seealso>
-        [Obsolete]
-		int GetColumn();
-		
-		/// <summary> Returns the line number of the character last read.</summary>
+
+	    /// <summary> Returns the column position of the character last read.</summary>
+	    /// <deprecated>
+	    /// </deprecated>
+	    /// <seealso cref="GetEndColumn">
+	    /// </seealso>
+	    [Obsolete]
+	    int Column { get; }
+
+	    /// <summary> Returns the line number of the character last read.</summary>
 		/// <deprecated>
 		/// </deprecated>
 		/// <seealso cref="GetEndLine">
 		/// </seealso>
         [Obsolete]
 		int GetLine();
-		
-		/// <summary> Returns the column number of the last character for current token (being
-		/// matched after the last call to BeginTOken).
-		/// </summary>
-		int GetEndColumn();
-		
-		/// <summary> Returns the line number of the last character for current token (being
-		/// matched after the last call to BeginTOken).
-		/// </summary>
-		int GetEndLine();
-		
-		/// <summary> Returns the column number of the first character for current token (being
-		/// matched after the last call to BeginTOken).
-		/// </summary>
-		int GetBeginColumn();
-		
-		/// <summary> Returns the line number of the first character for current token (being
-		/// matched after the last call to BeginTOken).
-		/// </summary>
-		int GetBeginLine();
-		
-		/// <summary> Backs up the input stream by amount steps. Lexer calls this method if it
+
+	    /// <summary> Returns the column number of the last character for current token (being
+	    /// matched after the last call to BeginTOken).
+	    /// </summary>
+	    int EndColumn { get; }
+
+	    /// <summary> Returns the line number of the last character for current token (being
+	    /// matched after the last call to BeginTOken).
+	    /// </summary>
+	    int EndLine { get; }
+
+	    /// <summary> Returns the column number of the first character for current token (being
+	    /// matched after the last call to BeginTOken).
+	    /// </summary>
+	    int BeginColumn { get; }
+
+	    /// <summary> Returns the line number of the first character for current token (being
+	    /// matched after the last call to BeginTOken).
+	    /// </summary>
+	    int BeginLine { get; }
+
+	    /// <summary> Backs up the input stream by amount steps. Lexer calls this method if it
 		/// had already read some characters, but could not use them to match a
 		/// (longer) token. So, they will be used again as the prefix of the next
 		/// token and it is the implemetation's responsibility to do this right.
@@ -93,15 +93,15 @@ namespace Lucene.Net.QueryParsers
 		/// to this method to implement backup correctly.
 		/// </summary>
 		char BeginToken();
-		
-		/// <summary> Returns a string made up of characters from the marked token beginning
-		/// to the current buffer position. Implementations have the choice of returning
-		/// anything that they want to. For example, for efficiency, one might decide
-		/// to just return null, which is a valid implementation.
-		/// </summary>
-		System.String GetImage();
-		
-		/// <summary> Returns an array of characters that make up the suffix of length 'len' for
+
+	    /// <summary> Returns a string made up of characters from the marked token beginning
+	    /// to the current buffer position. Implementations have the choice of returning
+	    /// anything that they want to. For example, for efficiency, one might decide
+	    /// to just return null, which is a valid implementation.
+	    /// </summary>
+	    string Image { get; }
+
+	    /// <summary> Returns an array of characters that make up the suffix of length 'len' for
 		/// the currently matched token. This is used to build up the matched string
 		/// for use in actions in the case of MORE. A simple and inefficient
 		/// implementation of this is as follows :

Modified: incubator/lucene.net/trunk/src/core/QueryParser/FastCharStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/QueryParser/FastCharStream.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/QueryParser/FastCharStream.cs (original)
+++ incubator/lucene.net/trunk/src/core/QueryParser/FastCharStream.cs Mon Mar 12 22:29:26 2012
@@ -28,7 +28,7 @@ namespace Lucene.Net.QueryParsers
 	/// API. 
 	/// 
 	/// </summary>
-	public sealed class FastCharStream : CharStream
+	public sealed class FastCharStream : ICharStream
 	{
 		internal char[] buffer = null;
 		
@@ -101,13 +101,13 @@ namespace Lucene.Net.QueryParsers
 		{
 			bufferPosition -= amount;
 		}
-		
-		public System.String GetImage()
-		{
-			return new System.String(buffer, tokenStart, bufferPosition - tokenStart);
-		}
-		
-		public char[] GetSuffix(int len)
+
+	    public string Image
+	    {
+	        get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
+	    }
+
+	    public char[] GetSuffix(int len)
 		{
 			char[] value_Renamed = new char[len];
 			Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
@@ -125,30 +125,35 @@ namespace Lucene.Net.QueryParsers
 				System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
 			}
 		}
-		
-		public int GetColumn()
-		{
-			return bufferStart + bufferPosition;
-		}
-		public int GetLine()
-		{
-			return 1;
-		}
-		public int GetEndColumn()
-		{
-			return bufferStart + bufferPosition;
-		}
-		public int GetEndLine()
-		{
-			return 1;
-		}
-		public int GetBeginColumn()
-		{
-			return bufferStart + tokenStart;
-		}
-		public int GetBeginLine()
+
+	    public int Column
+	    {
+	        get { return bufferStart + bufferPosition; }
+	    }
+
+	    public int GetLine()
 		{
 			return 1;
 		}
+
+	    public int EndColumn
+	    {
+	        get { return bufferStart + bufferPosition; }
+	    }
+
+	    public int EndLine
+	    {
+	        get { return 1; }
+	    }
+
+	    public int BeginColumn
+	    {
+	        get { return bufferStart + tokenStart; }
+	    }
+
+	    public int BeginLine
+	    {
+	        get { return 1; }
+	    }
 	}
 }
\ No newline at end of file

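CharStream is renamed to ICharStream and its position/image accessors become read-only properties (GetLine() is left as a method here, matching the partially deprecated original API); FastCharStream is updated to match. A hedged caller-side sketch, assuming the existing FastCharStream(System.IO.TextReader) constructor; the input string is illustrative:

    using System.IO;
    using Lucene.Net.QueryParsers;

    ICharStream stream = new FastCharStream(new StringReader("title:lucene"));
    char first = stream.BeginToken();
    int line = stream.BeginLine;       // was stream.GetBeginLine()
    int col  = stream.BeginColumn;     // was stream.GetBeginColumn()
    string image = stream.Image;       // was stream.GetImage()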
Modified: incubator/lucene.net/trunk/src/core/QueryParser/MultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/QueryParser/MultiFieldQueryParser.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/QueryParser/MultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/trunk/src/core/QueryParser/MultiFieldQueryParser.cs Mon Mar 12 22:29:26 2012
@@ -123,7 +123,7 @@ namespace Lucene.Net.QueryParsers
 						{
 							//Get the boost from the map and apply them
 							Single boost = boosts[fields[i]];
-							q.SetBoost(boost);
+							q.Boost = boost;
 						}
 						ApplySlop(q, slop);
 						clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
@@ -143,11 +143,11 @@ namespace Lucene.Net.QueryParsers
 		{
 			if (q is PhraseQuery)
 			{
-				((PhraseQuery) q).SetSlop(slop);
+				((PhraseQuery) q).Slop = slop;
 			}
 			else if (q is MultiPhraseQuery)
 			{
-				((MultiPhraseQuery) q).SetSlop(slop);
+				((MultiPhraseQuery) q).Slop = slop;
 			}
 		}
 		

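Query.SetBoost(), PhraseQuery.SetSlop() and MultiPhraseQuery.SetSlop() become the Boost and Slop properties, which is why ApplySlop and the boost handling above now use assignments. An illustrative caller; the field and terms are placeholders:

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    PhraseQuery pq = new PhraseQuery();
    pq.Add(new Term("body", "apache"));
    pq.Add(new Term("body", "lucene"));
    pq.Slop = 2;        // was pq.SetSlop(2)
    pq.Boost = 1.5f;    // was pq.SetBoost(1.5f)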
Modified: incubator/lucene.net/trunk/src/core/QueryParser/QueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/QueryParser/QueryParser.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/QueryParser/QueryParser.cs (original)
+++ incubator/lucene.net/trunk/src/core/QueryParser/QueryParser.cs Mon Mar 12 22:29:26 2012
@@ -19,6 +19,7 @@
 
 using System;
 using System.Collections.Generic;
+using System.Globalization;
 using System.Linq;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
@@ -119,7 +120,7 @@ namespace Lucene.Net.QueryParsers
 	/// <b>NOTE</b>: You must specify the required <see cref="Version" /> compatibility when
 	/// creating QueryParser:
 	/// <list type="bullet">
-	/// <item>As of 2.9, <see cref="SetEnablePositionIncrements" /> is true by default.</item>
+	/// <item>As of 2.9, <see cref="EnablePositionIncrements" /> is true by default.</item>
 	/// </list>
 	/// </summary>
 	public class QueryParser : QueryParserConstants
@@ -230,88 +231,77 @@ namespace Lucene.Net.QueryParsers
 				throw e;
 			}
 		}
-		
-		/// <returns> Returns the analyzer.
-		/// </returns>
-		public virtual Analyzer GetAnalyzer()
-		{
-			return analyzer;
-		}
-		
-		/// <returns> Returns the field.
-		/// </returns>
-		public virtual System.String GetField()
-		{
-			return field;
-		}
-		
-		/// <summary> Get the minimal similarity for fuzzy queries.</summary>
-		public virtual float GetFuzzyMinSim()
-		{
-			return fuzzyMinSim;
-		}
-		
-		/// <summary> Set the minimum similarity for fuzzy queries.
-		/// Default is 0.5f.
-		/// </summary>
-		public virtual void  SetFuzzyMinSim(float fuzzyMinSim)
-		{
-			this.fuzzyMinSim = fuzzyMinSim;
-		}
-		
-		/// <summary> Get the prefix length for fuzzy queries. </summary>
-		/// <returns> Returns the fuzzyPrefixLength.
-		/// </returns>
-		public virtual int GetFuzzyPrefixLength()
-		{
-			return fuzzyPrefixLength;
-		}
-		
-		/// <summary> Set the prefix length for fuzzy queries. Default is 0.</summary>
-		/// <param name="fuzzyPrefixLength">The fuzzyPrefixLength to set.
-		/// </param>
-		public virtual void  SetFuzzyPrefixLength(int fuzzyPrefixLength)
-		{
-			this.fuzzyPrefixLength = fuzzyPrefixLength;
-		}
-		
-		/// <summary> Sets the default slop for phrases.  If zero, then exact phrase matches
-		/// are required.  Default value is zero.
-		/// </summary>
-		public virtual void  SetPhraseSlop(int phraseSlop)
-		{
-			this.phraseSlop = phraseSlop;
-		}
-		
-		/// <summary> Gets the default slop for phrases.</summary>
-		public virtual int GetPhraseSlop()
-		{
-			return phraseSlop;
-		}
-		
-		
-		/// <summary> Set to <c>true</c> to allow leading wildcard characters.
-		/// <p/>
-		/// When set, <c>*</c> or <c>?</c> are allowed as 
-		/// the first character of a PrefixQuery and WildcardQuery.
-		/// Note that this can produce very slow
-		/// queries on big indexes. 
-		/// <p/>
-		/// Default: false.
-		/// </summary>
-		public virtual void  SetAllowLeadingWildcard(bool allowLeadingWildcard)
-		{
-			this.allowLeadingWildcard = allowLeadingWildcard;
-		}
 
-        /// <seealso cref="SetAllowLeadingWildcard(bool)">
-		/// </seealso>
-		public virtual bool GetAllowLeadingWildcard()
-		{
-			return allowLeadingWildcard;
-		}
-		
-		/// <summary> Set to <c>true</c> to enable position increments in result query.
+	    /// <value> Returns the analyzer. </value>
+	    public virtual Analyzer Analyzer
+	    {
+	        get { return analyzer; }
+	    }
+
+	    /// <value> Returns the field. </value>
+	    public virtual string Field
+	    {
+	        get { return field; }
+	    }
+
+	    /// <summary> 
+        /// Gets or sets the minimal similarity for fuzzy queries.
+	    /// Default is 0.5f.
+	    /// </summary>
+	    public virtual float FuzzyMinSim
+	    {
+	        get { return fuzzyMinSim; }
+	        set { this.fuzzyMinSim = value; }
+	    }
+
+        /// <summary> Gets or sets the prefix length for fuzzy queries. </summary>
+	    /// <value> Returns the fuzzyPrefixLength. </value>
+	    public virtual int FuzzyPrefixLength
+	    {
+	        get { return fuzzyPrefixLength; }
+	        set { this.fuzzyPrefixLength = value; }
+	    }
+
+	    /// <summary> Gets or sets the default slop for phrases.  If zero, then exact phrase matches
+	    /// are required.  Default value is zero.
+	    /// </summary>
+	    public virtual int PhraseSlop
+	    {
+	        set { this.phraseSlop = value; }
+	        get { return phraseSlop; }
+	    }
+
+	    /// <summary> Set to <c>true</c> to allow leading wildcard characters.
+	    /// <p/>
+	    /// When set, <c>*</c> or <c>?</c> are allowed as 
+	    /// the first character of a PrefixQuery and WildcardQuery.
+	    /// Note that this can produce very slow
+	    /// queries on big indexes. 
+	    /// <p/>
+	    /// Default: false.
+	    /// </summary>
+	    public virtual bool AllowLeadingWildcard
+	    {
+	        set { this.allowLeadingWildcard = value; }
+	        get { return allowLeadingWildcard; }
+	    }
+
+	    public class SetEnablePositionIncrementsParams
+	    {
+	        private bool _enable;
+
+	        public SetEnablePositionIncrementsParams(bool enable)
+	        {
+	            _enable = enable;
+	        }
+
+	        public bool Enable
+	        {
+	            get { return _enable; }
+	        }
+	    }
+
+	    /// <summary> Set to <c>true</c> to enable position increments in result query.
 		/// <p/>
 		/// When set, result phrase and multi-phrase queries will
 		/// be aware of position increments.
@@ -320,91 +310,63 @@ namespace Lucene.Net.QueryParsers
 		/// <p/>
 		/// Default: false.
 		/// </summary>
-		public virtual void  SetEnablePositionIncrements(bool enable)
+		public virtual void SetEnablePositionIncrements(SetEnablePositionIncrementsParams setEnablePositionIncrementsParams)
 		{
-			this.enablePositionIncrements = enable;
+			this.enablePositionIncrements = setEnablePositionIncrementsParams.Enable;
 		}
 
-        /// <seealso cref="SetEnablePositionIncrements(bool)">
-		/// </seealso>
-		public virtual bool GetEnablePositionIncrements()
-		{
-			return enablePositionIncrements;
-		}
-		
-		/// <summary> Sets the boolean operator of the QueryParser.
-		/// In default mode (<c>OR_OPERATOR</c>) terms without any modifiers
-		/// are considered optional: for example <c>capital of Hungary</c> is equal to
-		/// <c>capital OR of OR Hungary</c>.<br/>
-		/// In <c>AND_OPERATOR</c> mode terms are considered to be in conjunction: the
-		/// above mentioned query is parsed as <c>capital AND of AND Hungary</c>
-		/// </summary>
-		public virtual void  SetDefaultOperator(Operator op)
-		{
-			this.operator_Renamed = op;
-		}
-		
-		
-		/// <summary> Gets implicit operator setting, which will be either AND_OPERATOR
-		/// or OR_OPERATOR.
-		/// </summary>
-		public virtual Operator GetDefaultOperator()
-		{
-			return operator_Renamed;
-		}
-		
-		
-		/// <summary> Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
-		/// lower-cased or not.  Default is <c>true</c>.
-		/// </summary>
-		public virtual void  SetLowercaseExpandedTerms(bool lowercaseExpandedTerms)
-		{
-			this.lowercaseExpandedTerms = lowercaseExpandedTerms;
-		}
-		
-		
-		/// <seealso cref="SetLowercaseExpandedTerms(bool)">
-		/// </seealso>
-		public virtual bool GetLowercaseExpandedTerms()
-		{
-			return lowercaseExpandedTerms;
-		}
+	    public virtual bool EnablePositionIncrements
+	    {
+	        get { return enablePositionIncrements; }
+	    }
 
-		/// <summary> By default QueryParser uses <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT" />
-		/// when creating a PrefixQuery, WildcardQuery or RangeQuery. This implementation is generally preferable because it 
-		/// a) Runs faster b) Does not have the scarcity of terms unduly influence score 
-		/// c) avoids any "TooManyBooleanClauses" exception.
-		/// However, if your application really needs to use the
-		/// old-fashioned BooleanQuery expansion rewriting and the above
-		/// points are not relevant then use this to change
-		/// the rewrite method.
-		/// </summary>
-		public virtual void  SetMultiTermRewriteMethod(MultiTermQuery.RewriteMethod method)
-		{
-			multiTermRewriteMethod = method;
-		}
-		
-		
-		/// <seealso cref="SetMultiTermRewriteMethod">
-		/// </seealso>
-		public virtual MultiTermQuery.RewriteMethod GetMultiTermRewriteMethod()
-		{
-			return multiTermRewriteMethod;
-		}
-		
-		/// <summary> Set locale used by date range parsing.</summary>
-		public virtual void  SetLocale(System.Globalization.CultureInfo locale)
-		{
-			this.locale = locale;
-		}
-		
-		/// <summary> Returns current locale, allowing access by subclasses.</summary>
-		public virtual System.Globalization.CultureInfo GetLocale()
-		{
-			return locale;
-		}
-		
-		/// <summary> Sets the default date resolution used by RangeQueries for fields for which no
+	    /// <summary> Gets or sets the boolean operator of the QueryParser.
+	    /// In default mode (<c>OR_OPERATOR</c>) terms without any modifiers
+	    /// are considered optional: for example <c>capital of Hungary</c> is equal to
+	    /// <c>capital OR of OR Hungary</c>.<br/>
+	    /// In <c>AND_OPERATOR</c> mode terms are considered to be in conjunction: the
+	    /// above mentioned query is parsed as <c>capital AND of AND Hungary</c>
+	    /// </summary>
+	    public virtual Operator DefaultOperator
+	    {
+	        set { this.operator_Renamed = value; }
+	        get { return operator_Renamed; }
+	    }
+
+	    /// <summary> Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
+	    /// lower-cased or not.  Default is <c>true</c>.
+	    /// </summary>
+	    public virtual bool LowercaseExpandedTerms
+	    {
+	        set { this.lowercaseExpandedTerms = value; }
+	        get { return lowercaseExpandedTerms; }
+	    }
+
+
+	    /// <summary> By default QueryParser uses <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT" />
+	    /// when creating a PrefixQuery, WildcardQuery or RangeQuery. This implementation is generally preferable because it 
+	    /// a) Runs faster b) Does not have the scarcity of terms unduly influence score 
+	    /// c) avoids any "TooManyBooleanClauses" exception.
+	    /// However, if your application really needs to use the
+	    /// old-fashioned BooleanQuery expansion rewriting and the above
+	    /// points are not relevant then use this to change
+	    /// the rewrite method.
+	    /// </summary>
+	    public virtual MultiTermQuery.RewriteMethod MultiTermRewriteMethod
+	    {
+	        set { multiTermRewriteMethod = value; }
+	        get { return multiTermRewriteMethod; }
+	    }
+
+
+	    /// <summary>Gets or sets locale used by date range parsing.</summary>
+	    public virtual CultureInfo Locale
+	    {
+	        set { this.locale = value; }
+	        get { return locale; }
+	    }
+
+	    /// <summary> Sets the default date resolution used by RangeQueries for fields for which no
 		/// specific date resolutions has been set. Field specific resolutions can be set
 		/// with <see cref="SetDateResolution(String, DateTools.Resolution)" />.
 		/// 
@@ -415,9 +377,10 @@ namespace Lucene.Net.QueryParsers
 		{
 			this.dateResolution = dateResolution;
 		}
-		
-		/// <summary> Sets the date resolution used by RangeQueries for a specific field.
-		/// 
+
+        /// <summary> Returns the date resolution that is used by RangeQueries for the given field. 
+        /// Returns null, if no default or field specific date resolution has been set
+        /// for the given field.
 		/// </summary>
 		/// <param name="fieldName">field for which the date resolution is to be set 
 		/// </param>
@@ -466,33 +429,25 @@ namespace Lucene.Net.QueryParsers
 			
 			return resolution;
 		}
-		
-		/// <summary> Sets the collator used to determine index term inclusion in ranges
-		/// for RangeQuerys.
-		/// <p/>
-		/// <strong>WARNING:</strong> Setting the rangeCollator to a non-null
-		/// collator using this method will cause every single index Term in the
-		/// Field referenced by lowerTerm and/or upperTerm to be examined.
-		/// Depending on the number of index Terms in this Field, the operation could
-		/// be very slow.
-		/// 
-		/// </summary>
-		/// <param name="rc"> the collator to use when constructing RangeQuerys
-		/// </param>
-		public virtual void  SetRangeCollator(System.Globalization.CompareInfo rc)
-		{
-			rangeCollator = rc;
-		}
-		
-		/// <returns> the collator used to determine index term inclusion in ranges
-		/// for RangeQuerys.
-		/// </returns>
-		public virtual System.Globalization.CompareInfo GetRangeCollator()
-		{
-			return rangeCollator;
-		}
 
-		protected internal virtual void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
+	    /// <summary> Gets or sets the collator used to determine index term inclusion in ranges
+	    /// for RangeQuerys.
+	    /// <p/>
+	    /// <strong>WARNING:</strong> Setting the rangeCollator to a non-null
+	    /// collator using this method will cause every single index Term in the
+	    /// Field referenced by lowerTerm and/or upperTerm to be examined.
+	    /// Depending on the number of index Terms in this Field, the operation could
+	    /// be very slow.
+	    /// 
+	    /// </summary>
+	    /// <value> the collator to use when constructing RangeQuerys </value>
+	    public virtual CompareInfo RangeCollator
+	    {
+	        set { rangeCollator = value; }
+	        get { return rangeCollator; }
+	    }
+
+	    protected internal virtual void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
 		{
 			bool required, prohibited;
 			
@@ -606,7 +561,7 @@ namespace Lucene.Net.QueryParsers
 					while (hasMoreTokens)
 					{
 						numTokens++;
-						int positionIncrement = (posIncrAtt != null)?posIncrAtt.GetPositionIncrement():1;
+						int positionIncrement = (posIncrAtt != null)?posIncrAtt.PositionIncrement:1;
 						if (positionIncrement != 0)
 						{
 							positionCount += positionIncrement;
@@ -684,7 +639,7 @@ namespace Lucene.Net.QueryParsers
 					{
 						// phrase query:
 						MultiPhraseQuery mpq = NewMultiPhraseQuery();
-						mpq.SetSlop(phraseSlop);
+						mpq.Slop = phraseSlop;
 						IList<Term> multiTerms = new List<Term>();
 						int position = - 1;
 						for (int i = 0; i < numTokens; i++)
@@ -698,7 +653,7 @@ namespace Lucene.Net.QueryParsers
 								term = termAtt.Term();
 								if (posIncrAtt != null)
 								{
-									positionIncrement = posIncrAtt.GetPositionIncrement();
+									positionIncrement = posIncrAtt.PositionIncrement;
 								}
 							}
 							catch (System.IO.IOException e)
@@ -735,7 +690,7 @@ namespace Lucene.Net.QueryParsers
 				else
 				{
 					PhraseQuery pq = NewPhraseQuery();
-					pq.SetSlop(phraseSlop);
+					pq.Slop = phraseSlop;
 					int position = - 1;
 					
 					
@@ -751,7 +706,7 @@ namespace Lucene.Net.QueryParsers
 							term = termAtt.Term();
 							if (posIncrAtt != null)
 							{
-								positionIncrement = posIncrAtt.GetPositionIncrement();
+								positionIncrement = posIncrAtt.PositionIncrement;
 							}
 						}
 						catch (System.IO.IOException e)
@@ -789,11 +744,11 @@ namespace Lucene.Net.QueryParsers
 			
 			if (query is PhraseQuery)
 			{
-				((PhraseQuery) query).SetSlop(slop);
+				((PhraseQuery) query).Slop = slop;
 			}
 			if (query is MultiPhraseQuery)
 			{
-				((MultiPhraseQuery) query).SetSlop(slop);
+				((MultiPhraseQuery) query).Slop = slop;
 			}
 			
 			return query;
@@ -921,7 +876,7 @@ namespace Lucene.Net.QueryParsers
 		protected internal virtual Query NewPrefixQuery(Term prefix)
 		{
 			PrefixQuery query = new PrefixQuery(prefix);
-			query.SetRewriteMethod(multiTermRewriteMethod);
+			query.QueryRewriteMethod = multiTermRewriteMethod;
 			return query;
 		}
 		
@@ -954,7 +909,7 @@ namespace Lucene.Net.QueryParsers
 		protected internal virtual Query NewRangeQuery(System.String field, System.String part1, System.String part2, bool inclusive)
 		{
 			TermRangeQuery query = new TermRangeQuery(field, part1, part2, inclusive, inclusive, rangeCollator);
-			query.SetRewriteMethod(multiTermRewriteMethod);
+			query.QueryRewriteMethod = multiTermRewriteMethod;
 			return query;
 		}
 		
@@ -974,7 +929,7 @@ namespace Lucene.Net.QueryParsers
 		protected internal virtual Query NewWildcardQuery(Term t)
 		{
 			WildcardQuery query = new WildcardQuery(t);
-			query.SetRewriteMethod(multiTermRewriteMethod);
+			query.QueryRewriteMethod = multiTermRewriteMethod;
 			return query;
 		}
 		
@@ -1517,7 +1472,7 @@ label_1_brk: ;  // {{Aroush-2.9}} this l
 				try
 				{
 					f = (float) Single.Parse(boost.image);
-					q.SetBoost(f);
+					q.Boost = f;
 				}
 				catch (System.Exception ignored)
 				{
@@ -1871,7 +1826,7 @@ label_1_brk: ;  // {{Aroush-2.9}} this l
 				// avoid boosting null queries, such as those caused by stop words
 				if (q != null)
 				{
-					q.SetBoost(f);
+					q.Boost = f;
 				}
 			}
 			{
@@ -1955,7 +1910,7 @@ label_1_brk: ;  // {{Aroush-2.9}} this l
 		private int jj_gc = 0;
 		
 		/// <summary>Constructor with user supplied CharStream. </summary>
-		protected internal QueryParser(CharStream stream)
+		protected internal QueryParser(ICharStream stream)
 		{
 			InitBlock();
 			token_source = new QueryParserTokenManager(stream);
@@ -1969,7 +1924,7 @@ label_1_brk: ;  // {{Aroush-2.9}} this l
 		}
 		
 		/// <summary>Reinitialise. </summary>
-		public virtual void  ReInit(CharStream stream)
+		public virtual void  ReInit(ICharStream stream)
 		{
 			token_source.ReInit(stream);
 			token = new Token();

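In QueryParser most of the Get/Set pairs collapse into read/write properties (FuzzyMinSim, FuzzyPrefixLength, PhraseSlop, AllowLeadingWildcard, DefaultOperator, LowercaseExpandedTerms, MultiTermRewriteMethod, Locale, RangeCollator), while SetEnablePositionIncrements is reworked to take the new SetEnablePositionIncrementsParams wrapper and the read side becomes the EnablePositionIncrements property. A hedged configuration sketch, where 'parser' is an existing QueryParser instance and the values are illustrative:

    // illustrative only
    parser.PhraseSlop = 2;                       // was parser.SetPhraseSlop(2)
    parser.AllowLeadingWildcard = true;          // was parser.SetAllowLeadingWildcard(true)
    parser.FuzzyMinSim = 0.6f;                   // was parser.SetFuzzyMinSim(0.6f)
    parser.Locale = System.Globalization.CultureInfo.InvariantCulture;

    // the position-increment API introduced here is asymmetric:
    // write through the params object, read through the property
    parser.SetEnablePositionIncrements(new QueryParser.SetEnablePositionIncrementsParams(true));
    bool positionsEnabled = parser.EnablePositionIncrements;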
Modified: incubator/lucene.net/trunk/src/core/QueryParser/QueryParserTokenManager.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/QueryParser/QueryParserTokenManager.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/QueryParser/QueryParserTokenManager.cs (original)
+++ incubator/lucene.net/trunk/src/core/QueryParser/QueryParserTokenManager.cs Mon Mar 12 22:29:26 2012
@@ -1252,25 +1252,25 @@ namespace Lucene.Net.QueryParsers
 		public static readonly int[] jjnewLexState = new int[]{- 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, 0, - 1, - 1, - 1, - 1, - 1, 2, 1, 3, - 1, 3, - 1, - 1, - 1, 3, - 1, - 1};
 		internal static readonly ulong[] jjtoToken = new ulong[]{0x3ffffff01L};
 		internal static readonly long[] jjtoSkip = new long[]{0x80L};
-		protected internal CharStream input_stream;
+		protected internal ICharStream input_stream;
 		private uint[] jjrounds = new uint[36];
 		private int[] jjstateSet = new int[72];
 		protected internal char curChar;
 		/// <summary>Constructor. </summary>
-		public QueryParserTokenManager(CharStream stream)
+		public QueryParserTokenManager(ICharStream stream)
 		{
 			InitBlock();
 			input_stream = stream;
 		}
 		
 		/// <summary>Constructor. </summary>
-		public QueryParserTokenManager(CharStream stream, int lexState):this(stream)
+		public QueryParserTokenManager(ICharStream stream, int lexState):this(stream)
 		{
 			SwitchTo(lexState);
 		}
 		
 		/// <summary>Reinitialise parser. </summary>
-		public virtual void  ReInit(CharStream stream)
+		public virtual void  ReInit(ICharStream stream)
 		{
 			jjmatchedPos = jjnewStateCnt = 0;
 			curLexState = defaultLexState;
@@ -1286,7 +1286,7 @@ namespace Lucene.Net.QueryParsers
 		}
 		
 		/// <summary>Reinitialise parser. </summary>
-		public virtual void  ReInit(CharStream stream, int lexState)
+		public virtual void  ReInit(ICharStream stream, int lexState)
 		{
 			ReInit(stream);
 			SwitchTo(lexState);
@@ -1310,11 +1310,11 @@ namespace Lucene.Net.QueryParsers
 			int beginColumn;
 			int endColumn;
 			System.String im = jjstrLiteralImages[jjmatchedKind];
-			curTokenImage = (im == null)?input_stream.GetImage():im;
-			beginLine = input_stream.GetBeginLine();
-			beginColumn = input_stream.GetBeginColumn();
-			endLine = input_stream.GetEndLine();
-			endColumn = input_stream.GetEndColumn();
+			curTokenImage = (im == null)?input_stream.Image:im;
+			beginLine = input_stream.BeginLine;
+			beginColumn = input_stream.BeginColumn;
+			endLine = input_stream.EndLine;
+			endColumn = input_stream.EndColumn;
 			t = Token.NewToken(jjmatchedKind, curTokenImage);
 			
 			t.beginLine = beginLine;
@@ -1396,8 +1396,8 @@ namespace Lucene.Net.QueryParsers
 						goto EOFLoop;
 					}
 				}
-				int error_line = input_stream.GetEndLine();
-				int error_column = input_stream.GetEndColumn();
+				int error_line = input_stream.EndLine;
+				int error_column = input_stream.EndColumn;
 				System.String error_after = null;
 				bool EOFSeen = false;
 				try
@@ -1407,7 +1407,7 @@ namespace Lucene.Net.QueryParsers
 				catch (System.IO.IOException e1)
 				{
 					EOFSeen = true;
-					error_after = curPos <= 1?"":input_stream.GetImage();
+					error_after = curPos <= 1?"":input_stream.Image;
 					if (curChar == '\n' || curChar == '\r')
 					{
 						error_line++;
@@ -1419,7 +1419,7 @@ namespace Lucene.Net.QueryParsers
 				if (!EOFSeen)
 				{
 					input_stream.Backup(1);
-					error_after = curPos <= 1?"":input_stream.GetImage();
+					error_after = curPos <= 1?"":input_stream.Image;
 				}
 				throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
 

Modified: incubator/lucene.net/trunk/src/core/QueryParser/Token.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/QueryParser/Token.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/QueryParser/Token.cs (original)
+++ incubator/lucene.net/trunk/src/core/QueryParser/Token.cs Mon Mar 12 22:29:26 2012
@@ -67,20 +67,20 @@ namespace Lucene.Net.QueryParsers
 		/// is no such token, this field is null.
 		/// </summary>
 		public Token specialToken;
-		
-		/// <summary> An optional attribute value of the Token.
-		/// Tokens which are not used as syntactic sugar will often contain
-		/// meaningful values that will be used later on by the compiler or
-		/// interpreter. This attribute value is often different from the image.
-		/// Any subclass of Token that actually wants to return a non-null value can
-		/// override this method as appropriate.
-		/// </summary>
-		public virtual System.Object GetValue()
-		{
-			return null;
-		}
-		
-		/// <summary> No-argument constructor</summary>
+
+	    /// <summary> An optional attribute value of the Token.
+	    /// Tokens which are not used as syntactic sugar will often contain
+	    /// meaningful values that will be used later on by the compiler or
+	    /// interpreter. This attribute value is often different from the image.
+	    /// Any subclass of Token that actually wants to return a non-null value can
+	    /// override this method as appropriate.
+	    /// </summary>
+	    public virtual object Value
+	    {
+	        get { return null; }
+	    }
+
+	    /// <summary> No-argument constructor</summary>
 		public Token()
 		{
 		}

Modified: incubator/lucene.net/trunk/src/core/Search/BooleanQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/BooleanQuery.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/BooleanQuery.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/BooleanQuery.cs Mon Mar 12 22:29:26 2012
@@ -310,37 +310,41 @@ namespace Lucene.Net.Search
 				    weights.Add(Enclosing_Instance.clauses[i].Query.CreateWeight(searcher));
 				}
 			}
-			
-			public override Query GetQuery()
-			{
-				return Enclosing_Instance;
-			}
-			public override float GetValue()
-			{
-				return Enclosing_Instance.GetBoost();
-			}
-			
-			public override float SumOfSquaredWeights()
-			{
-				float sum = 0.0f;
-				for (int i = 0; i < weights.Count; i++)
-				{
-					// call sumOfSquaredWeights for all clauses in case of side effects
-					float s = weights[i].SumOfSquaredWeights(); // sum sub weights
-                    if (!Enclosing_Instance.clauses[i].Prohibited)
-					// only add to sum for non-prohibited clauses
-						sum += s;
-				}
-				
-				sum *= Enclosing_Instance.GetBoost() * Enclosing_Instance.GetBoost(); // boost each sub-weight
-				
-				return sum;
-			}
-			
-			
-			public override void  Normalize(float norm)
+
+		    public override Query Query
+		    {
+		        get { return Enclosing_Instance; }
+		    }
+
+		    public override float Value
+		    {
+		        get { return Enclosing_Instance.Boost; }
+		    }
+
+		    public override float SumOfSquaredWeights
+		    {
+		        get
+		        {
+		            float sum = 0.0f;
+		            for (int i = 0; i < weights.Count; i++)
+		            {
+		                // call sumOfSquaredWeights for all clauses in case of side effects
+		                float s = weights[i].SumOfSquaredWeights; // sum sub weights
+		                if (!Enclosing_Instance.clauses[i].Prohibited)
+		                    // only add to sum for non-prohibited clauses
+		                    sum += s;
+		            }
+
+		            sum *= Enclosing_Instance.Boost*Enclosing_Instance.Boost; // boost each sub-weight
+
+		            return sum;
+		        }
+		    }
+
+
+		    public override void  Normalize(float norm)
 			{
-				norm *= Enclosing_Instance.GetBoost(); // incorporate boost
+				norm *= Enclosing_Instance.Boost; // incorporate boost
 				foreach (Weight w in weights)
 				{
 					// normalize all clauses, (even if prohibited in case of side affects)
@@ -486,31 +490,34 @@ namespace Lucene.Net.Search
 				// Return a BooleanScorer2
 				return new BooleanScorer2(similarity, Enclosing_Instance.minNrShouldMatch, required, prohibited, optional);
 			}
-			
-			public override bool ScoresDocsOutOfOrder()
-			{
-				int numProhibited = 0;
-				foreach (BooleanClause c in Enclosing_Instance.clauses)
-				{
-                    if (c.Required)
-					{
-						return false; // BS2 (in-order) will be used by scorer()
-					}
-                    else if (c.Prohibited)
-					{
-						++numProhibited;
-					}
-				}
-				
-				if (numProhibited > 32)
-				{
-					// cannot use BS
-					return false;
-				}
-				
-				// scorer() will return an out-of-order scorer if requested.
-				return true;
-			}
+
+		    public override bool ScoresDocsOutOfOrder
+		    {
+		        get
+		        {
+		            int numProhibited = 0;
+		            foreach (BooleanClause c in Enclosing_Instance.clauses)
+		            {
+		                if (c.Required)
+		                {
+		                    return false; // BS2 (in-order) will be used by scorer()
+		                }
+		                else if (c.Prohibited)
+		                {
+		                    ++numProhibited;
+		                }
+		            }
+
+		            if (numProhibited > 32)
+		            {
+		                // cannot use BS
+		                return false;
+		            }
+
+		            // scorer() will return an out-of-order scorer if requested.
+		            return true;
+		        }
+		    }
 		}
 		
 		public override Weight CreateWeight(Searcher searcher)
@@ -530,13 +537,13 @@ namespace Lucene.Net.Search
 
                     Query query = c.Query.Rewrite(reader); // rewrite first
 					
-					if (GetBoost() != 1.0f)
+					if (Boost != 1.0f)
 					{
 						// incorporate boost
                         if (query == c.Query)
 						// if rewrite was no-op
 							query = (Query) query.Clone(); // then clone before boost
-						query.SetBoost(GetBoost() * query.GetBoost());
+						query.Boost = Boost * query.Boost;
 					}
 					
 					return query;
@@ -584,7 +591,7 @@ namespace Lucene.Net.Search
 		public override System.String ToString(System.String field)
 		{
 			System.Text.StringBuilder buffer = new System.Text.StringBuilder();
-			bool needParens = (GetBoost() != 1.0) || (MinimumNumberShouldMatch > 0);
+			bool needParens = (Boost != 1.0) || (MinimumNumberShouldMatch > 0);
 			if (needParens)
 			{
 				buffer.Append("(");
@@ -633,9 +640,9 @@ namespace Lucene.Net.Search
 				buffer.Append(MinimumNumberShouldMatch);
 			}
 			
-			if (GetBoost() != 1.0f)
+			if (Boost != 1.0f)
 			{
-				buffer.Append(ToStringUtils.Boost(GetBoost()));
+				buffer.Append(ToStringUtils.Boost(Boost));
 			}
 			
 			return buffer.ToString();
@@ -647,7 +654,7 @@ namespace Lucene.Net.Search
             if (!(o is BooleanQuery))
                 return false;
             BooleanQuery other = (BooleanQuery)o;
-            return (this.GetBoost() == other.GetBoost())
+            return (this.Boost == other.Boost)
                     && this.clauses.Equals(other.clauses)
                     && this.MinimumNumberShouldMatch == other.MinimumNumberShouldMatch
                     && this.disableCoord == other.disableCoord;
@@ -656,7 +663,7 @@ namespace Lucene.Net.Search
 		/// <summary>Returns a hash code value for this object.</summary>
 		public override int GetHashCode()
 		{
-            return BitConverter.ToInt32(BitConverter.GetBytes(GetBoost()), 0) ^ clauses.GetHashCode() + MinimumNumberShouldMatch + (disableCoord ? 17 : 0);
+            return BitConverter.ToInt32(BitConverter.GetBytes(Boost), 0) ^ clauses.GetHashCode() + MinimumNumberShouldMatch + (disableCoord ? 17 : 0);
 		}
 
 	    IEnumerator IEnumerable.GetEnumerator()

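On Weight, GetQuery(), GetValue(), SumOfSquaredWeights() and ScoresDocsOutOfOrder() become properties, so both the BooleanWeight overrides above and any code that drives a Weight change accordingly. A minimal sketch of the usual normalization sequence using the new property forms; the helper name and the zero guard are illustrative:

    using System;
    using Lucene.Net.Search;

    static Weight NormalizedWeight(Query query, Searcher searcher)
    {
        Weight w = query.CreateWeight(searcher);
        float sum = w.SumOfSquaredWeights;                    // was w.SumOfSquaredWeights()
        float norm = sum == 0.0f ? 1.0f : (float)(1.0 / Math.Sqrt(sum));
        w.Normalize(norm);
        return w;                                             // w.Value and w.Query are properties now too
    }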
Modified: incubator/lucene.net/trunk/src/core/Search/BooleanScorer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/BooleanScorer.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/BooleanScorer.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/BooleanScorer.cs Mon Mar 12 22:29:26 2012
@@ -258,7 +258,7 @@ namespace Lucene.Net.Search
 			}
 			
 			coordFactors = new float[maxCoord];
-			Similarity sim = GetSimilarity();
+			Similarity sim = Similarity;
 			for (int i = 0; i < maxCoord; i++)
 			{
 				coordFactors[i] = sim.Coord(i, maxCoord - 1);

Modified: incubator/lucene.net/trunk/src/core/Search/BooleanScorer2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/BooleanScorer2.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/BooleanScorer2.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/BooleanScorer2.cs Mon Mar 12 22:29:26 2012
@@ -146,7 +146,7 @@ namespace Lucene.Net.Search
 			{
 				// use after all scorers have been added.
 				coordFactors = new float[maxCoord + 1];
-				Similarity sim = Enclosing_Instance.GetSimilarity();
+				Similarity sim = Enclosing_Instance.Similarity;
 				for (int i = 0; i <= maxCoord; i++)
 				{
 					coordFactors[i] = sim.Coord(i, maxCoord);
@@ -230,7 +230,7 @@ namespace Lucene.Net.Search
 			// once in score().
 			private float lastDocScore = System.Single.NaN;
 			
-			internal SingleMatchScorer(BooleanScorer2 enclosingInstance, Scorer scorer):base(scorer.GetSimilarity())
+			internal SingleMatchScorer(BooleanScorer2 enclosingInstance, Scorer scorer):base(scorer.Similarity)
 			{
 				InitBlock(enclosingInstance);
 				this.scorer = scorer;
@@ -411,7 +411,7 @@ namespace Lucene.Net.Search
 		
 		static BooleanScorer2()
 		{
-			defaultSimilarity = Similarity.GetDefault();
+			defaultSimilarity = Search.Similarity.Default;
 		}
 	}
 }
\ No newline at end of file

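Both scorers now read the Similarity property instead of calling GetSimilarity() when they precompute their coord factor table. The sketch below mirrors the BooleanScorer2 loop, assuming DefaultSimilarity's coord(overlap, maxOverlap) is simply overlap / maxOverlap; the Coord helper is a stand-in, not the library call.

    using System;

    // Sketch: precomputing the coord factor table the way BooleanScorer2 does.
    static class CoordTableSketch
    {
        static float Coord(int overlap, int maxOverlap)       // stand-in for Similarity.Coord
        {
            return overlap / (float)maxOverlap;
        }

        static void Main()
        {
            int maxCoord = 3;                                 // number of scoring clauses
            var coordFactors = new float[maxCoord + 1];
            for (int i = 0; i <= maxCoord; i++)
                coordFactors[i] = Coord(i, maxCoord);         // computed once, looked up per matching doc

            Console.WriteLine(string.Join(", ", coordFactors));   // 0, 0.3333333, 0.6666667, 1
        }
    }
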
Modified: incubator/lucene.net/trunk/src/core/Search/CachingSpanFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/CachingSpanFilter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/CachingSpanFilter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/CachingSpanFilter.cs Mon Mar 12 22:29:26 2012
@@ -74,7 +74,7 @@ namespace Lucene.Net.Search
 		public override DocIdSet GetDocIdSet(IndexReader reader)
 		{
 			SpanFilterResult result = GetCachedResult(reader);
-			return result != null?result.GetDocIdSet():null;
+			return result != null?result.DocIdSet:null;
 		}
 
         // for testing
@@ -82,8 +82,8 @@ namespace Lucene.Net.Search
 
 		private SpanFilterResult GetCachedResult(IndexReader reader)
 		{
-            object coreKey = reader.GetFieldCacheKey();
-            object delCoreKey = reader.HasDeletions() ? reader.GetDeletesCacheKey() : coreKey;
+            object coreKey = reader.FieldCacheKey;
+            object delCoreKey = reader.HasDeletions ? reader.DeletesCacheKey : coreKey;
 
             SpanFilterResult result = cache.Get(reader, coreKey, delCoreKey);
             if (result != null) {

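GetCachedResult above now keys the cache on reader.FieldCacheKey and switches to DeletesCacheKey once the reader has deletions, so a result computed before documents were deleted is not served afterwards. A simplified sketch of that lookup over a plain dictionary; ReaderStub and the cached string are illustrative stand-ins, and the real FilterCache is passed both keys.

    using System;
    using System.Collections.Generic;

    // Sketch: the cache-key choice committed above, reduced to a single-key lookup.
    class ReaderStub
    {
        public object FieldCacheKey = new object();
        public object DeletesCacheKey = new object();
        public bool HasDeletions;
    }

    static class FilterCacheSketch
    {
        static readonly Dictionary<object, string> cache = new Dictionary<object, string>();

        static string GetCachedResult(ReaderStub reader)
        {
            // A reader with deletions is looked up under a separate key.
            object key = reader.HasDeletions ? reader.DeletesCacheKey : reader.FieldCacheKey;
            string result;
            return cache.TryGetValue(key, out result) ? result : null;
        }

        static void Main()
        {
            var reader = new ReaderStub();
            cache[reader.FieldCacheKey] = "filter bits";
            Console.WriteLine(GetCachedResult(reader) ?? "miss");   // filter bits
            reader.HasDeletions = true;
            Console.WriteLine(GetCachedResult(reader) ?? "miss");   // miss
        }
    }
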
Modified: incubator/lucene.net/trunk/src/core/Search/CachingWrapperFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/CachingWrapperFilter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/CachingWrapperFilter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/CachingWrapperFilter.cs Mon Mar 12 22:29:26 2012
@@ -110,7 +110,7 @@ namespace Lucene.Net.Search
                             // now for core match, but dynamically AND NOT
                             // deletions
                             value = cache[coreKey];
-                            if (value != null && reader.HasDeletions())
+                            if (value != null && reader.HasDeletions)
                             {
                                 value = MergeDeletes(reader, value);
                             }
@@ -219,7 +219,7 @@ namespace Lucene.Net.Search
                 // this is better than returning null, as the nonnull result can be cached
                 return DocIdSet.EMPTY_DOCIDSET;
             }
-            else if (docIdSet.IsCacheable()) {
+            else if (docIdSet.IsCacheable) {
 				return docIdSet;
 			}
 			else
@@ -228,7 +228,7 @@ namespace Lucene.Net.Search
 				// null is allowed to be returned by iterator(),
 				// in this case we wrap with the empty set,
 				// which is cacheable.
-				return (it == null) ? DocIdSet.EMPTY_DOCIDSET : new OpenBitSetDISI(it, reader.MaxDoc());
+				return (it == null) ? DocIdSet.EMPTY_DOCIDSET : new OpenBitSetDISI(it, reader.MaxDoc);
 			}
 		}
 
@@ -237,8 +237,8 @@ namespace Lucene.Net.Search
 		
 		public override DocIdSet GetDocIdSet(IndexReader reader)
 		{
-			object coreKey = reader.GetFieldCacheKey();
-            object delCoreKey = reader.HasDeletions() ? reader.GetDeletesCacheKey() : coreKey;
+			object coreKey = reader.FieldCacheKey;
+            object delCoreKey = reader.HasDeletions ? reader.DeletesCacheKey : coreKey;
 
             DocIdSet docIdSet = cache.Get(reader, coreKey, delCoreKey);
 

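The DocIdSetToCache hunk above keeps a set as-is when its new IsCacheable property reports true and otherwise copies the iterator into an OpenBitSetDISI sized to reader.MaxDoc, so the cached copy never touches disk. The sketch below shows the same decision on illustrative stand-in types; none of them are Lucene.Net classes.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Sketch: the "cache as-is vs. copy into a bitset" decision from DocIdSetToCache.
    abstract class SketchDocIdSet
    {
        public virtual bool IsCacheable { get { return false; } }
        public abstract IEnumerable<int> Docs();
    }

    sealed class DiskBackedSet : SketchDocIdSet               // pretends to need I/O per iteration
    {
        public override IEnumerable<int> Docs() { return new[] { 1, 4, 7 }; }
    }

    sealed class MaterializedBits : SketchDocIdSet            // plays the role of OpenBitSetDISI
    {
        private readonly List<int> docs;
        public MaterializedBits(IEnumerable<int> source) { docs = source.ToList(); }
        public override bool IsCacheable { get { return true; } }
        public override IEnumerable<int> Docs() { return docs; }
    }

    static class CacheDecision
    {
        static SketchDocIdSet DocIdSetToCache(SketchDocIdSet set)
        {
            // Cheap, I/O-free sets are cached directly; everything else is copied once.
            return set.IsCacheable ? set : new MaterializedBits(set.Docs());
        }

        static void Main()
        {
            SketchDocIdSet cached = DocIdSetToCache(new DiskBackedSet());
            Console.WriteLine(cached.GetType().Name);              // MaterializedBits
            Console.WriteLine(string.Join(",", cached.Docs()));    // 1,4,7
        }
    }
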
Modified: incubator/lucene.net/trunk/src/core/Search/ConstantScoreQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/ConstantScoreQuery.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/ConstantScoreQuery.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/ConstantScoreQuery.cs Mon Mar 12 22:29:26 2012
@@ -34,14 +34,14 @@ namespace Lucene.Net.Search
 		{
 			this.filter = filter;
 		}
-		
-		/// <summary>Returns the encapsulated filter </summary>
-		public virtual Filter GetFilter()
-		{
-			return filter;
-		}
-		
-		public override Query Rewrite(IndexReader reader)
+
+	    /// <summary>Returns the encapsulated filter </summary>
+	    public virtual Filter Filter
+	    {
+	        get { return filter; }
+	    }
+
+	    public override Query Rewrite(IndexReader reader)
 		{
 			return this;
 		}
@@ -77,24 +77,27 @@ namespace Lucene.Net.Search
 				InitBlock(enclosingInstance);
 				this.similarity = Enclosing_Instance.GetSimilarity(searcher);
 			}
-			
-			public override Query GetQuery()
-			{
-				return Enclosing_Instance;
-			}
-			
-			public override float GetValue()
-			{
-				return queryWeight;
-			}
-			
-			public override float SumOfSquaredWeights()
-			{
-				queryWeight = Enclosing_Instance.GetBoost();
-				return queryWeight * queryWeight;
-			}
-			
-			public override void  Normalize(float norm)
+
+		    public override Query Query
+		    {
+		        get { return Enclosing_Instance; }
+		    }
+
+		    public override float Value
+		    {
+		        get { return queryWeight; }
+		    }
+
+		    public override float SumOfSquaredWeights
+		    {
+		        get
+		        {
+		            queryWeight = Enclosing_Instance.Boost;
+		            return queryWeight*queryWeight;
+		        }
+		    }
+
+		    public override void  Normalize(float norm)
 			{
 				this.queryNorm = norm;
 				queryWeight *= this.queryNorm;
@@ -119,7 +122,7 @@ namespace Lucene.Net.Search
 					result.Value = queryWeight;
 					System.Boolean tempAux = true;
 					result.Match = tempAux;
-					result.AddDetail(new Explanation(Enclosing_Instance.GetBoost(), "boost"));
+					result.AddDetail(new Explanation(Enclosing_Instance.Boost, "boost"));
 					result.AddDetail(new Explanation(queryNorm, "queryNorm"));
 				}
 				else
@@ -155,7 +158,7 @@ namespace Lucene.Net.Search
 			public ConstantScorer(ConstantScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, Weight w):base(similarity)
 			{
 				InitBlock(enclosingInstance);
-				theScore = w.GetValue();
+				theScore = w.Value;
 				DocIdSet docIdSet = Enclosing_Instance.filter.GetDocIdSet(reader);
 				if (docIdSet == null)
 				{
@@ -204,7 +207,7 @@ namespace Lucene.Net.Search
 		/// <summary>Prints a user-readable version of this query. </summary>
 		public override System.String ToString(System.String field)
 		{
-			return "ConstantScore(" + filter.ToString() + (GetBoost() == 1.0?")":"^" + GetBoost());
+			return "ConstantScore(" + filter.ToString() + (Boost == 1.0?")":"^" + Boost);
 		}
 		
 		/// <summary>Returns true if <c>o</c> is equal to this. </summary>
@@ -215,14 +218,14 @@ namespace Lucene.Net.Search
 			if (!(o is ConstantScoreQuery))
 				return false;
 			ConstantScoreQuery other = (ConstantScoreQuery) o;
-			return this.GetBoost() == other.GetBoost() && filter.Equals(other.filter);
+			return this.Boost == other.Boost && filter.Equals(other.filter);
 		}
 		
 		/// <summary>Returns a hash code value for this object. </summary>
 		public override int GetHashCode()
 		{
 			// Simple add is OK since no existing filter hashcode has a float component.
-			return filter.GetHashCode() + BitConverter.ToInt32(BitConverter.GetBytes(GetBoost()), 0);
+			return filter.GetHashCode() + BitConverter.ToInt32(BitConverter.GetBytes(Boost), 0);
         }
 
 		override public System.Object Clone()

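In the ConstantScoreQuery weight above, SumOfSquaredWeights seeds queryWeight with the boost and returns its square, Normalize folds in the query norm, and the ConstantScorer then hands every matching document that single value (theScore = w.Value). A small numeric sketch; the query-norm formula used here is the usual DefaultSimilarity 1/sqrt(sumOfSquaredWeights) and is an assumption, not shown in the diff.

    using System;

    // Sketch: how ConstantScoreQuery's weight turns the query boost into the constant score.
    static class ConstantScoreSketch
    {
        static void Main()
        {
            float boost = 2.0f;

            float queryWeight = boost;                               // SumOfSquaredWeights stores the boost...
            float sumOfSquaredWeights = queryWeight * queryWeight;   // ...and returns its square

            float queryNorm = (float)(1.0 / Math.Sqrt(sumOfSquaredWeights));
            queryWeight *= queryNorm;                                // Normalize(norm)

            Console.WriteLine(queryWeight);                          // 1 — every matching doc scores this value
        }
    }
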
Modified: incubator/lucene.net/trunk/src/core/Search/DefaultSimilarity.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/DefaultSimilarity.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/DefaultSimilarity.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/DefaultSimilarity.cs Mon Mar 12 22:29:26 2012
@@ -41,10 +41,10 @@ namespace Lucene.Net.Search
 		{
 			int numTerms;
 			if (discountOverlaps)
-				numTerms = state.GetLength() - state.GetNumOverlap();
+				numTerms = state.Length - state.NumOverlap;
 			else
-				numTerms = state.GetLength();
-			return (state.GetBoost() * LengthNorm(field, numTerms));
+				numTerms = state.Length;
+			return (state.Boost * LengthNorm(field, numTerms));
 		}
 		
 		/// <summary>Implemented as <c>1/sqrt(numTerms)</c>. </summary>
@@ -101,12 +101,12 @@ namespace Lucene.Net.Search
 		{
 			discountOverlaps = v;
 		}
-		
-		/// <seealso cref="SetDiscountOverlaps">
-		/// </seealso>
-		public virtual bool GetDiscountOverlaps()
-		{
-			return discountOverlaps;
-		}
+
+	    /// <seealso cref="SetDiscountOverlaps">
+	    /// </seealso>
+	    public virtual bool DiscountOverlaps
+	    {
+	        get { return discountOverlaps; }
+	    }
 	}
 }
\ No newline at end of file

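ComputeNorm above now reads state.Length, state.NumOverlap and state.Boost as properties; the norm itself is still the boost times LengthNorm, i.e. 1/sqrt(numTerms), with overlap tokens optionally discounted. A standalone sketch of that arithmetic (the real code goes on to encode the float into a single norm byte):

    using System;

    // Sketch: DefaultSimilarity's field norm as committed above.
    static class NormSketch
    {
        static float LengthNorm(int numTerms)
        {
            return (float)(1.0 / Math.Sqrt(numTerms));
        }

        static float ComputeNorm(int length, int numOverlap, float boost, bool discountOverlaps)
        {
            int numTerms = discountOverlaps ? length - numOverlap : length;
            return boost * LengthNorm(numTerms);
        }

        static void Main()
        {
            // 100 tokens, 10 of them position-overlaps (e.g. injected synonyms), field boost 1.0
            Console.WriteLine(ComputeNorm(100, 10, 1.0f, true));    // ~0.105 (1/sqrt(90))
            Console.WriteLine(ComputeNorm(100, 10, 1.0f, false));   // 0.1    (1/sqrt(100))
        }
    }
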
Modified: incubator/lucene.net/trunk/src/core/Search/DisjunctionMaxQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/DisjunctionMaxQuery.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/DisjunctionMaxQuery.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/DisjunctionMaxQuery.cs Mon Mar 12 22:29:26 2012
@@ -127,7 +127,7 @@ namespace Lucene.Net.Search
 			public DisjunctionMaxWeight(DisjunctionMaxQuery enclosingInstance, Searcher searcher)
 			{
 				InitBlock(enclosingInstance);
-				this.similarity = searcher.GetSimilarity();
+				this.similarity = searcher.Similarity;
 				foreach(Query disjunctQuery in enclosingInstance.disjuncts)
 				{
                     weights.Add(disjunctQuery.CreateWeight(searcher));
@@ -135,35 +135,42 @@ namespace Lucene.Net.Search
 			}
 			
 			/* Return our associated DisjunctionMaxQuery */
-			public override Query GetQuery()
-			{
-				return Enclosing_Instance;
-			}
-			
-			/* Return our boost */
-			public override float GetValue()
-			{
-				return Enclosing_Instance.GetBoost();
-			}
-			
-			/* Compute the sub of squared weights of us applied to our subqueries.  Used for normalization. */
-			public override float SumOfSquaredWeights()
-			{
-				float max = 0.0f, sum = 0.0f;
-				foreach(Weight currentWeight in weights)
-				{
-                    float sub = currentWeight.SumOfSquaredWeights();
-					sum += sub;
-					max = System.Math.Max(max, sub);
-				}
-				float boost = Enclosing_Instance.GetBoost();
-				return (((sum - max) * Enclosing_Instance.tieBreakerMultiplier * Enclosing_Instance.tieBreakerMultiplier) + max) * boost * boost;
-			}
-			
-			/* Apply the computed normalization factor to our subqueries */
+
+		    public override Query Query
+		    {
+		        get { return Enclosing_Instance; }
+		    }
+
+		    /* Return our boost */
+
+		    public override float Value
+		    {
+		        get { return Enclosing_Instance.Boost; }
+		    }
+
+		    /* Compute the sum of squared weights of our subqueries.  Used for normalization. */
+
+		    public override float SumOfSquaredWeights
+		    {
+		        get
+		        {
+		            float max = 0.0f, sum = 0.0f;
+		            foreach (Weight currentWeight in weights)
+		            {
+		                float sub = currentWeight.SumOfSquaredWeights;
+		                sum += sub;
+		                max = System.Math.Max(max, sub);
+		            }
+		            float boost = Enclosing_Instance.Boost;
+		            return (((sum - max)*Enclosing_Instance.tieBreakerMultiplier*Enclosing_Instance.tieBreakerMultiplier) + max)*
+		                   boost*boost;
+		        }
+		    }
+
+		    /* Apply the computed normalization factor to our subqueries */
 			public override void  Normalize(float norm)
 			{
-				norm *= Enclosing_Instance.GetBoost(); // Incorporate our boost
+				norm *= Enclosing_Instance.Boost; // Incorporate our boost
 				foreach(Weight wt in weights)
 				{
                     wt.Normalize(norm);
@@ -232,11 +239,11 @@ namespace Lucene.Net.Search
 			{
 				Query singleton = disjuncts[0];
 				Query result = singleton.Rewrite(reader);
-				if (GetBoost() != 1.0f)
+				if (Boost != 1.0f)
 				{
 					if (result == singleton)
 						result = (Query) result.Clone();
-					result.SetBoost(GetBoost() * result.GetBoost());
+					result.Boost = Boost * result.Boost;
 				}
 				return result;
 			}
@@ -308,10 +315,10 @@ namespace Lucene.Net.Search
 				buffer.Append("~");
 				buffer.Append(tieBreakerMultiplier);
 			}
-			if (GetBoost() != 1.0)
+			if (Boost != 1.0)
 			{
 				buffer.Append("^");
-				buffer.Append(GetBoost());
+				buffer.Append(Boost);
 			}
 			return buffer.ToString();
 		}
@@ -326,7 +333,7 @@ namespace Lucene.Net.Search
 			if (!(o is DisjunctionMaxQuery))
 				return false;
 			DisjunctionMaxQuery other = (DisjunctionMaxQuery) o;
-			return this.GetBoost() == other.GetBoost() && this.tieBreakerMultiplier == other.tieBreakerMultiplier && this.disjuncts.Equals(other.disjuncts);
+			return this.Boost == other.Boost && this.tieBreakerMultiplier == other.tieBreakerMultiplier && this.disjuncts.Equals(other.disjuncts);
 		}
 		
 		/// <summary>Compute a hash code for hashing us</summary>
@@ -334,7 +341,7 @@ namespace Lucene.Net.Search
 		/// </returns>
 		public override int GetHashCode()
 		{
-			return BitConverter.ToInt32(BitConverter.GetBytes(GetBoost()), 0) + BitConverter.ToInt32(BitConverter.GetBytes(tieBreakerMultiplier), 0) + disjuncts.GetHashCode();
+			return BitConverter.ToInt32(BitConverter.GetBytes(Boost), 0) + BitConverter.ToInt32(BitConverter.GetBytes(tieBreakerMultiplier), 0) + disjuncts.GetHashCode();
 		}
 	}
 }
\ No newline at end of file

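The SumOfSquaredWeights getter above combines the sub-weights as ((sum − max) · tieBreaker² + max) · boost², so the strongest disjunct dominates and the tie-breaker scales how much the remaining disjuncts contribute. A numeric sketch with illustrative inputs:

    using System;

    // Sketch: DisjunctionMaxWeight.SumOfSquaredWeights with made-up sub-weight values.
    static class DisMaxSketch
    {
        static float Combine(float[] subSumsOfSquares, float tieBreaker, float boost)
        {
            float max = 0.0f, sum = 0.0f;
            foreach (float sub in subSumsOfSquares)
            {
                sum += sub;
                max = Math.Max(max, sub);
            }
            return (((sum - max) * tieBreaker * tieBreaker) + max) * boost * boost;
        }

        static void Main()
        {
            var subs = new[] { 4.0f, 1.0f, 0.25f };
            Console.WriteLine(Combine(subs, 0.0f, 1.0f));   // 4       — pure max, other disjuncts ignored
            Console.WriteLine(Combine(subs, 0.1f, 1.0f));   // 4.0125  — tie-breaker adds a small share of the rest
        }
    }
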
Modified: incubator/lucene.net/trunk/src/core/Search/DocIdSet.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/DocIdSet.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/DocIdSet.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/DocIdSet.cs Mon Mar 12 22:29:26 2012
@@ -76,10 +76,10 @@ namespace Lucene.Net.Search
 				return iterator;
 			}
 
-			public override bool IsCacheable()
-			{
-				return true;
-			}
+		    public override bool IsCacheable
+		    {
+		        get { return true; }
+		    }
 		}
 
         /// <summary>An empty <see cref="DocIdSet"/> instance for easy use, e.g. in Filters that hit no documents. </summary>
@@ -93,17 +93,18 @@ namespace Lucene.Net.Search
 		/// </summary>
 		public abstract DocIdSetIterator Iterator();
 
-		/// <summary>This method is a hint for <see cref="CachingWrapperFilter" />, if this <c>DocIdSet</c>
-		/// should be cached without copying it into a BitSet. The default is to return
-		/// <c>false</c>. If you have an own <c>DocIdSet</c> implementation
-		/// that does its iteration very effective and fast without doing disk I/O,
-		/// override this method and return true.
-		/// </summary>
-		public virtual bool IsCacheable()
-		{
-			return false;
-		}
-		static DocIdSet()
+	    /// <summary>This property is a hint for <see cref="CachingWrapperFilter" />: whether this <c>DocIdSet</c>
+	    /// should be cached without copying it into a BitSet. The default is to return
+	    /// <c>false</c>. If you have your own <c>DocIdSet</c> implementation
+	    /// that iterates very efficiently without doing disk I/O,
+	    /// override this property and return true.
+	    /// </summary>
+	    public virtual bool IsCacheable
+	    {
+	        get { return false; }
+	    }
+
+	    static DocIdSet()
 		{
 			EMPTY_DOCIDSET = new AnonymousClassDocIdSet();
 		}

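IsCacheable changes here from a virtual method to a virtual read-only property, the same conversion applied throughout this commit; implementations now override the getter, and call sites drop the parentheses (as in the CachingWrapperFilter hunk earlier). A minimal sketch of the pattern on stand-in types:

    using System;

    // Sketch: the method-to-property conversion pattern, shown on a DocIdSet-like base class.
    abstract class CacheHintedSet
    {
        // Previously:  public virtual bool IsCacheable() { return false; }
        // Now a read-only property; subclasses override the getter instead.
        public virtual bool IsCacheable
        {
            get { return false; }
        }
    }

    sealed class InMemorySet : CacheHintedSet
    {
        public override bool IsCacheable
        {
            get { return true; }   // iteration is pure in-memory work, safe to cache as-is
        }
    }

    static class PropertyPatternDemo
    {
        static void Main()
        {
            CacheHintedSet s = new InMemorySet();
            Console.WriteLine(s.IsCacheable);   // True — note: no parentheses at the call site
        }
    }
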
Modified: incubator/lucene.net/trunk/src/core/Search/Explanation.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/Explanation.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/Explanation.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/Explanation.cs Mon Mar 12 22:29:26 2012
@@ -199,10 +199,10 @@ namespace Lucene.Net.Search
 		[Serializable]
 		public abstract class IDFExplanation
 		{
-			/// <returns> the idf factor
-			/// </returns>
-			public abstract float GetIdf();
-			/// <summary> This should be calculated lazily if possible.
+		    /// <value> the idf factor </value>
+		    public abstract float Idf { get; }
+
+		    /// <summary> This should be calculated lazily if possible.
 			/// 
 			/// </summary>
 			/// <returns> the explanation for the idf factor.

Modified: incubator/lucene.net/trunk/src/core/Search/FieldCache.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/FieldCache.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/FieldCache.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/FieldCache.cs Mon Mar 12 22:29:26 2012
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.IO;
 using Lucene.Net.Support;
 using Double = Lucene.Net.Support.Double;
 using NumericTokenStream = Lucene.Net.Analysis.NumericTokenStream;
@@ -95,49 +96,43 @@ namespace Lucene.Net.Search
 	/// </summary>
 	public abstract class CacheEntry
 	{
-		public abstract System.Object GetReaderKey();
-		public abstract System.String GetFieldName();
-		public abstract System.Type GetCacheType();
-		public abstract System.Object GetCustom();
-		public abstract System.Object GetValue();
-		private System.String size = null;
-		protected internal void  SetEstimatedSize(System.String size)
-		{
-			this.size = size;
-		}
-		/// <seealso cref="EstimateSize(RamUsageEstimator)">
+	    public abstract object ReaderKey { get; }
+	    public abstract string FieldName { get; }
+	    public abstract Type CacheType { get; }
+	    public abstract object Custom { get; }
+	    public abstract object Value { get; }
+
+	    /// <seealso cref="EstimateSize(RamUsageEstimator)">
 		/// </seealso>
 		public virtual void  EstimateSize()
 		{
 			EstimateSize(new RamUsageEstimator(false)); // doesn't check for interned
 		}
 		/// <summary> Computes (and stores) the estimated size of the cache Value </summary>
-		/// <seealso cref="GetEstimatedSize">
+		/// <seealso cref="EstimatedSize">
 		/// </seealso>
 		public virtual void  EstimateSize(RamUsageEstimator ramCalc)
 		{
-			long size = ramCalc.EstimateRamUsage(GetValue());
-            SetEstimatedSize(RamUsageEstimator.HumanReadableUnits(size, new System.Globalization.NumberFormatInfo()));  // {{Aroush-2.9}} in Java, the formater is set to "0.#", so we need to do the same in C#
+			long size = ramCalc.EstimateRamUsage(Value);
+            EstimatedSize = RamUsageEstimator.HumanReadableUnits(size, new System.Globalization.NumberFormatInfo());  // {{Aroush-2.9}} in Java, the formatter is set to "0.#", so we need to do the same in C#
 		}
-		/// <summary> The most recently estimated size of the value, null unless 
-		/// estimateSize has been called.
-		/// </summary>
-		public System.String GetEstimatedSize()
-		{
-			return size;
-		}
-		
-		
-		public override System.String ToString()
+
+	    /// <summary> The most recently estimated size of the value, null unless 
+	    /// estimateSize has been called.
+	    /// </summary>
+	    public string EstimatedSize { get; protected internal set; }
+
+
+	    public override System.String ToString()
 		{
-			System.Text.StringBuilder b = new System.Text.StringBuilder();
-			b.Append("'").Append(GetReaderKey()).Append("'=>");
-			b.Append("'").Append(GetFieldName()).Append("',");
-			b.Append(GetCacheType()).Append(",").Append(GetCustom());
-			b.Append("=>").Append(GetValue().GetType().FullName).Append("#");
-			b.Append(GetValue().GetHashCode());
+			var b = new System.Text.StringBuilder();
+			b.Append("'").Append(ReaderKey).Append("'=>");
+			b.Append("'").Append(FieldName).Append("',");
+			b.Append(CacheType).Append(",").Append(Custom);
+			b.Append("=>").Append(Value.GetType().FullName).Append("#");
+			b.Append(Value.GetHashCode());
 			
-			System.String s = GetEstimatedSize();
+			System.String s = EstimatedSize;
 			if (null != s)
 			{
 				b.Append(" (size =~ ").Append(s).Append(')');
@@ -634,15 +629,14 @@ namespace Lucene.Net.Search
         /// Lucene now caches at the segment reader level.
         /// </summary>
         void Purge(IndexReader r);
-		
-		/// <summary> If non-null, FieldCacheImpl will warn whenever
-		/// entries are created that are not sane according to
-		/// <see cref="Lucene.Net.Util.FieldCacheSanityChecker" />.
-		/// </summary>
-		void  SetInfoStream(System.IO.StreamWriter stream);
-		
-		/// <summary>counterpart of <see cref="SetInfoStream(System.IO.StreamWriter)" /> </summary>
-		System.IO.StreamWriter GetInfoStream();
+
+        /// <summary> Gets or sets the InfoStream for this FieldCache.
+        /// <para>If non-null, FieldCacheImpl will warn whenever
+        /// entries are created that are not sane according to
+        /// <see cref="Lucene.Net.Util.FieldCacheSanityChecker" />.
+        /// </para>
+        /// </summary>
+	    StreamWriter InfoStream { get; set; }
 	}
 	
 	/// <summary> Marker interface as super-interface to all parsers. It

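CacheEntry's accessors become abstract read-only properties, EstimatedSize becomes an auto-property with a protected internal setter (replacing the GetEstimatedSize/SetEstimatedSize pair), and InfoStream on the interface becomes a get/set property. A sketch of the EstimatedSize shape on an illustrative class; the KB formatting is made up for the example, while the real code derives the size via RamUsageEstimator.HumanReadableUnits.

    using System;

    // Sketch: the auto-property-with-restricted-setter pattern used for CacheEntry.EstimatedSize.
    class CacheEntrySketch
    {
        // Readable by anyone, settable only within the class hierarchy / assembly.
        public string EstimatedSize { get; protected internal set; }

        public void EstimateSize(long bytes)
        {
            // Stand-in for RamUsageEstimator: format the byte count into a human-readable string.
            EstimatedSize = (bytes / 1024.0).ToString("0.#") + " KB";
        }
    }

    static class EstimatedSizeDemo
    {
        static void Main()
        {
            var entry = new CacheEntrySketch();
            entry.EstimateSize(153600);
            Console.WriteLine(entry.EstimatedSize);   // 150 KB
        }
    }
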
Modified: incubator/lucene.net/trunk/src/core/Search/FieldCacheImpl.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/FieldCacheImpl.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/FieldCacheImpl.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/FieldCacheImpl.cs Mon Mar 12 22:29:26 2012
@@ -17,6 +17,7 @@
 
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
 using Lucene.Net.Support;
 using NumericField = Lucene.Net.Documents.NumericField;
@@ -107,40 +108,45 @@ namespace Lucene.Net.Search
 			private System.String fieldName;
 			private System.Type cacheType;
 			private System.Object custom;
-			private System.Object value_Renamed;
-			internal CacheEntryImpl(System.Object readerKey, System.String fieldName, System.Type cacheType, System.Object custom, System.Object value_Renamed)
+			private System.Object value;
+			internal CacheEntryImpl(System.Object readerKey, System.String fieldName, System.Type cacheType, System.Object custom, System.Object value)
 			{
 				this.readerKey = readerKey;
 				this.fieldName = fieldName;
 				this.cacheType = cacheType;
 				this.custom = custom;
-				this.value_Renamed = value_Renamed;
+				this.value = value;
 				
 				// :HACK: for testing.
 				//         if (null != locale || SortField.CUSTOM != sortFieldType) {
 				//           throw new RuntimeException("Locale/sortFieldType: " + this);
 				//         }
 			}
-			public override System.Object GetReaderKey()
-			{
-				return readerKey;
-			}
-			public override System.String GetFieldName()
-			{
-				return fieldName;
-			}
-			public override System.Type GetCacheType()
-			{
-				return cacheType;
-			}
-			public override System.Object GetCustom()
-			{
-				return custom;
-			}
-			public override System.Object GetValue()
-			{
-				return value_Renamed;
-			}
+
+		    public override object ReaderKey
+		    {
+		        get { return readerKey; }
+		    }
+
+		    public override string FieldName
+		    {
+		        get { return fieldName; }
+		    }
+
+		    public override Type CacheType
+		    {
+		        get { return cacheType; }
+		    }
+
+		    public override object Custom
+		    {
+		        get { return custom; }
+		    }
+
+		    public override object Value
+		    {
+		        get { return value; }
+		    }
 		}
 		
 		/// <summary> Hack: When thrown from a Parser (NUMERIC_UTILS_* ones), this stops
@@ -174,7 +180,7 @@ namespace Lucene.Net.Search
             /** Remove this reader from the cache, if present. */
             public void Purge(IndexReader r)
             {
-                object readerKey = r.GetFieldCacheKey();
+                object readerKey = r.FieldCacheKey;
                 lock (readerCache)
                 {
                     readerCache.Remove(readerKey);
@@ -185,7 +191,7 @@ namespace Lucene.Net.Search
 			{
 				IDictionary<Entry, object> innerCache;
 				System.Object value;
-				System.Object readerKey = reader.GetFieldCacheKey();
+				System.Object readerKey = reader.FieldCacheKey;
 				lock (readerCache)
 				{
 					innerCache = readerCache[readerKey];
@@ -223,7 +229,7 @@ namespace Lucene.Net.Search
 							// call to FieldCache.getXXX
 							if (key.custom != null && wrapper != null)
 							{
-								System.IO.StreamWriter infoStream = wrapper.GetInfoStream();
+								System.IO.StreamWriter infoStream = wrapper.InfoStream;
 								if (infoStream != null)
 								{
 									PrintNewInsanity(infoStream, progress.value_Renamed);
@@ -245,7 +251,7 @@ namespace Lucene.Net.Search
 					CacheEntry[] entries = insanity.GetCacheEntries();
 					for (int j = 0; j < entries.Length; j++)
 					{
-						if (entries[j].GetValue() == value_Renamed)
+						if (entries[j].Value == value_Renamed)
 						{
 							// OK this insanity involves our entry
 							infoStream.WriteLine("WARNING: new FieldCache insanity created\nDetails: " + insanity.ToString());
@@ -326,7 +332,7 @@ namespace Lucene.Net.Search
 				{
 					return wrapper.GetBytes(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_BYTE_PARSER);
 				}
-				sbyte[] retArray = new sbyte[reader.MaxDoc()];
+				sbyte[] retArray = new sbyte[reader.MaxDoc];
 				TermDocs termDocs = reader.TermDocs();
 				TermEnum termEnum = reader.Terms(new Term(field));
 				try
@@ -385,7 +391,7 @@ namespace Lucene.Net.Search
 				{
 					return wrapper.GetShorts(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_SHORT_PARSER);
 				}
-				short[] retArray = new short[reader.MaxDoc()];
+				short[] retArray = new short[reader.MaxDoc];
 				TermDocs termDocs = reader.TermDocs();
 				TermEnum termEnum = reader.Terms(new Term(field));
 				try
@@ -464,7 +470,7 @@ namespace Lucene.Net.Search
 						int termval = parser.ParseInt(term.Text());
 						if (retArray == null)
 						// late init
-							retArray = new int[reader.MaxDoc()];
+							retArray = new int[reader.MaxDoc];
 						termDocs.Seek(termEnum);
 						while (termDocs.Next())
 						{
@@ -483,7 +489,7 @@ namespace Lucene.Net.Search
 				}
 				if (retArray == null)
 				// no values
-					retArray = new int[reader.MaxDoc()];
+					retArray = new int[reader.MaxDoc];
 				return retArray;
 			}
 		}
@@ -538,7 +544,7 @@ namespace Lucene.Net.Search
 						float termval = parser.ParseFloat(term.Text());
 						if (retArray == null)
 						// late init
-							retArray = new float[reader.MaxDoc()];
+							retArray = new float[reader.MaxDoc];
 						termDocs.Seek(termEnum);
 						while (termDocs.Next())
 						{
@@ -557,7 +563,7 @@ namespace Lucene.Net.Search
 				}
 				if (retArray == null)
 				// no values
-					retArray = new float[reader.MaxDoc()];
+					retArray = new float[reader.MaxDoc];
 				return retArray;
 			}
 		}
@@ -610,7 +616,7 @@ namespace Lucene.Net.Search
 						long termval = parser.ParseLong(term.Text());
 						if (retArray == null)
 						// late init
-							retArray = new long[reader.MaxDoc()];
+							retArray = new long[reader.MaxDoc];
 						termDocs.Seek(termEnum);
 						while (termDocs.Next())
 						{
@@ -629,7 +635,7 @@ namespace Lucene.Net.Search
 				}
 				if (retArray == null)
 				// no values
-					retArray = new long[reader.MaxDoc()];
+					retArray = new long[reader.MaxDoc];
 				return retArray;
 			}
 		}
@@ -682,7 +688,7 @@ namespace Lucene.Net.Search
 						double termval = parser.ParseDouble(term.Text());
 						if (retArray == null)
 						// late init
-							retArray = new double[reader.MaxDoc()];
+							retArray = new double[reader.MaxDoc];
 						termDocs.Seek(termEnum);
 						while (termDocs.Next())
 						{
@@ -701,7 +707,7 @@ namespace Lucene.Net.Search
 				}
 				if (retArray == null)
 				// no values
-					retArray = new double[reader.MaxDoc()];
+					retArray = new double[reader.MaxDoc];
 				return retArray;
 			}
 		}
@@ -722,7 +728,7 @@ namespace Lucene.Net.Search
 			protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
 			{
 				System.String field = StringHelper.Intern(entryKey.field);
-				System.String[] retArray = new System.String[reader.MaxDoc()];
+				System.String[] retArray = new System.String[reader.MaxDoc];
 				TermDocs termDocs = reader.TermDocs();
 				TermEnum termEnum = reader.Terms(new Term(field));
 				try
@@ -766,8 +772,8 @@ namespace Lucene.Net.Search
 			protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
 			{
 				System.String field = StringHelper.Intern(entryKey.field);
-				int[] retArray = new int[reader.MaxDoc()];
-				System.String[] mterms = new System.String[reader.MaxDoc() + 1];
+				int[] retArray = new int[reader.MaxDoc];
+				System.String[] mterms = new System.String[reader.MaxDoc + 1];
 				TermDocs termDocs = reader.TermDocs();
 				TermEnum termEnum = reader.Terms(new Term(field));
 				int t = 0; // current term number
@@ -825,15 +831,11 @@ namespace Lucene.Net.Search
 		}
 		
 		private volatile System.IO.StreamWriter infoStream;
-		
-		public virtual void  SetInfoStream(System.IO.StreamWriter stream)
-		{
-			infoStream = stream;
-		}
-		
-		public virtual System.IO.StreamWriter GetInfoStream()
-		{
-			return infoStream;
-		}
+
+	    public virtual StreamWriter InfoStream
+	    {
+	        get { return infoStream; }
+	        set { infoStream = value; }
+	    }
 	}
 }
\ No newline at end of file

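The GetBytes/GetInts/GetFloats/GetLongs/GetDoubles caches above all share the same late-init pattern, now sized by the reader.MaxDoc property: the value array is only allocated when the first term with a value is seen, and a field with no values still yields a zero-filled array at the end. A standalone sketch of that pattern:

    using System;
    using System.Collections.Generic;

    // Sketch: the "late init" array allocation from the FieldCacheImpl value caches.
    static class LateInitSketch
    {
        static int[] LoadInts(int maxDoc, IEnumerable<KeyValuePair<int, int>> docValues)
        {
            int[] retArray = null;
            foreach (var dv in docValues)
            {
                if (retArray == null)
                    retArray = new int[maxDoc];       // late init: first value triggers allocation
                retArray[dv.Key] = dv.Value;
            }
            if (retArray == null)
                retArray = new int[maxDoc];           // no values: still return a full, zeroed array
            return retArray;
        }

        static void Main()
        {
            var values = new[] { new KeyValuePair<int, int>(3, 42) };
            Console.WriteLine(LoadInts(5, values)[3]);                          // 42
            Console.WriteLine(LoadInts(5, new KeyValuePair<int, int>[0])[0]);   // 0
        }
    }
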
Modified: incubator/lucene.net/trunk/src/core/Search/FieldCacheRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Search/FieldCacheRangeFilter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Search/FieldCacheRangeFilter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Search/FieldCacheRangeFilter.cs Mon Mar 12 22:29:26 2012
@@ -929,13 +929,13 @@ namespace Lucene.Net.Search
 			/// <summary>this method checks, if a doc is a hit, should throw AIOBE, when position invalid </summary>
 			internal abstract bool MatchDoc(int doc);
 
-			/// <summary>this DocIdSet is cacheable, if it works solely with FieldCache and no TermDocs </summary>
-			public override bool IsCacheable()
-			{
-				return !(mayUseTermDocs && reader.HasDeletions());
-			}
-			
-			public override DocIdSetIterator Iterator()
+		    /// <summary>This DocIdSet is cacheable if it works solely with the FieldCache and no TermDocs </summary>
+		    public override bool IsCacheable
+		    {
+		        get { return !(mayUseTermDocs && reader.HasDeletions); }
+		    }
+
+		    public override DocIdSetIterator Iterator()
 			{
 				// Synchronization needed because deleted docs BitVector
 				// can change after call to hasDeletions until TermDocs creation.
@@ -944,7 +944,7 @@ namespace Lucene.Net.Search
 				TermDocs termDocs;
 				lock (reader)
 				{
-					termDocs = IsCacheable() ? null : reader.TermDocs(null);
+					termDocs = IsCacheable ? null : reader.TermDocs(null);
 				}
 				if (termDocs != null)
 				{


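The IsCacheable property above allows caching only when the set never needs TermDocs, i.e. when it either does not use them at all or the reader has no deletions to honour; Iterator() then opens TermDocs only in the non-cacheable case. A tiny sketch of the rule:

    using System;

    // Sketch: FieldCacheRangeFilter's cacheability rule as committed above.
    static class RangeFilterCacheRule
    {
        static bool IsCacheable(bool mayUseTermDocs, bool readerHasDeletions)
        {
            return !(mayUseTermDocs && readerHasDeletions);
        }

        static void Main()
        {
            Console.WriteLine(IsCacheable(false, true));   // True  — works from the FieldCache alone
            Console.WriteLine(IsCacheable(true, false));   // True  — TermDocs allowed, but nothing deleted
            Console.WriteLine(IsCacheable(true, true));    // False — must consult TermDocs for deletions
        }
    }
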
