lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From bode...@apache.org
Subject svn commit: r1344562 [3/4] - in /incubator/lucene.net/trunk: src/contrib/Analyzers/De/ src/contrib/Analyzers/Filters/ src/contrib/Core/Analysis/Ext/ src/contrib/Memory/ src/contrib/Memory/Properties/ src/contrib/Spatial/ src/contrib/Spatial/Prefix/ src...
Date Thu, 31 May 2012 04:33:43 GMT
Propchange: incubator/lucene.net/trunk/src/contrib/Memory/MemoryIndex.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermEnum.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermEnum.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermEnum.cs Thu May 31 04:33:41 2012
@@ -1,82 +1,82 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace Lucene.Net.Index.Memory
-{
-    public partial class MemoryIndex
-    {
-        private sealed partial class MemoryIndexReader
-        {
-            private class MemoryTermEnum : TermEnum
-            {
-                private readonly MemoryIndex _index;
-                private readonly MemoryIndexReader _reader;
-                private int _i; // index into info.sortedTerms
-                private int _j; // index into sortedFields
-
-                public MemoryTermEnum(MemoryIndex index, MemoryIndexReader reader, int ix, int jx)
-                {
-                    _index = index;
-                    _reader = reader;
-                    _i = ix; // index into info.sortedTerms
-                    _j = jx; // index into sortedFields
-                }
-
-                public override bool Next()
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.next");
-                    if (_j >= _index.sortedFields.Length) return false;
-                    Info info = _reader.GetInfo(_j);
-                    if (++_i < info.SortedTerms.Length) return true;
-
-                    // move to successor
-                    _j++;
-                    _i = 0;
-                    if (_j >= _index.sortedFields.Length) return false;
-                    _reader.GetInfo(_j).SortTerms();
-                    return true;
-                }
-
-                public override Term Term()
-                {
-                              if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.term: " + _i);
-                              if (_j >= _index.sortedFields.Length) return null;
-                              Info info = _reader.GetInfo(_j);
-                              if (_i >= info.SortedTerms.Length) return null;
-                    //          if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.term: " + i + ", " + info.sortedTerms[i].getKey());
-                              return CreateTerm(info, _j, info.SortedTerms[_i].Key);
-                }
-
-                public override int DocFreq()
-                {                
-                              if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.docFreq");
-                              if (_j >= _index.sortedFields.Length) return 0;
-                              Info info = _reader.GetInfo(_j);
-                              if (_i >= info.SortedTerms.Length) return 0;
-                              return _index.NumPositions(info.GetPositions(_i));
-                }
-
-                protected override void Dispose(bool disposing)
-                {
-                              if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.close");
-                }
-
-                private Term CreateTerm(Info info, int pos, string text)
-                {
-                    // Assertion: sortFields has already been called before
-                    Term template = info.template;
-                    if (template == null) { // not yet cached?
-                        String fieldName = _index.sortedFields[pos].Key;
-                    template = new Term(fieldName);
-                    info.template = template;
-                    }
-
-                    return template.CreateTerm(text);
-                }
-            }
-        }
-    }
-}
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Index.Memory
+{
+    public partial class MemoryIndex
+    {
+        private sealed partial class MemoryIndexReader
+        {
+            private class MemoryTermEnum : TermEnum
+            {
+                private readonly MemoryIndex _index;
+                private readonly MemoryIndexReader _reader;
+                private int _i; // index into info.sortedTerms
+                private int _j; // index into sortedFields
+
+                public MemoryTermEnum(MemoryIndex index, MemoryIndexReader reader, int ix, int jx)
+                {
+                    _index = index;
+                    _reader = reader;
+                    _i = ix; // index into info.sortedTerms
+                    _j = jx; // index into sortedFields
+                }
+
+                public override bool Next()
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.next");
+                    if (_j >= _index.sortedFields.Length) return false;
+                    Info info = _reader.GetInfo(_j);
+                    if (++_i < info.SortedTerms.Length) return true;
+
+                    // move to successor
+                    _j++;
+                    _i = 0;
+                    if (_j >= _index.sortedFields.Length) return false;
+                    _reader.GetInfo(_j).SortTerms();
+                    return true;
+                }
+
+                public override Term Term()
+                {
+                              if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.term: " + _i);
+                              if (_j >= _index.sortedFields.Length) return null;
+                              Info info = _reader.GetInfo(_j);
+                              if (_i >= info.SortedTerms.Length) return null;
+                    //          if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.term: " + i + ", " + info.sortedTerms[i].getKey());
+                              return CreateTerm(info, _j, info.SortedTerms[_i].Key);
+                }
+
+                public override int DocFreq()
+                {                
+                              if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.docFreq");
+                              if (_j >= _index.sortedFields.Length) return 0;
+                              Info info = _reader.GetInfo(_j);
+                              if (_i >= info.SortedTerms.Length) return 0;
+                              return _index.NumPositions(info.GetPositions(_i));
+                }
+
+                protected override void Dispose(bool disposing)
+                {
+                              if (DEBUG) System.Diagnostics.Debug.WriteLine("TermEnum.close");
+                }
+
+                private Term CreateTerm(Info info, int pos, string text)
+                {
+                    // Assertion: sortFields has already been called before
+                    Term template = info.template;
+                    if (template == null) { // not yet cached?
+                        String fieldName = _index.sortedFields[pos].Key;
+                    template = new Term(fieldName);
+                    info.template = template;
+                    }
+
+                    return template.CreateTerm(text);
+                }
+            }
+        }
+    }
+}

Propchange: incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermEnum.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositionVector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositionVector.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositionVector.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositionVector.cs Thu May 31 04:33:41 2012
@@ -1,96 +1,96 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace Lucene.Net.Index.Memory
-{
-    public partial class MemoryIndex
-    {
-        private sealed partial class MemoryIndexReader
-        {
-            private class MemoryTermPositionVector : TermPositionVector
-            {
-                private readonly MemoryIndex _index;
-                private readonly string _fieldName;
-                private readonly KeyValuePair<String,ArrayIntList>[] sortedTerms;
-
-                public MemoryTermPositionVector(MemoryIndex index, Info info, string fieldName)
-                {
-                    _index = index;
-                    _fieldName = fieldName;
-                    sortedTerms = info.SortedTerms;
-                }
-
-                public string Field
-                {
-                    get { return _fieldName; }
-                }
-
-                public int Size
-                {
-                    get { return sortedTerms.Length; }
-                }
-
-                public string[] GetTerms()
-                {
-                    var terms = new String[sortedTerms.Length];
-                    for (int i = sortedTerms.Length; --i >= 0; )
-                    {
-                        terms[i] = sortedTerms[i].Key;
-                    }
-                    return terms;
-                }
-
-                public int[] GetTermFrequencies()
-                {
-                    int[] freqs = new int[sortedTerms.Length];
-                    for (int i = sortedTerms.Length; --i >= 0; )
-                    {
-                        freqs[i] = _index.NumPositions(sortedTerms[i].Value);
-                    }
-                    return freqs;
-                }
-
-                public int IndexOf(string term)
-                {
-                    int i = Array.BinarySearch(sortedTerms, new KeyValuePair<string, ArrayIntList>(term, null), Info.ArrayIntListComparer);
-                    return i >= 0 ? i : -1;
-                }
-
-                public int[] IndexesOf(string[] terms, int start, int len)
-                {
-                    int[] indexes = new int[len];
-                    for (int i = 0; i < len; i++)
-                    {
-                        indexes[i] = IndexOf(terms[start++]);
-                    }
-                    return indexes;
-                }
-
-                public int[] GetTermPositions(int index)
-                {
-                    return sortedTerms[index].Value.ToArray(_index.stride);
-                }
-
-                public TermVectorOffsetInfo[] GetOffsets(int index)
-                {
-                    if (_index.stride == 1) return null; // no offsets stored
-
-                    ArrayIntList positions = sortedTerms[index].Value;
-                    int size = positions.Size();
-                    TermVectorOffsetInfo[] offsets = new TermVectorOffsetInfo[size / _index.stride];
-
-                    for (int i = 0, j = 1; j < size; i++, j += _index.stride)
-                    {
-                        int start = positions.Get(j);
-                        int end = positions.Get(j + 1);
-                        offsets[i] = new TermVectorOffsetInfo(start, end);
-                    }
-                    return offsets;
-                }
-            }
-        }
-    }
-}
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Index.Memory
+{
+    public partial class MemoryIndex
+    {
+        private sealed partial class MemoryIndexReader
+        {
+            private class MemoryTermPositionVector : TermPositionVector
+            {
+                private readonly MemoryIndex _index;
+                private readonly string _fieldName;
+                private readonly KeyValuePair<String,ArrayIntList>[] sortedTerms;
+
+                public MemoryTermPositionVector(MemoryIndex index, Info info, string fieldName)
+                {
+                    _index = index;
+                    _fieldName = fieldName;
+                    sortedTerms = info.SortedTerms;
+                }
+
+                public string Field
+                {
+                    get { return _fieldName; }
+                }
+
+                public int Size
+                {
+                    get { return sortedTerms.Length; }
+                }
+
+                public string[] GetTerms()
+                {
+                    var terms = new String[sortedTerms.Length];
+                    for (int i = sortedTerms.Length; --i >= 0; )
+                    {
+                        terms[i] = sortedTerms[i].Key;
+                    }
+                    return terms;
+                }
+
+                public int[] GetTermFrequencies()
+                {
+                    int[] freqs = new int[sortedTerms.Length];
+                    for (int i = sortedTerms.Length; --i >= 0; )
+                    {
+                        freqs[i] = _index.NumPositions(sortedTerms[i].Value);
+                    }
+                    return freqs;
+                }
+
+                public int IndexOf(string term)
+                {
+                    int i = Array.BinarySearch(sortedTerms, new KeyValuePair<string, ArrayIntList>(term, null), Info.ArrayIntListComparer);
+                    return i >= 0 ? i : -1;
+                }
+
+                public int[] IndexesOf(string[] terms, int start, int len)
+                {
+                    int[] indexes = new int[len];
+                    for (int i = 0; i < len; i++)
+                    {
+                        indexes[i] = IndexOf(terms[start++]);
+                    }
+                    return indexes;
+                }
+
+                public int[] GetTermPositions(int index)
+                {
+                    return sortedTerms[index].Value.ToArray(_index.stride);
+                }
+
+                public TermVectorOffsetInfo[] GetOffsets(int index)
+                {
+                    if (_index.stride == 1) return null; // no offsets stored
+
+                    ArrayIntList positions = sortedTerms[index].Value;
+                    int size = positions.Size();
+                    TermVectorOffsetInfo[] offsets = new TermVectorOffsetInfo[size / _index.stride];
+
+                    for (int i = 0, j = 1; j < size; i++, j += _index.stride)
+                    {
+                        int start = positions.Get(j);
+                        int end = positions.Get(j + 1);
+                        offsets[i] = new TermVectorOffsetInfo(start, end);
+                    }
+                    return offsets;
+                }
+            }
+        }
+    }
+}

Propchange: incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositionVector.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositions.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositions.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositions.cs Thu May 31 04:33:41 2012
@@ -1,131 +1,131 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace Lucene.Net.Index.Memory
-{
-    public partial class MemoryIndex
-    {
-        private sealed partial class MemoryIndexReader
-        {
-            private sealed class MemoryTermPositions : TermPositions
-            {
-                private readonly MemoryIndex _index;
-                private readonly MemoryIndexReader _reader;
-                private bool hasNext;
-                private int cursor = 0;
-                private ArrayIntList current;
-                private Term term;
-
-                public MemoryTermPositions(MemoryIndex index, MemoryIndexReader reader)
-                {
-                    _index = index;
-                    _reader = reader;
-                }
-
-                public void Seek(Term term)
-                {
-                    this.term = term;
-
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".seek: " + term);
-
-                    if (term == null)
-                    {
-                        hasNext = true; // term==null means match all docs
-                    }
-                    else
-                    {
-                        Info info = _reader.GetInfo(term.Field);
-                        current = info == null ? null : info.GetPositions(term.Text);
-                        hasNext = (current != null);
-                        cursor = 0;
-                    }
-                }
-
-                public void Seek(TermEnum termEnum)
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".seekEnum");
-                    Seek(termEnum.Term());
-                }
-
-                public int Doc
-                {
-                    get
-                    {
-                        if (DEBUG) System.Diagnostics.Debug.WriteLine(".doc");
-                        return 0;
-                    }
-                }
-
-                public int Freq
-                {
-                    get
-                    {
-                        int freq = current != null ? _index.NumPositions(current) : (term == null ? 1 : 0);
-                        if (DEBUG) System.Diagnostics.Debug.WriteLine(".freq: " + freq);
-                        return freq;
-                    }
-                }
-
-                public bool Next()
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".next: " + current + ", oldHasNext=" + hasNext);
-                    bool next = hasNext;
-                    hasNext = false;
-                    return next;
-                }
-
-                public int Read(int[] docs, int[] freqs)
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".read: " + docs.Length);
-                    if (!hasNext) return 0;
-                    hasNext = false;
-                    docs[0] = 0;
-                    freqs[0] = Freq;
-                    return 1;
-                }
-
-                public bool SkipTo(int target)
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".skipTo: " + target);
-                    return Next();
-                }
-
-                public void Close()
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".close");
-                }
-
-                public void Dispose()
-                {
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".close");
-                }
-
-                public int NextPosition()
-                {
-                    int pos = current.Get(cursor);
-                    cursor += _index.stride;
-                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".nextPosition: " + pos);
-                    return pos;
-                }
-
-                public int PayloadLength
-                {
-                    get { throw new NotSupportedException(); }
-                }
-
-                public byte[] GetPayload(byte[] data, int offset)
-                {
-                    throw new NotSupportedException();
-                }
-
-                public bool IsPayloadAvailable
-                {
-                    get { return false; }
-                }
-            }
-        }
-    }
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Index.Memory
+{
+    public partial class MemoryIndex
+    {
+        private sealed partial class MemoryIndexReader
+        {
+            private sealed class MemoryTermPositions : TermPositions
+            {
+                private readonly MemoryIndex _index;
+                private readonly MemoryIndexReader _reader;
+                private bool hasNext;
+                private int cursor = 0;
+                private ArrayIntList current;
+                private Term term;
+
+                public MemoryTermPositions(MemoryIndex index, MemoryIndexReader reader)
+                {
+                    _index = index;
+                    _reader = reader;
+                }
+
+                public void Seek(Term term)
+                {
+                    this.term = term;
+
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".seek: " + term);
+
+                    if (term == null)
+                    {
+                        hasNext = true; // term==null means match all docs
+                    }
+                    else
+                    {
+                        Info info = _reader.GetInfo(term.Field);
+                        current = info == null ? null : info.GetPositions(term.Text);
+                        hasNext = (current != null);
+                        cursor = 0;
+                    }
+                }
+
+                public void Seek(TermEnum termEnum)
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".seekEnum");
+                    Seek(termEnum.Term());
+                }
+
+                public int Doc
+                {
+                    get
+                    {
+                        if (DEBUG) System.Diagnostics.Debug.WriteLine(".doc");
+                        return 0;
+                    }
+                }
+
+                public int Freq
+                {
+                    get
+                    {
+                        int freq = current != null ? _index.NumPositions(current) : (term == null ? 1 : 0);
+                        if (DEBUG) System.Diagnostics.Debug.WriteLine(".freq: " + freq);
+                        return freq;
+                    }
+                }
+
+                public bool Next()
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".next: " + current + ", oldHasNext=" + hasNext);
+                    bool next = hasNext;
+                    hasNext = false;
+                    return next;
+                }
+
+                public int Read(int[] docs, int[] freqs)
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".read: " + docs.Length);
+                    if (!hasNext) return 0;
+                    hasNext = false;
+                    docs[0] = 0;
+                    freqs[0] = Freq;
+                    return 1;
+                }
+
+                public bool SkipTo(int target)
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".skipTo: " + target);
+                    return Next();
+                }
+
+                public void Close()
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".close");
+                }
+
+                public void Dispose()
+                {
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".close");
+                }
+
+                public int NextPosition()
+                {
+                    int pos = current.Get(cursor);
+                    cursor += _index.stride;
+                    if (DEBUG) System.Diagnostics.Debug.WriteLine(".nextPosition: " + pos);
+                    return pos;
+                }
+
+                public int PayloadLength
+                {
+                    get { throw new NotSupportedException(); }
+                }
+
+                public byte[] GetPayload(byte[] data, int offset)
+                {
+                    throw new NotSupportedException();
+                }
+
+                public bool IsPayloadAvailable
+                {
+                    get { return false; }
+                }
+            }
+        }
+    }
 }
\ No newline at end of file

Propchange: incubator/lucene.net/trunk/src/contrib/Memory/MemoryTermPositions.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/src/contrib/Memory/Properties/AssemblyInfo.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Memory/Properties/AssemblyInfo.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Memory/Properties/AssemblyInfo.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Memory/Properties/AssemblyInfo.cs Thu May 31 04:33:41 2012
@@ -1,36 +1,36 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following 
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("Contrib.Memory")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("Contrib.Memory")]
-[assembly: AssemblyCopyright("Copyright ©  2012")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible 
-// to COM components.  If you need to access a type in this assembly from 
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("5d1e7f1d-ae69-4cf0-875e-64c3a5f3a53b")]
-
-// Version information for an assembly consists of the following four values:
-//
-//      Major Version
-//      Minor Version 
-//      Build Number
-//      Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers 
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following 
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Contrib.Memory")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Contrib.Memory")]
+[assembly: AssemblyCopyright("Copyright ©  2012")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible 
+// to COM components.  If you need to access a type in this assembly from 
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("5d1e7f1d-ae69-4cf0-875e-64c3a5f3a53b")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version 
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers 
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]

Propchange: incubator/lucene.net/trunk/src/contrib/Memory/Properties/AssemblyInfo.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/src/contrib/Memory/TermComparer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Memory/TermComparer.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Memory/TermComparer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Memory/TermComparer.cs Thu May 31 04:33:41 2012
@@ -1,32 +1,32 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace Lucene.Net.Index.Memory
-{
-    class TermComparer
-    {
-        /// <summary>
-        /// Sorts term entries into ascending order; also works for
-        /// Arrays.binarySearch() and Arrays.sort()
-        /// </summary>
-        public static int KeyComparer<TKey, TValue>(KeyValuePair<TKey, TValue> x, KeyValuePair<TKey, TValue> y)
-            where TKey : class, IComparable<TKey>
-        {
-            if (x.Key == y.Key) return 0;
-            return typeof (TKey) == typeof (string)
-                       ? string.Compare(x.Key as string, y.Key as string, StringComparison.Ordinal)
-                       : x.Key.CompareTo(y.Key);
-        }
-    }
-
-    sealed class TermComparer<T> : TermComparer, IComparer<KeyValuePair<string, T>>
-    {
-        public int Compare(KeyValuePair<string, T> x, KeyValuePair<string, T> y)
-        {
-            return KeyComparer(x, y);
-        }
-    }
-}
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Index.Memory
+{
+    class TermComparer
+    {
+        /// <summary>
+        /// Sorts term entries into ascending order; also works for
+        /// Arrays.binarySearch() and Arrays.sort()
+        /// </summary>
+        public static int KeyComparer<TKey, TValue>(KeyValuePair<TKey, TValue> x, KeyValuePair<TKey, TValue> y)
+            where TKey : class, IComparable<TKey>
+        {
+            if (x.Key == y.Key) return 0;
+            return typeof (TKey) == typeof (string)
+                       ? string.Compare(x.Key as string, y.Key as string, StringComparison.Ordinal)
+                       : x.Key.CompareTo(y.Key);
+        }
+    }
+
+    sealed class TermComparer<T> : TermComparer, IComparer<KeyValuePair<string, T>>
+    {
+        public int Compare(KeyValuePair<string, T> x, KeyValuePair<string, T> y)
+        {
+            return KeyComparer(x, y);
+        }
+    }
+}

Propchange: incubator/lucene.net/trunk/src/contrib/Memory/TermComparer.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/PointPrefixTreeFieldCacheProvider.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/PrefixCellsTokenizer.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/PrefixTreeStrategy.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/RecursivePrefixTreeFilter.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/RecursivePrefixTreeStrategy.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/TermQueryPrefixTreeStrategy.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/Tree/GeohashPrefixTree.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/Tree/Node.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/Tree/QuadPrefixTree.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/Tree/SpatialPrefixTree.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Prefix/Tree/SpatialPrefixTreeFactory.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/SimpleSpatialFieldInfo.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/SpatialFieldInfo.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/SpatialStrategy.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/Bits.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/CachedDistanceValueSource.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/CachingDoubleValueSource.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/CompatibilityExtensions.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/FixedBitSet.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/FunctionQuery.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/NumericFieldInfo.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/OpenBitSetIterator.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/ShapeFieldCache.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/ShapeFieldCacheProvider.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/StringListTokenizer.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/TermsEnumCompatibility.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/TruncateFilter.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Util/ValueSourceFilter.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Vector/DistanceValueSource.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Vector/TwoDoublesFieldInfo.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lucene.net/trunk/src/contrib/Spatial/Vector/TwoDoublesStrategy.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/test/contrib/Analyzers/Filters/ChainedFilterTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/contrib/Analyzers/Filters/ChainedFilterTest.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/contrib/Analyzers/Filters/ChainedFilterTest.cs (original)
+++ incubator/lucene.net/trunk/test/contrib/Analyzers/Filters/ChainedFilterTest.cs Thu May 31 04:33:41 2012
@@ -1,218 +1,218 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Store;
-using Lucene.Net.Analysis;
-using Lucene.Net.Util;
-
-using NUnit.Framework;
-
-namespace Lucene.Net.Analysis
-{
-    public class ChainedFilterTest : Lucene.Net.TestCase
-    {
-        public static int MAX = 500;
-
-        private RAMDirectory directory;
-        private IndexSearcher searcher;
-        private Query query;
-        // private DateFilter dateFilter;   DateFilter was deprecated and removed
-        private TermRangeFilter dateFilter;
-        private QueryWrapperFilter bobFilter;
-        private QueryWrapperFilter sueFilter;
-
-        [SetUp]
-        public void SetUp()
-        {
-            directory = new RAMDirectory();
-            IndexWriter writer =
-               new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
-
-            DateTime cal = new DateTime(1041397200000L * TimeSpan.TicksPerMillisecond); // 2003 January 01
-
-            for (int i = 0; i < MAX; i++)
-            {
-                Document doc = new Document();
-                doc.Add(new Field("key", "" + (i + 1), Field.Store.YES, Field.Index.NOT_ANALYZED));
-                doc.Add(new Field("owner", (i < MAX / 2) ? "bob" : "sue", Field.Store.YES, Field.Index.NOT_ANALYZED));
-                doc.Add(new Field("date", (cal.Ticks / TimeSpan.TicksPerMillisecond).ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
-                writer.AddDocument(doc);
-
-                cal.AddMilliseconds(1);
-            }
-
-            writer.Close();
-
-            searcher = new IndexSearcher(directory, true);
-
-            // query for everything to make life easier
-            BooleanQuery bq = new BooleanQuery();
-            bq.Add(new TermQuery(new Term("owner", "bob")), BooleanClause.Occur.SHOULD);
-            bq.Add(new TermQuery(new Term("owner", "sue")), BooleanClause.Occur.SHOULD);
-            query = bq;
-
-            // date filter matches everything too
-            //Date pastTheEnd = parseDate("2099 Jan 1");
-            // dateFilter = DateFilter.Before("date", pastTheEnd);
-            // just treat dates as strings and select the whole range for now...
-            dateFilter = new TermRangeFilter("date", "", "ZZZZ", true, true);
-
-            bobFilter = new QueryWrapperFilter(
-                new TermQuery(new Term("owner", "bob")));
-            sueFilter = new QueryWrapperFilter(
-                new TermQuery(new Term("owner", "sue")));
-        }
-
-        private ChainedFilter GetChainedFilter(Filter[] chain, ChainedFilter.Logic[] logic)
-        {
-            if (logic == null)
-            {
-                return new ChainedFilter(chain);
-            }
-            else
-            {
-                return new ChainedFilter(chain, logic);
-            }
-        }
-
-        private ChainedFilter GetChainedFilter(Filter[] chain, ChainedFilter.Logic logic)
-        {
-            return new ChainedFilter(chain, logic);
-        }
-
-
-        [Test]
-        public void TestSingleFilter()
-        {
-            ChainedFilter chain = GetChainedFilter(new Filter[] { dateFilter }, null);
-
-            int numHits = searcher.Search(query, chain, 1000).TotalHits;
-            Assert.AreEqual(MAX, numHits);
-
-            chain = new ChainedFilter(new Filter[] { bobFilter });
-            numHits = searcher.Search(query, chain, 1000).TotalHits;
-            Assert.AreEqual(MAX / 2, numHits);
-
-            chain = GetChainedFilter(new Filter[] { bobFilter }, new ChainedFilter.Logic[] { ChainedFilter.Logic.AND });
-            TopDocs hits = searcher.Search(query, chain, 1000);
-            numHits = hits.TotalHits;
-            Assert.AreEqual(MAX / 2, numHits);
-            Assert.AreEqual("bob", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
-
-            chain = GetChainedFilter(new Filter[] { bobFilter }, new ChainedFilter.Logic[] { ChainedFilter.Logic.ANDNOT });
-            hits = searcher.Search(query, chain, 1000);
-            numHits = hits.TotalHits;
-            Assert.AreEqual(MAX / 2, numHits);
-            Assert.AreEqual("sue", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
-        }
-
-        [Test]
-        public void TestOR()
-        {
-            ChainedFilter chain = GetChainedFilter(
-              new Filter[] { sueFilter, bobFilter }, null);
-
-            int numHits = searcher.Search(query, chain, 1000).TotalHits;
-            Assert.AreEqual(MAX, numHits, "OR matches all");
-        }
-
-        [Test]
-        public void TestAND()
-        {
-            ChainedFilter chain = GetChainedFilter(
-              new Filter[] { dateFilter, bobFilter }, ChainedFilter.Logic.AND);
-
-            TopDocs hits = searcher.Search(query, chain, 1000);
-            Assert.AreEqual(MAX / 2, hits.TotalHits, "AND matches just bob");
-            Assert.AreEqual("bob", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
-        }
-
-        [Test]
-        public void TestXOR()
-        {
-            ChainedFilter chain = GetChainedFilter(
-              new Filter[] { dateFilter, bobFilter }, ChainedFilter.Logic.XOR);
-
-            TopDocs hits = searcher.Search(query, chain, 1000);
-            Assert.AreEqual(MAX / 2, hits.TotalHits, "XOR matches sue");
-            Assert.AreEqual("sue", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
-        }
-
-        [Test]
-        public void TestANDNOT()
-        {
-            ChainedFilter chain = GetChainedFilter(
-              new Filter[] { dateFilter, sueFilter },
-                new ChainedFilter.Logic[] { ChainedFilter.Logic.AND, ChainedFilter.Logic.ANDNOT });
-
-            TopDocs hits = searcher.Search(query, chain, 1000);
-            Assert.AreEqual(MAX / 2, hits.TotalHits, "ANDNOT matches just bob");
-            Assert.AreEqual("bob", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
-
-            chain = GetChainedFilter(
-                new Filter[] { bobFilter, bobFilter },
-                  new ChainedFilter.Logic[] { ChainedFilter.Logic.ANDNOT, ChainedFilter.Logic.ANDNOT });
-
-            hits = searcher.Search(query, chain, 1000);
-            Assert.AreEqual(MAX / 2, hits.TotalHits, "ANDNOT bob ANDNOT bob matches all sues");
-            Assert.AreEqual("sue", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
-        }
-
-        /*
-        private Date parseDate(String s) throws ParseException {
-          return new SimpleDateFormat("yyyy MMM dd", Locale.US).parse(s);
-        }
-        */
-
-        [Test]
-        public void TestWithCachingFilter()
-        {
-            Directory dir = new RAMDirectory();
-            Analyzer analyzer = new WhitespaceAnalyzer();
-
-            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.Close();
-
-            Searcher searcher = new IndexSearcher(dir, true);
-
-            Query query = new TermQuery(new Term("none", "none"));
-
-            QueryWrapperFilter queryFilter = new QueryWrapperFilter(query);
-            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(queryFilter);
-
-            searcher.Search(query, cachingFilter, 1);
-
-            CachingWrapperFilter cachingFilter2 = new CachingWrapperFilter(queryFilter);
-            Filter[] chain = new Filter[2];
-            chain[0] = cachingFilter;
-            chain[1] = cachingFilter2;
-            ChainedFilter cf = new ChainedFilter(chain);
-
-            // throws java.lang.ClassCastException: org.apache.lucene.util.OpenBitSet cannot be cast to java.util.BitSet
-            searcher.Search(new MatchAllDocsQuery(), cf, 1);
-        }
-
-    }
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Analysis;
+using Lucene.Net.Util;
+
+using NUnit.Framework;
+
+namespace Lucene.Net.Analysis
+{
+    public class ChainedFilterTest : Lucene.Net.TestCase
+    {
+        public static int MAX = 500;
+
+        private RAMDirectory directory;
+        private IndexSearcher searcher;
+        private Query query;
+        // private DateFilter dateFilter;   DateFilter was deprecated and removed
+        private TermRangeFilter dateFilter;
+        private QueryWrapperFilter bobFilter;
+        private QueryWrapperFilter sueFilter;
+
+        [SetUp]
+        public void SetUp()
+        {
+            directory = new RAMDirectory();
+            IndexWriter writer =
+               new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+
+            DateTime cal = new DateTime(1041397200000L * TimeSpan.TicksPerMillisecond); // 2003 January 01
+
+            for (int i = 0; i < MAX; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("key", "" + (i + 1), Field.Store.YES, Field.Index.NOT_ANALYZED));
+                doc.Add(new Field("owner", (i < MAX / 2) ? "bob" : "sue", Field.Store.YES, Field.Index.NOT_ANALYZED));
+                doc.Add(new Field("date", (cal.Ticks / TimeSpan.TicksPerMillisecond).ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
+                writer.AddDocument(doc);
+
+                cal.AddMilliseconds(1);
+            }
+
+            writer.Close();
+
+            searcher = new IndexSearcher(directory, true);
+
+            // query for everything to make life easier
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term("owner", "bob")), BooleanClause.Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("owner", "sue")), BooleanClause.Occur.SHOULD);
+            query = bq;
+
+            // date filter matches everything too
+            //Date pastTheEnd = parseDate("2099 Jan 1");
+            // dateFilter = DateFilter.Before("date", pastTheEnd);
+            // just treat dates as strings and select the whole range for now...
+            dateFilter = new TermRangeFilter("date", "", "ZZZZ", true, true);
+
+            bobFilter = new QueryWrapperFilter(
+                new TermQuery(new Term("owner", "bob")));
+            sueFilter = new QueryWrapperFilter(
+                new TermQuery(new Term("owner", "sue")));
+        }
+
+        private ChainedFilter GetChainedFilter(Filter[] chain, ChainedFilter.Logic[] logic)
+        {
+            if (logic == null)
+            {
+                return new ChainedFilter(chain);
+            }
+            else
+            {
+                return new ChainedFilter(chain, logic);
+            }
+        }
+
+        private ChainedFilter GetChainedFilter(Filter[] chain, ChainedFilter.Logic logic)
+        {
+            return new ChainedFilter(chain, logic);
+        }
+
+
+        [Test]
+        public void TestSingleFilter()
+        {
+            ChainedFilter chain = GetChainedFilter(new Filter[] { dateFilter }, null);
+
+            int numHits = searcher.Search(query, chain, 1000).TotalHits;
+            Assert.AreEqual(MAX, numHits);
+
+            chain = new ChainedFilter(new Filter[] { bobFilter });
+            numHits = searcher.Search(query, chain, 1000).TotalHits;
+            Assert.AreEqual(MAX / 2, numHits);
+
+            chain = GetChainedFilter(new Filter[] { bobFilter }, new ChainedFilter.Logic[] { ChainedFilter.Logic.AND });
+            TopDocs hits = searcher.Search(query, chain, 1000);
+            numHits = hits.TotalHits;
+            Assert.AreEqual(MAX / 2, numHits);
+            Assert.AreEqual("bob", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
+
+            chain = GetChainedFilter(new Filter[] { bobFilter }, new ChainedFilter.Logic[] { ChainedFilter.Logic.ANDNOT });
+            hits = searcher.Search(query, chain, 1000);
+            numHits = hits.TotalHits;
+            Assert.AreEqual(MAX / 2, numHits);
+            Assert.AreEqual("sue", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
+        }
+
+        [Test]
+        public void TestOR()
+        {
+            ChainedFilter chain = GetChainedFilter(
+              new Filter[] { sueFilter, bobFilter }, null);
+
+            int numHits = searcher.Search(query, chain, 1000).TotalHits;
+            Assert.AreEqual(MAX, numHits, "OR matches all");
+        }
+
+        [Test]
+        public void TestAND()
+        {
+            ChainedFilter chain = GetChainedFilter(
+              new Filter[] { dateFilter, bobFilter }, ChainedFilter.Logic.AND);
+
+            TopDocs hits = searcher.Search(query, chain, 1000);
+            Assert.AreEqual(MAX / 2, hits.TotalHits, "AND matches just bob");
+            Assert.AreEqual("bob", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
+        }
+
+        [Test]
+        public void TestXOR()
+        {
+            ChainedFilter chain = GetChainedFilter(
+              new Filter[] { dateFilter, bobFilter }, ChainedFilter.Logic.XOR);
+
+            TopDocs hits = searcher.Search(query, chain, 1000);
+            Assert.AreEqual(MAX / 2, hits.TotalHits, "XOR matches sue");
+            Assert.AreEqual("sue", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
+        }
+
+        [Test]
+        public void TestANDNOT()
+        {
+            ChainedFilter chain = GetChainedFilter(
+              new Filter[] { dateFilter, sueFilter },
+                new ChainedFilter.Logic[] { ChainedFilter.Logic.AND, ChainedFilter.Logic.ANDNOT });
+
+            TopDocs hits = searcher.Search(query, chain, 1000);
+            Assert.AreEqual(MAX / 2, hits.TotalHits, "ANDNOT matches just bob");
+            Assert.AreEqual("bob", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
+
+            chain = GetChainedFilter(
+                new Filter[] { bobFilter, bobFilter },
+                  new ChainedFilter.Logic[] { ChainedFilter.Logic.ANDNOT, ChainedFilter.Logic.ANDNOT });
+
+            hits = searcher.Search(query, chain, 1000);
+            Assert.AreEqual(MAX / 2, hits.TotalHits, "ANDNOT bob ANDNOT bob matches all sues");
+            Assert.AreEqual("sue", searcher.Doc(hits.ScoreDocs[0].doc).Get("owner"));
+        }
+
+        /*
+        private Date parseDate(String s) throws ParseException {
+          return new SimpleDateFormat("yyyy MMM dd", Locale.US).parse(s);
+        }
+        */
+
+        [Test]
+        public void TestWithCachingFilter()
+        {
+            Directory dir = new RAMDirectory();
+            Analyzer analyzer = new WhitespaceAnalyzer();
+
+            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+            writer.Close();
+
+            Searcher searcher = new IndexSearcher(dir, true);
+
+            Query query = new TermQuery(new Term("none", "none"));
+
+            QueryWrapperFilter queryFilter = new QueryWrapperFilter(query);
+            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(queryFilter);
+
+            searcher.Search(query, cachingFilter, 1);
+
+            CachingWrapperFilter cachingFilter2 = new CachingWrapperFilter(queryFilter);
+            Filter[] chain = new Filter[2];
+            chain[0] = cachingFilter;
+            chain[1] = cachingFilter2;
+            ChainedFilter cf = new ChainedFilter(chain);
+
+            // throws java.lang.ClassCastException: org.apache.lucene.util.OpenBitSet cannot be cast to java.util.BitSet
+            searcher.Search(new MatchAllDocsQuery(), cf, 1);
+        }
+
+    }
 }
\ No newline at end of file

Propchange: incubator/lucene.net/trunk/test/contrib/Analyzers/Filters/ChainedFilterTest.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/test/contrib/Highlighter/Tokenizer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/contrib/Highlighter/Tokenizer.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/contrib/Highlighter/Tokenizer.cs (original)
+++ incubator/lucene.net/trunk/test/contrib/Highlighter/Tokenizer.cs Thu May 31 04:33:41 2012
@@ -1,213 +1,213 @@
-using System;
-using System.Collections;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-
-namespace Lucene.Net.Search.Highlight.Test
-{
-    /// <summary>
-    /// The class performs token processing in strings
-    /// </summary>
-    public class Tokenizer : IEnumerator<string>
-    {
-        /// Position over the string
-        private long currentPos = 0;
-
-        /// Include demiliters in the results.
-        private bool includeDelims = false;
-
-        /// Char representation of the String to tokenize.
-        private char[] chars = null;
-
-        //The tokenizer uses the default delimiter set: the space character, the tab character, the newline character, and the carriage-return character and the form-feed character
-        private string delimiters = " \t\n\r\f";
-
-        /// <summary>
-        /// Initializes a new class instance with a specified string to process
-        /// </summary>
-        /// <param name="source">String to tokenize</param>
-        public Tokenizer(System.String source)
-        {
-            this.chars = source.ToCharArray();
-        }
-
-        /// <summary>
-        /// Initializes a new class instance with a specified string to process
-        /// and the specified token delimiters to use
-        /// </summary>
-        /// <param name="source">String to tokenize</param>
-        /// <param name="delimiters">String containing the delimiters</param>
-        public Tokenizer(System.String source, System.String delimiters)
-            : this(source)
-        {
-            this.delimiters = delimiters;
-        }
-
-
-        /// <summary>
-        /// Initializes a new class instance with a specified string to process, the specified token 
-        /// delimiters to use, and whether the delimiters must be included in the results.
-        /// </summary>
-        /// <param name="source">String to tokenize</param>
-        /// <param name="delimiters">String containing the delimiters</param>
-        /// <param name="includeDelims">Determines if delimiters are included in the results.</param>
-        public Tokenizer(System.String source, System.String delimiters, bool includeDelims)
-            : this(source, delimiters)
-        {
-            this.includeDelims = includeDelims;
-        }
-
-
-        /// <summary>
-        /// Returns the next token from the token list
-        /// </summary>
-        /// <returns>The string value of the token</returns>
-        public System.String NextToken()
-        {
-            return NextToken(this.delimiters);
-        }
-
-        /// <summary>
-        /// Returns the next token from the source string, using the provided
-        /// token delimiters
-        /// </summary>
-        /// <param name="delimiters">String containing the delimiters to use</param>
-        /// <returns>The string value of the token</returns>
-        public System.String NextToken(System.String delimiters)
-        {
-            //According to documentation, the usage of the received delimiters should be temporary (only for this call).
-            //However, it seems it is not true, so the following line is necessary.
-            this.delimiters = delimiters;
-
-            //at the end 
-            if (this.currentPos == this.chars.Length)
-                throw new System.ArgumentOutOfRangeException();
-                //if over a delimiter and delimiters must be returned
-            else if ((System.Array.IndexOf(delimiters.ToCharArray(), chars[this.currentPos]) != -1)
-                     && this.includeDelims)
-                return "" + this.chars[this.currentPos++];
-                //need to get the token wo delimiters.
-            else
-                return NextToken(delimiters.ToCharArray());
-        }
-
-        //Returns the nextToken wo delimiters
-        private System.String NextToken(char[] delimiters)
-        {
-            string token = "";
-            long pos = this.currentPos;
-
-            //skip possible delimiters
-            while (System.Array.IndexOf(delimiters, this.chars[currentPos]) != -1)
-                //The last one is a delimiter (i.e there is no more tokens)
-                if (++this.currentPos == this.chars.Length)
-                {
-                    this.currentPos = pos;
-                    throw new System.ArgumentOutOfRangeException();
-                }
-
-            //getting the token
-            while (System.Array.IndexOf(delimiters, this.chars[this.currentPos]) == -1)
-            {
-                token += this.chars[this.currentPos];
-                //the last one is not a delimiter
-                if (++this.currentPos == this.chars.Length)
-                    break;
-            }
-            return token;
-        }
-
-
-        /// <summary>
-        /// Determines if there are more tokens to return from the source string
-        /// </summary>
-        /// <returns>True or false, depending if there are more tokens</returns>
-        public bool HasMoreTokens()
-        {
-            //keeping the current pos
-            long pos = this.currentPos;
-
-            try
-            {
-                this.NextToken();
-            }
-            catch (System.ArgumentOutOfRangeException)
-            {
-                return false;
-            }
-            finally
-            {
-                this.currentPos = pos;
-            }
-            return true;
-        }
-
-        /// <summary>
-        /// Remaining tokens count
-        /// </summary>
-        public int Count
-        {
-            get
-            {
-                //keeping the current pos
-                long pos = this.currentPos;
-                int i = 0;
-
-                try
-                {
-                    while (true)
-                    {
-                        this.NextToken();
-                        i++;
-                    }
-                }
-                catch (System.ArgumentOutOfRangeException)
-                {
-                    this.currentPos = pos;
-                    return i;
-                }
-            }
-        }
-
-        /// <summary>
-        ///  Performs the same action as NextToken.
-        /// </summary>
-        public string Current
-        {
-            get { return this.NextToken(); }
-        }
-
-        /// <summary>
-        ///  Performs the same action as NextToken.
-        /// </summary>
-        object IEnumerator.Current
-        {
-            get { return Current; }
-        }
-
-        /// <summary>
-        //  Performs the same action as HasMoreTokens.
-        /// </summary>
-        /// <returns>True or false, depending if there are more tokens</returns>
-        public bool MoveNext()
-        {
-            return this.HasMoreTokens();
-        }
-
-        /// <summary>
-        /// Does nothing.
-        /// </summary>
-        public void Reset()
-        {
-        }
-
-        /// <summary>
-        /// Does nothing.
-        /// </summary>
-        public void Dispose()
-        {
-            
-        }
-    }
-}
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Search.Highlight.Test
+{
+    /// <summary>
+    /// The class performs token processing in strings: it splits a source
+    /// string into tokens separated by a configurable set of delimiter
+    /// characters (its API mirrors java.util.StringTokenizer).
+    /// </summary>
+    public class Tokenizer : IEnumerator<string>
+    {
+        // Current position (index) into the source string.
+        private long currentPos = 0;
+
+        // Include delimiters in the results.
+        private bool includeDelims = false;
+
+        // Char representation of the String to tokenize.
+        private char[] chars = null;
+
+        // The tokenizer uses the default delimiter set: the space character, the tab character,
+        // the newline character, the carriage-return character and the form-feed character.
+        private string delimiters = " \t\n\r\f";
+
+        /// <summary>
+        /// Initializes a new class instance with a specified string to process
+        /// </summary>
+        /// <param name="source">String to tokenize</param>
+        public Tokenizer(System.String source)
+        {
+            this.chars = source.ToCharArray();
+        }
+
+        /// <summary>
+        /// Initializes a new class instance with a specified string to process
+        /// and the specified token delimiters to use
+        /// </summary>
+        /// <param name="source">String to tokenize</param>
+        /// <param name="delimiters">String containing the delimiters</param>
+        public Tokenizer(System.String source, System.String delimiters)
+            : this(source)
+        {
+            this.delimiters = delimiters;
+        }
+
+
+        /// <summary>
+        /// Initializes a new class instance with a specified string to process, the specified token 
+        /// delimiters to use, and whether the delimiters must be included in the results.
+        /// </summary>
+        /// <param name="source">String to tokenize</param>
+        /// <param name="delimiters">String containing the delimiters</param>
+        /// <param name="includeDelims">Determines if delimiters are included in the results.</param>
+        public Tokenizer(System.String source, System.String delimiters, bool includeDelims)
+            : this(source, delimiters)
+        {
+            this.includeDelims = includeDelims;
+        }
+
+
+        /// <summary>
+        /// Returns the next token from the token list
+        /// </summary>
+        /// <returns>The string value of the token</returns>
+        /// <exception cref="System.ArgumentOutOfRangeException">When no more tokens remain</exception>
+        public System.String NextToken()
+        {
+            return NextToken(this.delimiters);
+        }
+
+        /// <summary>
+        /// Returns the next token from the source string, using the provided
+        /// token delimiters
+        /// </summary>
+        /// <param name="delimiters">String containing the delimiters to use</param>
+        /// <returns>The string value of the token</returns>
+        /// <exception cref="System.ArgumentOutOfRangeException">When no more tokens remain</exception>
+        public System.String NextToken(System.String delimiters)
+        {
+            //According to documentation, the usage of the received delimiters should be temporary (only for this call).
+            //However, it seems it is not true, so the following line is necessary.
+            this.delimiters = delimiters;
+
+            //at the end of the string: no more tokens
+            if (this.currentPos == this.chars.Length)
+                throw new System.ArgumentOutOfRangeException();
+                //if over a delimiter and delimiters must be returned
+            else if ((System.Array.IndexOf(delimiters.ToCharArray(), chars[this.currentPos]) != -1)
+                     && this.includeDelims)
+                return "" + this.chars[this.currentPos++];
+                //need to get the token without delimiters.
+            else
+                return NextToken(delimiters.ToCharArray());
+        }
+
+        //Returns the next token without delimiters; restores the position and
+        //throws ArgumentOutOfRangeException if only delimiters remain.
+        private System.String NextToken(char[] delimiters)
+        {
+            string token = "";
+            long pos = this.currentPos;
+
+            //skip possible delimiters
+            while (System.Array.IndexOf(delimiters, this.chars[currentPos]) != -1)
+                //The last one is a delimiter (i.e there is no more tokens)
+                if (++this.currentPos == this.chars.Length)
+                {
+                    this.currentPos = pos;
+                    throw new System.ArgumentOutOfRangeException();
+                }
+
+            //getting the token: accumulate chars until the next delimiter or end of string
+            while (System.Array.IndexOf(delimiters, this.chars[this.currentPos]) == -1)
+            {
+                token += this.chars[this.currentPos];
+                //the last one is not a delimiter
+                if (++this.currentPos == this.chars.Length)
+                    break;
+            }
+            return token;
+        }
+
+
+        /// <summary>
+        /// Determines if there are more tokens to return from the source string.
+        /// Probes by calling NextToken and restoring the position in the finally
+        /// block, so the enumeration state is not advanced.
+        /// </summary>
+        /// <returns>True or false, depending if there are more tokens</returns>
+        public bool HasMoreTokens()
+        {
+            //keeping the current pos
+            long pos = this.currentPos;
+
+            try
+            {
+                this.NextToken();
+            }
+            catch (System.ArgumentOutOfRangeException)
+            {
+                return false;
+            }
+            finally
+            {
+                this.currentPos = pos;
+            }
+            return true;
+        }
+
+        /// <summary>
+        /// Remaining tokens count. Enumerates all remaining tokens to count them
+        /// and restores the position afterwards, so the getter is O(n) but does
+        /// not advance the enumeration.
+        /// </summary>
+        public int Count
+        {
+            get
+            {
+                //keeping the current pos
+                long pos = this.currentPos;
+                int i = 0;
+
+                try
+                {
+                    while (true)
+                    {
+                        this.NextToken();
+                        i++;
+                    }
+                }
+                catch (System.ArgumentOutOfRangeException)
+                {
+                    //exhausted: restore the position and report how many we saw
+                    this.currentPos = pos;
+                    return i;
+                }
+            }
+        }
+
+        /// <summary>
+        /// Performs the same action as NextToken.
+        /// NOTE(review): unlike a typical IEnumerator, reading Current advances
+        /// the position and throws when exhausted.
+        /// </summary>
+        public string Current
+        {
+            get { return this.NextToken(); }
+        }
+
+        /// <summary>
+        /// Performs the same action as NextToken (non-generic IEnumerator view).
+        /// </summary>
+        object IEnumerator.Current
+        {
+            get { return Current; }
+        }
+
+        /// <summary>
+        /// Performs the same action as HasMoreTokens.
+        /// NOTE(review): this does not advance the position (HasMoreTokens
+        /// restores it); advancing happens in the Current getter instead,
+        /// which inverts the usual IEnumerator contract.
+        /// </summary>
+        /// <returns>True or false, depending if there are more tokens</returns>
+        public bool MoveNext()
+        {
+            return this.HasMoreTokens();
+        }
+
+        /// <summary>
+        /// Does nothing. NOTE(review): IEnumerator.Reset would normally rewind
+        /// to the start (currentPos = 0) — confirm callers do not rely on it.
+        /// </summary>
+        public void Reset()
+        {
+        }
+
+        /// <summary>
+        /// Does nothing; the tokenizer holds no disposable resources.
+        /// </summary>
+        public void Dispose()
+        {
+            
+        }
+    }
+}

Propchange: incubator/lucene.net/trunk/test/contrib/Highlighter/Tokenizer.cs
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/lucene.net/trunk/test/contrib/Memory/MemoryIndexTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/contrib/Memory/MemoryIndexTest.cs?rev=1344562&r1=1344561&r2=1344562&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/contrib/Memory/MemoryIndexTest.cs (original)
+++ incubator/lucene.net/trunk/test/contrib/Memory/MemoryIndexTest.cs Thu May 31 04:33:41 2012
@@ -1,227 +1,227 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Text;
-using Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Standard;
-using Lucene.Net.Documents;
-using Lucene.Net.QueryParsers;
-using Lucene.Net.Search;
-using Lucene.Net.Store;
-using Lucene.Net.Test.Analysis;
-using NUnit.Framework;
-using Version = Lucene.Net.Util.Version;
-
-namespace Lucene.Net.Index.Memory.Test
-{
-    /**
-     * Verifies that Lucene MemoryIndex and RAMDirectory have the same behaviour,
-     * returning the same results for queries on some randomish indexes.
-     */
-
-    public class MemoryIndexTest : BaseTokenStreamTestCase
-    {
-        private readonly HashSet<String> _queries = new HashSet<String>();
-        private Random random;
-
-        public static int ITERATIONS = 100;
-
-        [SetUp]
-        public override void SetUp()
-        {
-            base.SetUp();
-            _queries.UnionWith(ReadQueries("testqueries.txt"));
-            _queries.UnionWith(ReadQueries("testqueries2.txt"));
-            random = NewRandom();
-        }
-
-        /**
-         * read a set of queries from a resource file
-         */
-
-        private IEnumerable<string> ReadQueries(String resource)
-        {
-            var queries = new HashSet<String>();
-            using (var fs = File.Open(resource, FileMode.Open, FileAccess.Read))
-            using (var reader = new StreamReader(fs, Encoding.UTF8))
-            {
-                string line;
-                while ((line = reader.ReadLine()) != null)
-                {
-                    line = line.Trim();
-                    if (line.Length > 0 && !line.StartsWith("#") && !line.StartsWith("//"))
-                    {
-                        queries.Add(line);
-                    }
-                }
-                return queries;
-            }
-        }
-
-        /**
-         * runs random tests, up to ITERATIONS times.
-         */
-        [Test]
-        public void TestRandomQueries()
-        {
-            for (int i = 0; i < ITERATIONS; i++)
-                AssertAgainstRAMDirectory();
-        }
-
-        /**
-         * Build a randomish document for both RAMDirectory and MemoryIndex,
-         * and run all the queries against it.
-         */
-
-        public void AssertAgainstRAMDirectory()
-        {
-            var fooField = new StringBuilder();
-            var termField = new StringBuilder();
-
-            // add up to 250 terms to field "foo"
-            for (int i = 0; i < random.Next(250); i++)
-            {
-                fooField.Append(" ");
-                fooField.Append(RandomTerm());
-            }
-
-            // add up to 250 terms to field "term"
-            for (int i = 0; i < random.Next(250); i++)
-            {
-                termField.Append(" ");
-                termField.Append(RandomTerm());
-            }
-
-            var ramdir = new RAMDirectory();
-            var analyzer = RandomAnalyzer();
-            var writer = new IndexWriter(ramdir, analyzer,
-                                                 IndexWriter.MaxFieldLength.UNLIMITED);
-            var doc = new Document();
-            var field1 = new Field("foo", fooField.ToString(), Field.Store.NO, Field.Index.ANALYZED);
-            var field2 = new Field("term", termField.ToString(), Field.Store.NO, Field.Index.ANALYZED);
-            doc.Add(field1);
-            doc.Add(field2);
-            writer.AddDocument(doc);
-            writer.Close();
-
-            var memory = new MemoryIndex();
-            memory.AddField("foo", fooField.ToString(), analyzer);
-            memory.AddField("term", termField.ToString(), analyzer);
-            AssertAllQueries(memory, ramdir, analyzer);
-        }
-
-        /**
-         * Run all queries against both the RAMDirectory and MemoryIndex, ensuring they are the same.
-         */
-
-        public void AssertAllQueries(MemoryIndex memory, RAMDirectory ramdir, Analyzer analyzer)
-        {
-            var ram = new IndexSearcher(ramdir);
-            var mem = memory.CreateSearcher();
-            var qp = new QueryParser(Version.LUCENE_CURRENT, "foo", analyzer);
-
-            foreach (String query in _queries)
-            {
-                var ramDocs = ram.Search(qp.Parse(query), 1);
-                var memDocs = mem.Search(qp.Parse(query), 1);
-                Assert.AreEqual(ramDocs.TotalHits, memDocs.TotalHits);
-            }
-        }
-
-        /**
-         * Return a random analyzer (Simple, Stop, Standard) to analyze the terms.
-         */
-
-        private Analyzer RandomAnalyzer()
-        {
-            switch (random.Next(3))
-            {
-                case 0:
-                    return new SimpleAnalyzer();
-                case 1:
-                    return new StopAnalyzer(Version.LUCENE_CURRENT);
-                default:
-                    return new StandardAnalyzer(Version.LUCENE_CURRENT);
-            }
-        }
-
-        /**
-         * Some terms to be indexed, in addition to random words. 
-         * These terms are commonly used in the queries. 
-         */
-
-        private static readonly string[] TEST_TERMS = {
-                                                          "term", "Term", "tErm", "TERM",
-                                                          "telm", "stop", "drop", "roll", "phrase", "a", "c", "bar",
-                                                          "blar",
-                                                          "gack", "weltbank", "worlbank", "hello", "on", "the", "apache"
-                                                          , "Apache",
-                                                          "copyright", "Copyright"
-                                                      };
-
-
-        /**
-         * half of the time, returns a random term from TEST_TERMS.
-         * the other half of the time, returns a random unicode string.
-         */
-
-        private String RandomTerm()
-        {
-            if (random.Next(2) == 1)
-            {
-                // return a random TEST_TERM
-                return TEST_TERMS[random.Next(TEST_TERMS.Length)];
-            }
-            else
-            {
-                // return a random unicode term
-                return RandomString();
-            }
-        }
-
-        /**
-         * Return a random unicode term, like TestStressIndexing.
-         */
-
-        private String RandomString()
-        {
-            int end = random.Next(20);
-            if (buffer.Length < 1 + end)
-            {
-                char[] newBuffer = new char[(int) ((1 + end)*1.25)];
-                Array.Copy(buffer, 0, newBuffer, 0, buffer.Length);
-                buffer = newBuffer;
-            }
-            for (int i = 0; i < end - 1; i++)
-            {
-                int t = random.Next(6);
-                if (0 == t && i < end - 1)
-                {
-                    // Make a surrogate pair
-                    // High surrogate
-                    buffer[i++] = (char) NextInt(0xd800, 0xdc00);
-                    // Low surrogate
-                    buffer[i] = (char) NextInt(0xdc00, 0xe000);
-                }
-                else if (t <= 1) buffer[i] = (char) random.Next(0x80);
-                else if (2 == t) buffer[i] = (char) NextInt(0x80, 0x800);
-                else if (3 == t) buffer[i] = (char) NextInt(0x800, 0xd7ff);
-                else if (4 == t) buffer[i] = (char) NextInt(0xe000, 0xffff);
-                else if (5 == t)
-                {
-                    // Illegal unpaired surrogate
-                    if (random.Next(1) == 1) buffer[i] = (char) NextInt(0xd800, 0xdc00);
-                    else buffer[i] = (char) NextInt(0xdc00, 0xe000);
-                }
-            }
-            return new String(buffer, 0, end);
-        }
-
-        private char[] buffer = new char[20];
-        // start is inclusive and end is exclusive
-        private int NextInt(int start, int end)
-        {
-            return start + random.Next(end - start);
-        }
-    }
-}
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Standard;
+using Lucene.Net.Documents;
+using Lucene.Net.QueryParsers;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Test.Analysis;
+using NUnit.Framework;
+using Version = Lucene.Net.Util.Version;
+
+namespace Lucene.Net.Index.Memory.Test
+{
+    /**
+     * Verifies that Lucene MemoryIndex and RAMDirectory have the same behaviour,
+     * returning the same results for queries on some randomish indexes.
+     */
+
+    public class MemoryIndexTest : BaseTokenStreamTestCase
+    {
+        // Query strings loaded from the two resource files in SetUp.
+        private readonly HashSet<String> _queries = new HashSet<String>();
+        private Random random;
+
+        // Number of random documents to build and verify per test run.
+        public static int ITERATIONS = 100;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            // Both query files are resolved relative to the test's working directory.
+            _queries.UnionWith(ReadQueries("testqueries.txt"));
+            _queries.UnionWith(ReadQueries("testqueries2.txt"));
+            random = NewRandom();
+        }
+
+        /**
+         * read a set of queries from a resource file, skipping blank lines and
+         * comment lines starting with "#" or "//"
+         */
+
+        private IEnumerable<string> ReadQueries(String resource)
+        {
+            var queries = new HashSet<String>();
+            using (var fs = File.Open(resource, FileMode.Open, FileAccess.Read))
+            using (var reader = new StreamReader(fs, Encoding.UTF8))
+            {
+                string line;
+                while ((line = reader.ReadLine()) != null)
+                {
+                    line = line.Trim();
+                    if (line.Length > 0 && !line.StartsWith("#") && !line.StartsWith("//"))
+                    {
+                        queries.Add(line);
+                    }
+                }
+                return queries;
+            }
+        }
+
+        /**
+         * runs random tests, up to ITERATIONS times.
+         */
+        [Test]
+        public void TestRandomQueries()
+        {
+            for (int i = 0; i < ITERATIONS; i++)
+                AssertAgainstRAMDirectory();
+        }
+
+        /**
+         * Build a randomish document for both RAMDirectory and MemoryIndex,
+         * and run all the queries against it.
+         */
+
+        public void AssertAgainstRAMDirectory()
+        {
+            var fooField = new StringBuilder();
+            var termField = new StringBuilder();
+
+            // add up to 250 terms to field "foo"
+            // NOTE(review): random.Next(250) is re-evaluated on every loop
+            // iteration, so the term count is not uniform over [0, 250) —
+            // confirm this matches the Java original's intent.
+            for (int i = 0; i < random.Next(250); i++)
+            {
+                fooField.Append(" ");
+                fooField.Append(RandomTerm());
+            }
+
+            // add up to 250 terms to field "term" (same re-evaluated bound as above)
+            for (int i = 0; i < random.Next(250); i++)
+            {
+                termField.Append(" ");
+                termField.Append(RandomTerm());
+            }
+
+            // index the same two fields into a RAMDirectory...
+            var ramdir = new RAMDirectory();
+            var analyzer = RandomAnalyzer();
+            var writer = new IndexWriter(ramdir, analyzer,
+                                                 IndexWriter.MaxFieldLength.UNLIMITED);
+            var doc = new Document();
+            var field1 = new Field("foo", fooField.ToString(), Field.Store.NO, Field.Index.ANALYZED);
+            var field2 = new Field("term", termField.ToString(), Field.Store.NO, Field.Index.ANALYZED);
+            doc.Add(field1);
+            doc.Add(field2);
+            writer.AddDocument(doc);
+            writer.Close();
+
+            // ...and into a MemoryIndex, using the same analyzer for both
+            var memory = new MemoryIndex();
+            memory.AddField("foo", fooField.ToString(), analyzer);
+            memory.AddField("term", termField.ToString(), analyzer);
+            AssertAllQueries(memory, ramdir, analyzer);
+        }
+
+        /**
+         * Run all queries against both the RAMDirectory and MemoryIndex, ensuring they are the same.
+         * NOTE(review): neither searcher is closed/disposed here — tolerable for
+         * RAM-backed test indexes, but worth confirming.
+         */
+
+        public void AssertAllQueries(MemoryIndex memory, RAMDirectory ramdir, Analyzer analyzer)
+        {
+            var ram = new IndexSearcher(ramdir);
+            var mem = memory.CreateSearcher();
+            var qp = new QueryParser(Version.LUCENE_CURRENT, "foo", analyzer);
+
+            foreach (String query in _queries)
+            {
+                var ramDocs = ram.Search(qp.Parse(query), 1);
+                var memDocs = mem.Search(qp.Parse(query), 1);
+                Assert.AreEqual(ramDocs.TotalHits, memDocs.TotalHits);
+            }
+        }
+
+        /**
+         * Return a random analyzer (Simple, Stop, Standard) to analyze the terms.
+         */
+
+        private Analyzer RandomAnalyzer()
+        {
+            switch (random.Next(3))
+            {
+                case 0:
+                    return new SimpleAnalyzer();
+                case 1:
+                    return new StopAnalyzer(Version.LUCENE_CURRENT);
+                default:
+                    return new StandardAnalyzer(Version.LUCENE_CURRENT);
+            }
+        }
+
+        /**
+         * Some terms to be indexed, in addition to random words. 
+         * These terms are commonly used in the queries. 
+         */
+
+        private static readonly string[] TEST_TERMS = {
+                                                          "term", "Term", "tErm", "TERM",
+                                                          "telm", "stop", "drop", "roll", "phrase", "a", "c", "bar",
+                                                          "blar",
+                                                          "gack", "weltbank", "worlbank", "hello", "on", "the", "apache"
+                                                          , "Apache",
+                                                          "copyright", "Copyright"
+                                                      };
+
+
+        /**
+         * half of the time, returns a random term from TEST_TERMS.
+         * the other half of the time, returns a random unicode string.
+         */
+
+        private String RandomTerm()
+        {
+            if (random.Next(2) == 1)
+            {
+                // return a random TEST_TERM
+                return TEST_TERMS[random.Next(TEST_TERMS.Length)];
+            }
+            else
+            {
+                // return a random unicode term
+                return RandomString();
+            }
+        }
+
+        /**
+         * Return a random unicode term, like TestStressIndexing.
+         */
+
+        private String RandomString()
+        {
+            int end = random.Next(20);
+            if (buffer.Length < 1 + end)
+            {
+                // grow the scratch buffer by ~25%, preserving existing contents
+                char[] newBuffer = new char[(int) ((1 + end)*1.25)];
+                Array.Copy(buffer, 0, newBuffer, 0, buffer.Length);
+                buffer = newBuffer;
+            }
+            // NOTE(review): the loop bound is "end - 1", so index end - 1 is never
+            // written on this call even though the returned string has length
+            // "end" — its last char may be stale from a previous call. Confirm
+            // against the Java original (which loops to "end").
+            for (int i = 0; i < end - 1; i++)
+            {
+                int t = random.Next(6);
+                // NOTE(review): the "i < end - 1" check is redundant — it is
+                // already guaranteed by the loop condition.
+                if (0 == t && i < end - 1)
+                {
+                    // Make a surrogate pair
+                    // High surrogate
+                    buffer[i++] = (char) NextInt(0xd800, 0xdc00);
+                    // Low surrogate
+                    buffer[i] = (char) NextInt(0xdc00, 0xe000);
+                }
+                else if (t <= 1) buffer[i] = (char) random.Next(0x80);
+                else if (2 == t) buffer[i] = (char) NextInt(0x80, 0x800);
+                else if (3 == t) buffer[i] = (char) NextInt(0x800, 0xd7ff);
+                else if (4 == t) buffer[i] = (char) NextInt(0xe000, 0xffff);
+                else if (5 == t)
+                {
+                    // Illegal unpaired surrogate
+                    // NOTE(review): random.Next(1) always returns 0, so the
+                    // high-surrogate branch below is unreachable; the Java
+                    // original presumably used nextBoolean()/next(2).
+                    if (random.Next(1) == 1) buffer[i] = (char) NextInt(0xd800, 0xdc00);
+                    else buffer[i] = (char) NextInt(0xdc00, 0xe000);
+                }
+            }
+            return new String(buffer, 0, end);
+        }
+
+        // Scratch buffer reused across RandomString calls (not thread-safe).
+        private char[] buffer = new char[20];
+        // start is inclusive and end is exclusive
+        private int NextInt(int start, int end)
+        {
+            return start + random.Next(end - start);
+        }
+    }
+}

Propchange: incubator/lucene.net/trunk/test/contrib/Memory/MemoryIndexTest.cs
------------------------------------------------------------------------------
    svn:eol-style = native



Mime
View raw message