lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dougs...@apache.org
Subject svn commit: r798995 [15/35] - in /incubator/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/QueryParser/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net...
Date Wed, 29 Jul 2009 18:04:24 GMT
Added: incubator/lucene.net/trunk/C#/src/Lucene.Net/Index/TermsHashPerField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/Index/TermsHashPerField.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/Index/TermsHashPerField.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/Index/TermsHashPerField.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,619 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using Fieldable = Lucene.Net.Documents.Fieldable;
+using Token = Lucene.Net.Analysis.Token;
+using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
+
+namespace Lucene.Net.Index
+{
+    /** Gathers postings for a single field during inverting: each unique term
+     *  is "interned" into a pooled RawPostingList and tracked in an
+     *  open-addressed hash table, with per-term byte streams (freq/prox etc.)
+     *  written into slice-allocated byte blocks.  One instance exists per
+     *  (thread, field); a secondary TermsHash in the chain (e.g. term
+     *  vectors) is reached through nextPerField. */
+    internal sealed class TermsHashPerField : InvertedDocConsumerPerField
+    {
+
+        internal readonly TermsHashConsumerPerField consumer;
+        internal readonly TermsHashPerField nextPerField;
+        internal readonly TermsHashPerThread perThread;
+        internal readonly DocumentsWriter.DocState docState;
+        internal readonly DocInverter.FieldInvertState fieldState;
+
+        // Copied from our perThread
+        internal readonly CharBlockPool charPool;
+        internal readonly IntBlockPool intPool;
+        internal readonly ByteBlockPool bytePool;
+
+        internal readonly int streamCount;   // number of byte streams per posting, as reported by the consumer
+        internal readonly int numPostingInt; // ints reserved per posting: 2 per stream (see ctor)
+
+        internal readonly FieldInfo fieldInfo;
+
+        internal bool postingsCompacted;     // true after compactPostings() packed entries to the front
+        internal int numPostings;            // number of occupied slots in postingsHash
+        private const int POSTINGS_HASH_SIZE_DEFAULT = 4;
+        // Hash table size must stay a power of 2 so that (code & mask) works
+        // as the slot index; half-size is the 50%-load rehash trigger.
+        private int postingsHashSize = POSTINGS_HASH_SIZE_DEFAULT;
+        private int postingsHashHalfSize = POSTINGS_HASH_SIZE_DEFAULT / 2;
+        private int postingsHashMask = POSTINGS_HASH_SIZE_DEFAULT - 1;
+        private RawPostingList[] postingsHash = new RawPostingList[POSTINGS_HASH_SIZE_DEFAULT];
+        private RawPostingList p;            // posting for the term currently being added
+
+        /** Wires this per-field instance to the shared per-thread pools and,
+         *  when a secondary TermsHash exists, creates the chained
+         *  nextPerField for the same field. */
+        public TermsHashPerField(DocInverterPerField docInverterPerField, TermsHashPerThread perThread, TermsHashPerThread nextPerThread, FieldInfo fieldInfo)
+        {
+            this.perThread = perThread;
+            intPool = perThread.intPool;
+            charPool = perThread.charPool;
+            bytePool = perThread.bytePool;
+            docState = perThread.docState;
+            fieldState = docInverterPerField.fieldState;
+            this.consumer = perThread.consumer.addField(this, fieldInfo);
+            streamCount = consumer.getStreamCount();
+            numPostingInt = 2 * streamCount;
+            this.fieldInfo = fieldInfo;
+            if (nextPerThread != null)
+                nextPerField = (TermsHashPerField)nextPerThread.addField(docInverterPerField, fieldInfo);
+            else
+                nextPerField = null;
+        }
+
+        /** Shrinks the hash array toward targetSize after a flush, so a field
+         *  that briefly saw many terms does not pin a large array.  Only
+         *  resizes downward, and only to powers of 2 (minimum 8). */
+        internal void shrinkHash(int targetSize)
+        {
+            System.Diagnostics.Debug.Assert(postingsCompacted || numPostings == 0);
+
+            // Cannot use ArrayUtil.shrink because we require power
+            // of 2:
+            int newSize = postingsHash.Length;
+            while (newSize >= 8 && newSize / 4 > targetSize)
+            {
+                newSize /= 2;
+            }
+
+            if (newSize != postingsHash.Length)
+            {
+                postingsHash = new RawPostingList[newSize];
+                postingsHashSize = newSize;
+                postingsHashHalfSize = newSize / 2;
+                postingsHashMask = newSize - 1;
+            }
+        }
+
+        /** Recycles all postings back to the shared TermsHash, clears the
+         *  table, and resets the chained nextPerField.  Compacting first
+         *  groups the non-null entries at the front so only the first
+         *  numPostings slots need recycling/clearing. */
+        public void reset()
+        {
+            if (!postingsCompacted)
+                compactPostings();
+            System.Diagnostics.Debug.Assert(numPostings <= postingsHash.Length);
+            if (numPostings > 0)
+            {
+                perThread.termsHash.recyclePostings(postingsHash, numPostings);
+                SupportClass.CollectionsSupport.ArrayFill(postingsHash, 0, numPostings, null);
+                numPostings = 0;
+            }
+            postingsCompacted = false;
+            if (nextPerField != null)
+                nextPerField.reset();
+        }
+
+        /** Discards all buffered state for this field (and the chain) after
+         *  an aborting exception.  NOTE(review): locks on 'this', mirroring
+         *  the upstream Java 'synchronized' method. */
+        internal override void abort()
+        {
+            lock (this)
+            {
+                reset();
+                if (nextPerField != null)
+                    nextPerField.abort();
+            }
+        }
+
+        /** Positions reader at the start of the given byte stream for
+         *  posting p.  p.intStart encodes (buffer index << INT_BLOCK_SHIFT) +
+         *  offset; ints[upto + stream] holds the stream's current end. */
+        public void initReader(ByteSliceReader reader, RawPostingList p, int stream)
+        {
+            System.Diagnostics.Debug.Assert(stream < streamCount);
+            int[] ints = intPool.buffers[p.intStart >> DocumentsWriter.INT_BLOCK_SHIFT];
+            int upto = p.intStart & DocumentsWriter.INT_BLOCK_MASK;
+            reader.Init(bytePool,
+                        p.byteStart + stream * ByteBlockPool.FIRST_LEVEL_SIZE,
+                        ints[upto + stream]);
+        }
+
+        /** Moves all non-null entries to the front of postingsHash, in their
+         *  current table order, so the first numPostings slots are dense.
+         *  After this the table is no longer usable as a hash until reset. */
+        private void compactPostings()
+        {
+            lock (this)
+            {
+                int upto = 0;
+                for (int i = 0; i < postingsHashSize; i++)
+                {
+                    if (postingsHash[i] != null)
+                    {
+                        if (upto < i)
+                        {
+                            postingsHash[upto] = postingsHash[i];
+                            postingsHash[i] = null;
+                        }
+                        upto++;
+                    }
+                }
+
+                System.Diagnostics.Debug.Assert(upto == numPostings);
+                postingsCompacted = true;
+            }
+        }
+
+        /** Collapse the hash table & sort in-place. */
+        public RawPostingList[] sortPostings()
+        {
+            compactPostings();
+            quickSort(postingsHash, 0, numPostings - 1);
+            return postingsHash;
+        }
+
+        /** In-place quicksort of postings[lo..hi] (inclusive) by term text,
+         *  using median-of-three pivot selection.  Recursion bounds match
+         *  the upstream Java implementation. */
+        void quickSort(RawPostingList[] postings, int lo, int hi)
+        {
+            if (lo >= hi)
+                return;
+            else if (hi == 1 + lo)
+            {
+                // Two elements: swap if out of order.
+                if (comparePostings(postings[lo], postings[hi]) > 0)
+                {
+                    RawPostingList tmp = postings[lo];
+                    postings[lo] = postings[hi];
+                    postings[hi] = tmp;
+                }
+                return;
+            }
+
+            // Unsigned shift avoids overflow for large lo+hi.
+            int mid = (int)((uint)(lo + hi) >> 1);
+
+            // Median-of-three: order lo, mid, hi so postings[mid] is the
+            // median and becomes the partition value.
+            if (comparePostings(postings[lo], postings[mid]) > 0)
+            {
+                RawPostingList tmp = postings[lo];
+                postings[lo] = postings[mid];
+                postings[mid] = tmp;
+            }
+
+            if (comparePostings(postings[mid], postings[hi]) > 0)
+            {
+                RawPostingList tmp = postings[mid];
+                postings[mid] = postings[hi];
+                postings[hi] = tmp;
+
+                if (comparePostings(postings[lo], postings[mid]) > 0)
+                {
+                    RawPostingList tmp2 = postings[lo];
+                    postings[lo] = postings[mid];
+                    postings[mid] = tmp2;
+                }
+            }
+
+            int left = lo + 1;
+            int right = hi - 1;
+
+            if (left >= right)
+                return;
+
+            RawPostingList partition = postings[mid];
+
+            // Standard partition loop: shrink from both ends, swapping
+            // misplaced pairs, until the pointers meet.
+            for (; ; )
+            {
+                while (comparePostings(postings[right], partition) > 0)
+                    --right;
+
+                while (left < right && comparePostings(postings[left], partition) <= 0)
+                    ++left;
+
+                if (left < right)
+                {
+                    RawPostingList tmp = postings[left];
+                    postings[left] = postings[right];
+                    postings[right] = tmp;
+                    --right;
+                }
+                else
+                {
+                    break;
+                }
+            }
+
+            quickSort(postings, lo, left);
+            quickSort(postings, left + 1, hi);
+        }
+
+        /** Compares term text for two Posting instance and
+         *  returns -1 if p1 < p2; 1 if p1 > p2; else 0.
+         *  Terms are stored in char pool buffers, terminated by 0xffff
+         *  (see add(Token)), so the terminator sorts a prefix before any
+         *  longer term. */
+        int comparePostings(RawPostingList p1, RawPostingList p2)
+        {
+
+            if (p1 == p2)
+                return 0;
+
+            // textStart encodes (buffer index << CHAR_BLOCK_SHIFT) + offset.
+            char[] text1 = charPool.buffers[p1.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
+            int pos1 = p1.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
+            char[] text2 = charPool.buffers[p2.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
+            int pos2 = p2.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
+
+            System.Diagnostics.Debug.Assert(text1 != text2 || pos1 != pos2);
+
+            while (true)
+            {
+                char c1 = text1[pos1++];
+                char c2 = text2[pos2++];
+                if (c1 != c2)
+                {
+                    // 0xffff is the end-of-term sentinel: the term that
+                    // ended first is the smaller one.
+                    if (0xffff == c2)
+                        return 1;
+                    else if (0xffff == c1)
+                        return -1;
+                    else
+                        return c1 - c2;
+                }
+                else
+                    // This method should never compare equal postings
+                    // unless p1==p2
+                    System.Diagnostics.Debug.Assert(c1 != 0xffff);
+            }
+        }
+
+        /** Test whether the text for current RawPostingList p equals
+         *  current tokenText. */
+        private bool postingEquals(char[] tokenText, int tokenTextLen)
+        {
+
+            char[] text = perThread.charPool.buffers[p.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
+            System.Diagnostics.Debug.Assert(text != null);
+            int pos = p.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
+
+            int tokenPos = 0;
+            for (; tokenPos < tokenTextLen; pos++, tokenPos++)
+                if (tokenText[tokenPos] != text[pos])
+                    return false;
+            // Equal only if the stored term also ends here (0xffff sentinel).
+            return 0xffff == text[pos];
+        }
+
+        private bool doCall;     // whether our consumer wants this document's fields
+        private bool doNextCall; // whether the chained field wants them
+
+        /** Asks the consumer (and the chain) whether this document's
+         *  instances of the field should be inverted at all. */
+        internal override bool start(Fieldable[] fields, int count)
+        {
+            doCall = consumer.start(fields, count);
+            if (nextPerField != null)
+                doNextCall = nextPerField.start(fields, count);
+            return doCall || doNextCall;
+        }
+
+        // Secondary entry point (for 2nd & subsequent TermsHash),
+        // because token text has already been "interned" into
+        // textStart, so we hash by textStart
+        public void add(Token token, int textStart)
+        {
+
+            int code = textStart;
+
+            int hashPos = code & postingsHashMask;
+
+            System.Diagnostics.Debug.Assert(!postingsCompacted);
+
+            // Locate RawPostingList in hash
+            p = postingsHash[hashPos];
+
+            if (p != null && p.textStart != textStart)
+            {
+                // Conflict: keep searching different locations in
+                // the hash table.  The odd increment guarantees the probe
+                // sequence visits every slot of the power-of-2 table.
+                int inc = ((code >> 8) + code) | 1;
+                do
+                {
+                    code += inc;
+                    hashPos = code & postingsHashMask;
+                    p = postingsHash[hashPos];
+                } while (p != null && p.textStart != textStart);
+            }
+
+            if (p == null)
+            {
+
+                // First time we are seeing this token since we last
+                // flushed the hash.
+
+                // Refill?
+                if (0 == perThread.freePostingsCount)
+                    perThread.morePostings();
+
+                // Pull next free RawPostingList from free list
+                p = perThread.freePostings[--perThread.freePostingsCount];
+                System.Diagnostics.Debug.Assert(p != null);
+
+                p.textStart = textStart;
+
+                System.Diagnostics.Debug.Assert(postingsHash[hashPos] == null);
+                postingsHash[hashPos] = p;
+                numPostings++;
+
+                // Grow at 50% load to keep probe chains short.
+                if (numPostings == postingsHashHalfSize)
+                    rehashPostings(2 * postingsHashSize);
+
+                // Init stream slices
+                if (numPostingInt + intPool.intUpto > DocumentsWriter.INT_BLOCK_SIZE)
+                    intPool.nextBuffer();
+
+                if (DocumentsWriter.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt * ByteBlockPool.FIRST_LEVEL_SIZE)
+                    bytePool.NextBuffer();
+
+                intUptos = intPool.buffer;
+                intUptoStart = intPool.intUpto;
+                intPool.intUpto += streamCount;
+
+                p.intStart = intUptoStart + intPool.intOffset;
+
+                // One first-level byte slice per stream; each int slot
+                // tracks that stream's current write position.
+                for (int i = 0; i < streamCount; i++)
+                {
+                    int upto = bytePool.NewSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
+                    intUptos[intUptoStart + i] = upto + bytePool.byteOffset;
+                }
+                p.byteStart = intUptos[intUptoStart];
+
+                consumer.newTerm(token, p);
+
+            }
+            else
+            {
+                // Seen before: restore the stream write positions for this
+                // posting and let the consumer append.
+                intUptos = intPool.buffers[p.intStart >> DocumentsWriter.INT_BLOCK_SHIFT];
+                intUptoStart = p.intStart & DocumentsWriter.INT_BLOCK_MASK;
+                consumer.addTerm(token, p);
+            }
+        }
+
+        // Primary entry point (for first TermsHash)
+        internal override void add(Token token)
+        {
+
+            System.Diagnostics.Debug.Assert(!postingsCompacted);
+
+            // We are first in the chain so we must "intern" the
+            // term text into textStart address
+
+            // Get the text of this term.
+            char[] tokenText = token.TermBuffer();
+            int tokenTextLen = token.TermLength();
+
+            // Compute hashcode & replace any invalid UTF16 sequences
+            // (walking backwards so a low surrogate can look ahead to the
+            // preceding char for its matching high surrogate).
+            int downto = tokenTextLen;
+            int code = 0;
+            while (downto > 0)
+            {
+                char ch = tokenText[--downto];
+
+                if (ch >= UnicodeUtil.UNI_SUR_LOW_START && ch <= UnicodeUtil.UNI_SUR_LOW_END)
+                {
+                    if (0 == downto)
+                    {
+                        // Unpaired
+                        ch = tokenText[downto] = (char) UnicodeUtil.UNI_REPLACEMENT_CHAR;
+                    }
+                    else
+                    {
+                        char ch2 = tokenText[downto - 1];
+                        if (ch2 >= UnicodeUtil.UNI_SUR_HIGH_START && ch2 <= UnicodeUtil.UNI_SUR_HIGH_END)
+                        {
+                            // OK: high followed by low.  This is a valid
+                            // surrogate pair.
+                            code = ((code * 31) + ch) * 31 + ch2;
+                            downto--;
+                            continue;
+                        }
+                        else
+                        {
+                            // Unpaired
+                            ch = tokenText[downto] = (char) UnicodeUtil.UNI_REPLACEMENT_CHAR;
+                        }
+                    }
+                }
+                else if (ch >= UnicodeUtil.UNI_SUR_HIGH_START && ch <= UnicodeUtil.UNI_SUR_HIGH_END)
+                    // Unpaired
+                    ch = tokenText[downto] = (char) UnicodeUtil.UNI_REPLACEMENT_CHAR;
+
+                code = (code * 31) + ch;
+            }
+
+            // NOTE: this hash must stay in sync with the primary branch of
+            // rehashPostings, which recomputes it from the stored text.
+            int hashPos = code & postingsHashMask;
+
+            // Locate RawPostingList in hash
+            p = postingsHash[hashPos];
+
+            if (p != null && !postingEquals(tokenText, tokenTextLen))
+            {
+                // Conflict: keep searching different locations in
+                // the hash table.
+                int inc = ((code >> 8) + code) | 1;
+                do
+                {
+                    code += inc;
+                    hashPos = code & postingsHashMask;
+                    p = postingsHash[hashPos];
+                } while (p != null && !postingEquals(tokenText, tokenTextLen));
+            }
+
+            if (p == null)
+            {
+
+                // First time we are seeing this token since we last
+                // flushed the hash.
+                int textLen1 = 1 + tokenTextLen;   // +1 for the 0xffff terminator
+                if (textLen1 + charPool.charUpto > DocumentsWriter.CHAR_BLOCK_SIZE)
+                {
+                    if (textLen1 > DocumentsWriter.CHAR_BLOCK_SIZE)
+                    {
+                        // Just skip this term, to remain as robust as
+                        // possible during indexing.  A TokenFilter
+                        // can be inserted into the analyzer chain if
+                        // other behavior is wanted (pruning the term
+                        // to a prefix, throwing an exception, etc).
+
+                        // Record a 30-char prefix so the error message can
+                        // identify the offending term.
+                        if (docState.maxTermPrefix == null)
+                            docState.maxTermPrefix = new System.String(tokenText, 0, 30);
+
+                        consumer.skippingLongTerm(token);
+                        return;
+                    }
+                    charPool.nextBuffer();
+                }
+
+                // Refill?
+                if (0 == perThread.freePostingsCount)
+                    perThread.morePostings();
+
+                // Pull next free RawPostingList from free list
+                p = perThread.freePostings[--perThread.freePostingsCount];
+                System.Diagnostics.Debug.Assert(p != null);
+
+                // Intern the term text: copy into the char pool and append
+                // the 0xffff end-of-term sentinel.
+                char[] text = charPool.buffer;
+                int textUpto = charPool.charUpto;
+                p.textStart = textUpto + charPool.charOffset;
+                charPool.charUpto += textLen1;
+                System.Array.Copy(tokenText, 0, text, textUpto, tokenTextLen);
+                text[textUpto + tokenTextLen] = (char) 0xffff;
+
+                System.Diagnostics.Debug.Assert(postingsHash[hashPos] == null);
+                postingsHash[hashPos] = p;
+                numPostings++;
+
+                if (numPostings == postingsHashHalfSize)
+                    rehashPostings(2 * postingsHashSize);
+
+                // Init stream slices
+                if (numPostingInt + intPool.intUpto > DocumentsWriter.INT_BLOCK_SIZE)
+                    intPool.nextBuffer();
+
+                if (DocumentsWriter.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt * ByteBlockPool.FIRST_LEVEL_SIZE)
+                    bytePool.NextBuffer();
+
+                intUptos = intPool.buffer;
+                intUptoStart = intPool.intUpto;
+                intPool.intUpto += streamCount;
+
+                p.intStart = intUptoStart + intPool.intOffset;
+
+                for (int i = 0; i < streamCount; i++)
+                {
+                    int upto = bytePool.NewSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
+                    intUptos[intUptoStart + i] = upto + bytePool.byteOffset;
+                }
+                p.byteStart = intUptos[intUptoStart];
+
+                consumer.newTerm(token, p);
+
+            }
+            else
+            {
+                intUptos = intPool.buffers[p.intStart >> DocumentsWriter.INT_BLOCK_SHIFT];
+                intUptoStart = p.intStart & DocumentsWriter.INT_BLOCK_MASK;
+                consumer.addTerm(token, p);
+            }
+
+            // Forward the interned textStart to the secondary TermsHash.
+            if (doNextCall)
+                nextPerField.add(token, p.textStart);
+        }
+
+        // Write positions for the current posting's streams; set by add().
+        int[] intUptos;
+        int intUptoStart;
+
+        /** Appends one byte to the given stream of the current posting,
+         *  growing the byte slice when the end-of-slice marker is hit. */
+        void writeByte(int stream, byte b)
+        {
+            int upto = intUptos[intUptoStart + stream];
+            byte[] bytes = bytePool.buffers[upto >> DocumentsWriter.BYTE_BLOCK_SHIFT];
+            System.Diagnostics.Debug.Assert(bytes != null);
+            int offset = upto & DocumentsWriter.BYTE_BLOCK_MASK;
+            if (bytes[offset] != 0)
+            {
+                // End of slice; allocate a new one
+                offset = bytePool.AllocSlice(bytes, offset);
+                bytes = bytePool.buffer;
+                intUptos[intUptoStart + stream] = offset + bytePool.byteOffset;
+            }
+            bytes[offset] = b;
+            (intUptos[intUptoStart + stream])++;
+        }
+
+        /** Appends len bytes from b (starting at offset) to a stream. */
+        public void writeBytes(int stream, byte[] b, int offset, int len)
+        {
+            // TODO: optimize
+            int end = offset + len;
+            for (int i = offset; i < end; i++)
+                writeByte(stream, b[i]);
+        }
+
+        /** Writes i in Lucene's variable-length int encoding (7 bits per
+         *  byte, high bit set on all but the last byte). */
+        internal void writeVInt(int stream, int i)
+        {
+            System.Diagnostics.Debug.Assert(stream < streamCount);
+            while ((i & ~0x7F) != 0)
+            {
+                writeByte(stream, (byte)((i & 0x7f) | 0x80));
+                i = (int)((uint)i >> 7);   // unsigned shift: treat i as unsigned
+            }
+            writeByte(stream, (byte)i);
+        }
+
+        /** Notifies the consumer chain that the current field is done. */
+        internal override void finish()
+        {
+            consumer.finish();
+            if (nextPerField != null)
+                nextPerField.finish();
+        }
+
+        /** Called when postings hash is too small (> 50%
+         *  occupied) or too large (< 20% occupied). */
+        void rehashPostings(int newSize)
+        {
+
+            int newMask = newSize - 1;
+
+            RawPostingList[] newHash = new RawPostingList[newSize];
+            for (int i = 0; i < postingsHashSize; i++)
+            {
+                RawPostingList p0 = postingsHash[i];
+                if (p0 != null)
+                {
+                    int code;
+                    if (perThread.primary)
+                    {
+                        // Primary hash: recompute the text hash by scanning
+                        // backwards from the 0xffff terminator, matching the
+                        // code*31+ch accumulation in add(Token).
+                        int start = p0.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
+                        char[] text = charPool.buffers[p0.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
+                        int pos = start;
+                        while (text[pos] != 0xffff)
+                            pos++;
+                        code = 0;
+                        while (pos > start)
+                            code = (code * 31) + text[--pos];
+                    }
+                    else
+                        // Secondary hash keys directly on textStart.
+                        code = p0.textStart;
+
+                    int hashPos = code & newMask;
+                    System.Diagnostics.Debug.Assert(hashPos >= 0);
+                    if (newHash[hashPos] != null)
+                    {
+                        // Same probe sequence as add().
+                        int inc = ((code >> 8) + code) | 1;
+                        do
+                        {
+                            code += inc;
+                            hashPos = code & newMask;
+                        } while (newHash[hashPos] != null);
+                    }
+                    newHash[hashPos] = p0;
+                }
+            }
+
+            postingsHashMask = newMask;
+            postingsHash = newHash;
+            postingsHashSize = newSize;
+            postingsHashHalfSize = newSize >> 1;
+        }
+    }
+}

Added: incubator/lucene.net/trunk/C#/src/Lucene.Net/Index/TermsHashPerThread.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/Index/TermsHashPerThread.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/Index/TermsHashPerThread.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/Index/TermsHashPerThread.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace Lucene.Net.Index
+{
+    /** Per-thread state for a TermsHash: owns the int/byte block pools
+     *  (and, when primary, the char pool shared down the chain), plus a
+     *  free list of recycled RawPostingList instances that the per-field
+     *  instances draw from. */
+    internal sealed class TermsHashPerThread : InvertedDocConsumerPerThread
+    {
+
+        internal readonly TermsHash termsHash;
+        internal readonly TermsHashConsumerPerThread consumer;
+        internal readonly TermsHashPerThread nextPerThread;
+
+        internal readonly CharBlockPool charPool;
+        internal readonly IntBlockPool intPool;
+        internal readonly ByteBlockPool bytePool;
+        internal readonly bool primary;   // true for the first TermsHash in the chain
+        internal readonly DocumentsWriter.DocState docState;
+
+        // Free list refilled in batches of 256 by morePostings().
+        internal readonly RawPostingList[] freePostings = new RawPostingList[256];
+        internal int freePostingsCount;
+
+        /** Builds the per-thread chain.  A non-null nextTermsHash marks this
+         *  instance as primary: it allocates the char pool, which the
+         *  secondary (constructed recursively below) then shares via
+         *  primaryPerThread. */
+        public TermsHashPerThread(DocInverterPerThread docInverterPerThread, TermsHash termsHash, TermsHash nextTermsHash, TermsHashPerThread primaryPerThread)
+        {
+            docState = docInverterPerThread.docState;
+
+            this.termsHash = termsHash;
+            this.consumer = termsHash.consumer.addThread(this);
+
+            if (nextTermsHash != null)
+            {
+                // We are primary
+                charPool = new CharBlockPool(termsHash.docWriter);
+                primary = true;
+            }
+            else
+            {
+                charPool = primaryPerThread.charPool;
+                primary = false;
+            }
+
+            intPool = new IntBlockPool(termsHash.docWriter, termsHash.trackAllocations);
+            bytePool = new ByteBlockPool(termsHash.docWriter.byteBlockAllocator, termsHash.trackAllocations);
+
+            if (nextTermsHash != null)
+                nextPerThread = nextTermsHash.addThread(docInverterPerThread, this);
+            else
+                nextPerThread = null;
+        }
+
+        /** Creates the per-field instance, handing it our chained
+         *  nextPerThread so the field chain mirrors the thread chain. */
+        internal override InvertedDocConsumerPerField addField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo)
+        {
+            return new TermsHashPerField(docInverterPerField, this, nextPerThread, fieldInfo);
+        }
+
+        /** Discards all buffered state after an aborting exception.
+         *  NOTE(review): locks on 'this', mirroring the upstream Java
+         *  'synchronized' method. */
+        internal override void abort()
+        {
+            lock (this)
+            {
+                reset(true);
+                consumer.abort();
+                if (nextPerThread != null)
+                    nextPerThread.abort();
+            }
+        }
+
+        // perField calls this when it needs more postings:
+        internal void morePostings()
+        {
+            System.Diagnostics.Debug.Assert(freePostingsCount == 0);
+            termsHash.getPostings(freePostings);
+            freePostingsCount = freePostings.Length;
+            System.Diagnostics.Debug.Assert(noNullPostings(freePostings, freePostingsCount, "consumer=" + consumer));
+        }
+
+        /** Debug-only invariant check; always returns true so it can live
+         *  inside a Debug.Assert and compile away in release builds. */
+        private static bool noNullPostings(RawPostingList[] postings, int count, string details)
+        {
+            for (int i = 0; i < count; i++)
+                System.Diagnostics.Debug.Assert(postings[i] != null, "postings[" + i + "] of " + count + " is null: " + details);
+            return true;
+        }
+
+        /** Notifies both consumers in the chain that a document starts. */
+        internal override void startDocument()
+        {
+            consumer.startDocument();
+            if (nextPerThread != null)
+                nextPerThread.consumer.startDocument();
+        }
+
+        /** Collects the per-document writers from both consumers, linking
+         *  the secondary's writer after the primary's when both exist. */
+        internal override DocumentsWriter.DocWriter finishDocument()
+        {
+            DocumentsWriter.DocWriter doc = consumer.finishDocument();
+
+            DocumentsWriter.DocWriter doc2;
+            if (nextPerThread != null)
+                doc2 = nextPerThread.consumer.finishDocument();
+            else
+                doc2 = null;
+            if (doc == null)
+                return doc2;
+            else
+            {
+                doc.SetNext(doc2);
+                return doc;
+            }
+        }
+
+        // Clear all state
+        internal void reset(bool recyclePostings)
+        {
+            intPool.reset();
+            bytePool.Reset();
+
+            // Only the primary owns the char pool; the secondary shares it.
+            if (primary)
+                charPool.reset();
+
+            if (recyclePostings)
+            {
+                termsHash.recyclePostings(freePostings, freePostingsCount);
+                freePostingsCount = 0;
+            }
+        }
+    }
+}

Copied: incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.csproj (from r797580, incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.3.2-VS2005.csproj)
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.csproj?p2=incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.csproj&p1=incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net-2.3.2-VS2005.csproj&r1=797580&r2=798995&rev=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.3.2-VS2005.csproj (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.csproj Wed Jul 29 18:04:12 2009
@@ -3,7 +3,7 @@
     <ProjectType>Local</ProjectType>
     <ProductVersion>8.0.50727</ProductVersion>
     <SchemaVersion>2.0</SchemaVersion>
-    <ProjectGuid>{F04CA2F4-E182-46A8-B914-F46AF5319E83}</ProjectGuid>
+    <ProjectGuid>{5D4AD9BE-1FFB-41AB-9943-25737971BF57}</ProjectGuid>
     <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
     <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
     <ApplicationIcon>
@@ -73,9 +73,9 @@
     <ErrorReport>prompt</ErrorReport>
   </PropertyGroup>
   <ItemGroup>
-    <Reference Include="ICSharpCode.SharpZipLib, Version=0.85.4.369, Culture=neutral, processorArchitecture=MSIL">
+    <Reference Include="ICSharpCode.SharpZipLib, Version=0.85.5.452, Culture=neutral, processorArchitecture=MSIL">
       <SpecificVersion>False</SpecificVersion>
-      <HintPath>..\..\..\References\ICSharpCode.SharpZipLib.dll</HintPath>
+      <HintPath>..\..\..\..\ICSharpCode.SharpZipLib.dll</HintPath>
     </Reference>
     <Reference Include="System">
       <Name>System</Name>
@@ -212,6 +212,11 @@
     <Compile Include="Document\SetBasedFieldSelector.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\BufferedDeletes.cs" />
+    <Compile Include="Index\ByteBlockPool.cs" />
+    <Compile Include="Index\ByteSliceReader.cs" />
+    <Compile Include="Index\ByteSliceWriter.cs" />
+    <Compile Include="Index\CharBlockPool.cs" />
     <Compile Include="Index\CheckIndex.cs" />
     <Compile Include="Index\CompoundFileReader.cs">
       <SubType>Code</SubType>
@@ -224,7 +229,22 @@
     <Compile Include="Index\DefaultSkipListReader.cs" />
     <Compile Include="Index\DefaultSkipListWriter.cs" />
     <Compile Include="Index\DirectoryIndexReader.cs" />
+    <Compile Include="Index\DocConsumer.cs" />
+    <Compile Include="Index\DocConsumerPerThread.cs" />
+    <Compile Include="Index\DocFieldConsumer.cs" />
+    <Compile Include="Index\DocFieldConsumerPerField.cs" />
+    <Compile Include="Index\DocFieldConsumerPerThread.cs" />
+    <Compile Include="Index\DocFieldConsumers.cs" />
+    <Compile Include="Index\DocFieldConsumersPerField.cs" />
+    <Compile Include="Index\DocFieldConsumersPerThread.cs" />
+    <Compile Include="Index\DocFieldProcessor.cs" />
+    <Compile Include="Index\DocFieldProcessorPerField.cs" />
+    <Compile Include="Index\DocFieldProcessorPerThread.cs" />
+    <Compile Include="Index\DocInverter.cs" />
+    <Compile Include="Index\DocInverterPerField.cs" />
+    <Compile Include="Index\DocInverterPerThread.cs" />
     <Compile Include="Index\DocumentsWriter.cs" />
+    <Compile Include="Index\DocumentsWriterThreadState.cs" />
     <Compile Include="Index\FieldInfo.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -244,6 +264,11 @@
     <Compile Include="Index\FilterIndexReader.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\FreqProxFieldMergeState.cs" />
+    <Compile Include="Index\FreqProxTermsWriter.cs" />
+    <Compile Include="Index\FreqProxTermsWriterPerField.cs" />
+    <Compile Include="Index\FreqProxTermsWriterPerThread.cs" />
+    <Compile Include="Index\IndexCommit.cs" />
     <Compile Include="Index\IndexCommitPoint.cs" />
     <Compile Include="Index\IndexDeletionPolicy.cs" />
     <Compile Include="Index\IndexFileDeleter.cs">
@@ -264,10 +289,18 @@
     <Compile Include="Index\IndexWriter.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\IntBlockPool.cs" />
+    <Compile Include="Index\InvertedDocConsumer.cs" />
+    <Compile Include="Index\InvertedDocConsumerPerField.cs" />
+    <Compile Include="Index\InvertedDocConsumerPerThread.cs" />
+    <Compile Include="Index\InvertedDocEndConsumer.cs" />
+    <Compile Include="Index\InvertedDocEndConsumerPerField.cs" />
+    <Compile Include="Index\InvertedDocEndConsumerPerThread.cs" />
     <Compile Include="Index\KeepOnlyLastCommitDeletionPolicy.cs" />
     <Compile Include="Index\LogByteSizeMergePolicy.cs" />
     <Compile Include="Index\LogDocMergePolicy.cs" />
     <Compile Include="Index\LogMergePolicy.cs" />
+    <Compile Include="Index\MergeDocIDRemapper.cs" />
     <Compile Include="Index\MergePolicy.cs" />
     <Compile Include="Index\MergeScheduler.cs" />
     <Compile Include="Index\MultiLevelSkipListReader.cs" />
@@ -279,11 +312,18 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Index\MultiSegmentReader.cs" />
+    <Compile Include="Index\NormsWriter.cs" />
+    <Compile Include="Index\NormsWriterPerField.cs" />
+    <Compile Include="Index\NormsWriterPerThread.cs" />
     <Compile Include="Index\ParallelReader.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Index\Payload.cs" />
     <Compile Include="Index\PositionBasedTermVectorMapper.cs" />
+    <Compile Include="Index\RawPostingList.cs" />
+    <Compile Include="Index\ReadOnlyMultiSegmentReader.cs" />
+    <Compile Include="Index\ReadOnlySegmentReader.cs" />
+    <Compile Include="Index\ReusableStringReader.cs" />
     <Compile Include="Index\SegmentInfo.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -321,6 +361,9 @@
     <Compile Include="Index\SnapshotDeletionPolicy.cs" />
     <Compile Include="Index\SortedTermVectorMapper.cs" />
     <Compile Include="Index\StaleReaderException.cs" />
+    <Compile Include="Index\StoredFieldsWriter.cs" />
+    <Compile Include="Index\StoredFieldsWriterPerField.cs" />
+    <Compile Include="Index\StoredFieldWriterPerThread.cs" />
     <Compile Include="Index\Term.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -351,6 +394,12 @@
     <Compile Include="Index\TermPositionVector.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\TermsHash.cs" />
+    <Compile Include="Index\TermsHashConsumer.cs" />
+    <Compile Include="Index\TermsHashConsumerPerField.cs" />
+    <Compile Include="Index\TermsHashConsumerPerThread.cs" />
+    <Compile Include="Index\TermsHashPerField.cs" />
+    <Compile Include="Index\TermsHashPerThread.cs" />
     <Compile Include="Index\TermVectorEntry.cs" />
     <Compile Include="Index\TermVectorEntryFreqSortedComparator.cs" />
     <Compile Include="Index\TermVectorMapper.cs" />
@@ -360,6 +409,9 @@
     <Compile Include="Index\TermVectorsReader.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\TermVectorsTermsWriter.cs" />
+    <Compile Include="Index\TermVectorsTermsWriterPerField.cs" />
+    <Compile Include="Index\TermVectorsTermsWriterPerThread.cs" />
     <Compile Include="Index\TermVectorsWriter.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -433,6 +485,8 @@
     <Compile Include="Search\DisjunctionSumScorer.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Search\DocIdSet.cs" />
+    <Compile Include="Search\DocIdSetIterator.cs" />
     <Compile Include="Search\ExactPhraseScorer.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -521,6 +575,7 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Search\Payload\BoostingTermQuery.cs" />
+    <Compile Include="Search\Payload\PayloadSpanUtil.cs" />
     <Compile Include="Search\PhrasePositions.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -604,12 +659,14 @@
     <Compile Include="Search\SpanFilter.cs" />
     <Compile Include="Search\SpanFilterResult.cs" />
     <Compile Include="Search\SpanQueryFilter.cs" />
+    <Compile Include="Search\Spans\NearSpans.cs" />
     <Compile Include="Search\Spans\NearSpansOrdered.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Search\Spans\NearSpansUnordered.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Search\Spans\PayloadSpans.cs" />
     <Compile Include="Search\Spans\SpanFirstQuery.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -675,6 +732,8 @@
     <Compile Include="Store\BufferedIndexOutput.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Store\CheckSumIndexInput.cs" />
+    <Compile Include="Store\CheckSumIndexOutput.cs" />
     <Compile Include="Store\Directory.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -728,31 +787,42 @@
     <Compile Include="SupportClass.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Util\ArrayUtil.cs" />
+    <Compile Include="Util\BitUtil.cs" />
     <Compile Include="Util\BitVector.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Util\CloseableThreadLocal.cs" />
     <Compile Include="Util\Constants.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Util\DocIdBitSet.cs" />
+    <Compile Include="Util\OpenBitSet.cs" />
+    <Compile Include="Util\OpenBitSetDISI.cs" />
+    <Compile Include="Util\OpenBitSetIterator.cs" />
     <Compile Include="Util\Parameter.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\PriorityQueue.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Util\Cache\Cache.cs" />
+    <Compile Include="Util\Cache\SimpleLRUCache.cs" />
+    <Compile Include="Util\Cache\SimpleMapCache.cs" />
     <Compile Include="Util\ScorerDocQueue.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\SmallFloat.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Util\SortedVIntList.cs" />
     <Compile Include="Util\StringHelper.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\ToStringUtils.cs">
       <SubType>Code</SubType>
     </Compile>
-    <None Include="Analysis\Standard\StandardTokenizer.jj" />
+    <Compile Include="Util\UnicodeUtil.cs" />
     <None Include="Analysis\Standard\StandardTokenizerImpl.jflex" />
     <None Include="Lucene.Net.Search.RemoteSearchable.config" />
     <None Include="Lucene.Net.Search.TestSort.config" />

Copied: incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.sln (from r797580, incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.3.2-VS2005.sln)
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.sln?p2=incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.sln&p1=incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net-2.3.2-VS2005.sln&r1=797580&r2=798995&rev=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.3.2-VS2005.sln (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net-2.4.0-VS2005.sln Wed Jul 29 18:04:12 2009
@@ -1,7 +1,7 @@
 
 Microsoft Visual Studio Solution File, Format Version 9.00
 # Visual Studio 2005
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net-2.3.2-VS2005", "Lucene.Net-2.3.2-VS2005.csproj", "{F04CA2F4-E182-46A8-B914-F46AF5319E83}"
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net-2.4.0-VS2005", "Lucene.Net-2.4.0-VS2005.csproj", "{5D4AD9BE-1FFB-41AB-9943-25737971BF57}"
 EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -9,10 +9,10 @@
 		Release|Any CPU = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(ProjectConfigurationPlatforms) = postSolution
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Release|Any CPU.Build.0 = Release|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE

Modified: incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.sln
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net.sln?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.sln (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.sln Wed Jul 29 18:04:12 2009
@@ -1,7 +1,7 @@
 
 Microsoft Visual Studio Solution File, Format Version 9.00
 # Visual Studio 2005
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net-2.3.1-VS2005", "Lucene.Net-2.3.1-VS2005.csproj", "{F04CA2F4-E182-46A8-B914-F46AF5319E83}"
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net-2.4.0-VS2005", "Lucene.Net-2.4.0-VS2005.csproj", "{5D4AD9BE-1FFB-41AB-9943-25737971BF57}"
 EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -9,10 +9,10 @@
 		Release|Any CPU = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(ProjectConfigurationPlatforms) = postSolution
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{F04CA2F4-E182-46A8-B914-F46AF5319E83}.Release|Any CPU.Build.0 = Release|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{5D4AD9BE-1FFB-41AB-9943-25737971BF57}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE

Modified: incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/CharStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/QueryParser/CharStream.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/CharStream.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/CharStream.cs Wed Jul 29 18:04:12 2009
@@ -16,6 +16,7 @@
  */
 
 /* Generated By:JavaCC: Do not edit this line. CharStream.java Version 3.0 */
+/* JavaCCOptions:STATIC=false */
 
 using System;
 
@@ -46,18 +47,16 @@
 		char ReadChar();
 		
 		/// <summary> Returns the column position of the character last read.</summary>
-		/// <deprecated> 
-		/// </deprecated>
 		/// <seealso cref="#getEndColumn">
 		/// </seealso>
+        [Obsolete()]
 		int GetColumn();
 		
 		/// <summary> Returns the line number of the character last read.</summary>
-		/// <deprecated> 
-		/// </deprecated>
 		/// <seealso cref="GetEndLine">
 		/// </seealso>
-		int GetLine();
+        [Obsolete()]
+        int GetLine();
 		
 		/// <summary> Returns the column number of the last character for current token (being
 		/// matched after the last call to BeginTOken).
@@ -118,4 +117,5 @@
 		/// </summary>
 		void  Done();
 	}
-}
\ No newline at end of file
+}
+/* JavaCC - OriginalChecksum=32a89423891f765dde472f7ef0e3ef7b (do not edit this line) */

Modified: incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/FastCharStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/QueryParser/FastCharStream.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/FastCharStream.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/FastCharStream.cs Wed Jul 29 18:04:12 2009
@@ -83,13 +83,13 @@
 
 			int charsRead = 0;
 			
-			try
-			{
-				charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
-			}
-			catch
-			{
-			}
+            //try
+            //{
+                charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
+            //}
+            //catch
+            //{
+            //}
 			
 			if (charsRead <= 0)
 				throw new System.IO.IOException("read past eof");

Modified: incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/MultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/QueryParser/MultiFieldQueryParser.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/MultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/MultiFieldQueryParser.cs Wed Jul 29 18:04:12 2009
@@ -114,14 +114,7 @@
 								q.SetBoost((float) boost);
 							}
 						}
-						if (q is PhraseQuery)
-						{
-							((PhraseQuery) q).SetSlop(slop);
-						}
-						if (q is MultiPhraseQuery)
-						{
-							((MultiPhraseQuery) q).SetSlop(slop);
-						}
+                        ApplySlop(q, slop);
 						clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
 					}
 				}
@@ -130,10 +123,23 @@
 					return null;
 				return GetBooleanQuery(clauses, true);
 			}
-			return base.GetFieldQuery(field, queryText);
-		}
-		
-		
+            Query q2 = base.GetFieldQuery(field, queryText);
+            ApplySlop(q2, slop);
+            return q2;
+		}
+
+        private void ApplySlop(Query q, int slop)
+        {
+            if (q is PhraseQuery)
+            {
+                ((PhraseQuery)q).SetSlop(slop);
+            }
+            if (q is MultiPhraseQuery)
+            {
+                ((MultiPhraseQuery)q).SetSlop(slop);
+            }
+        }
+
 		public override Query GetFieldQuery(System.String field, System.String queryText)
 		{
 			return GetFieldQuery(field, queryText, 0);

Modified: incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/ParseException.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Lucene.Net/QueryParser/ParseException.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/ParseException.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Lucene.Net/QueryParser/ParseException.cs Wed Jul 29 18:04:12 2009
@@ -16,6 +16,7 @@
  */
 
 /* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
+/* JavaCCOptions:KEEP_LINE_COL=null */
 
 using System;
 
@@ -50,7 +51,7 @@
 				{
 					return base.Message;
 				}
-				System.String expected = "";
+                System.Text.StringBuilder expected = new System.Text.StringBuilder();
 				int maxSize = 0;
 				for (int i = 0; i < expectedTokenSequences.Length; i++)
 				{
@@ -60,13 +61,13 @@
 					}
 					for (int j = 0; j < expectedTokenSequences[i].Length; j++)
 					{
-						expected += (tokenImage[expectedTokenSequences[i][j]] + " ");
+						expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
 					}
 					if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
 					{
-						expected += "...";
+						expected.Append("...");
 					}
-					expected += (eol + "    ");
+					expected.Append(eol).Append("    ");
 				}
 				System.String retval = "Encountered \"";
 				Token tok = currentToken.next;
@@ -79,8 +80,11 @@
 						retval += tokenImage[0];
 						break;
 					}
-					retval += Add_escapes(tok.image);
-					tok = tok.next;
+                    retval += tokenImage[tok.kind];
+                    retval += " \"";
+                    retval += Add_escapes(tok.image);
+                    retval += " \"";
+                    tok = tok.next;
 				}
 				retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
 				retval += ("." + eol);
@@ -92,7 +96,7 @@
 				{
 					retval += ("Was expecting one of:" + eol + "    ");
 				}
-				retval += expected;
+				retval += expected.ToString();
 				return retval;
 			}
 			
@@ -131,6 +135,10 @@
 			specialConstructor = false;
 		}
 		
+        /// <summary>
+        /// Constructor with message.
+        /// </summary>
+        /// <param name="message"></param>
 		public ParseException(System.String message) : base(message)
 		{
 			specialConstructor = false;
@@ -215,7 +223,7 @@
 						if ((ch = str[i]) < 0x20 || ch > 0x7e)
 						{
 							System.String s = "0000" + System.Convert.ToString(ch, 16);
-							retval.Append("\\u").Append(s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+							retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
 						}
 						else
 						{
@@ -228,4 +236,5 @@
 			return retval.ToString();
 		}
 	}
-}
\ No newline at end of file
+}
+/* JavaCC - OriginalChecksum=c7631a240f7446940695eac31d9483ca (do not edit this line) */



Mime
View raw message