lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mhern...@apache.org
Subject [43/50] [abbrv] git commit: Initial port of classic QueryParser. Broken.
Date Tue, 24 Sep 2013 18:33:19 GMT
Initial port of classic QueryParser. Broken.

Gets stuck in infinite loop if you search with the default field. If
searching with a field specified or the *:* query, it errors out.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/0e6eb14a
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/0e6eb14a
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/0e6eb14a

Branch: refs/heads/branch_4x
Commit: 0e6eb14ac34d87812d5f49a59278a67eaafae531
Parents: 7a4b442
Author: Paul Irwin <paulirwin@gmail.com>
Authored: Thu Aug 8 17:46:19 2013 -0400
Committer: Paul Irwin <paulirwin@gmail.com>
Committed: Thu Aug 8 17:46:19 2013 -0400

----------------------------------------------------------------------
 build/vs2012/Lucene.Net.All/Lucene.Net.All.sln  |   11 +
 src/contrib/Core/Analysis/Ext/Analysis.Ext.cs   |    1 +
 src/contrib/Core/Contrib.Core.csproj            |    7 +-
 .../QueryParsers/Classic/FastCharStream.cs      |  134 ++
 src/contrib/QueryParsers/Classic/ICharStream.cs |   37 +
 .../QueryParsers/Classic/ParseException.cs      |  153 +++
 src/contrib/QueryParsers/Classic/QueryParser.cs |  785 ++++++++++++
 .../QueryParsers/Classic/QueryParserBase.cs     | 1033 +++++++++++++++
 .../Classic/QueryParserConstants.cs             |  126 ++
 .../Classic/QueryParserTokenManager.cs          | 1188 ++++++++++++++++++
 src/contrib/QueryParsers/Classic/Token.cs       |  104 ++
 .../QueryParsers/Classic/TokenMgrError.cs       |  105 ++
 .../QueryParsers/Contrib.QueryParsers.csproj    |   69 +
 .../Standard/ICommonQueryParserConfiguration.cs |   37 +
 .../QueryParsers/Properties/AssemblyInfo.cs     |   36 +
 15 files changed, 3825 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/build/vs2012/Lucene.Net.All/Lucene.Net.All.sln
----------------------------------------------------------------------
diff --git a/build/vs2012/Lucene.Net.All/Lucene.Net.All.sln b/build/vs2012/Lucene.Net.All/Lucene.Net.All.sln
index d8c2cc2..0b9121d 100644
--- a/build/vs2012/Lucene.Net.All/Lucene.Net.All.sln
+++ b/build/vs2012/Lucene.Net.All/Lucene.Net.All.sln
@@ -37,6 +37,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contrib.WordNet.SynLookup",
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contrib.WordNet.Syns2Index", "..\..\..\src\contrib\WordNet\Syns2Index\Contrib.WordNet.Syns2Index.csproj", "{7563D4D9-AE91-42BA-A270-1D264660F6DF}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contrib.QueryParsers", "..\..\..\src\contrib\QueryParsers\Contrib.QueryParsers.csproj", "{56438272-B00E-40DE-9C9A-0785E705E7D9}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -173,6 +175,14 @@ Global
 		{7563D4D9-AE91-42BA-A270-1D264660F6DF}.Release|Any CPU.Build.0 = Release|Any CPU
 		{7563D4D9-AE91-42BA-A270-1D264660F6DF}.Release35|Any CPU.ActiveCfg = Release35|Any CPU
 		{7563D4D9-AE91-42BA-A270-1D264660F6DF}.Release35|Any CPU.Build.0 = Release35|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Debug35|Any CPU.ActiveCfg = Debug|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Debug35|Any CPU.Build.0 = Debug|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Release|Any CPU.Build.0 = Release|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Release35|Any CPU.ActiveCfg = Release|Any CPU
+		{56438272-B00E-40DE-9C9A-0785E705E7D9}.Release35|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
@@ -194,5 +204,6 @@ Global
 		{1407C9BA-337C-4C6C-B065-68328D3871B3} = {7E19085A-545B-4DE8-BBF5-B1DBC370FD37}
 		{2CA12E3F-76E1-4FA6-9E87-37079A7B7C69} = {7E19085A-545B-4DE8-BBF5-B1DBC370FD37}
 		{7563D4D9-AE91-42BA-A270-1D264660F6DF} = {7E19085A-545B-4DE8-BBF5-B1DBC370FD37}
+		{56438272-B00E-40DE-9C9A-0785E705E7D9} = {7E19085A-545B-4DE8-BBF5-B1DBC370FD37}
 	EndGlobalSection
 EndGlobal

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/Core/Analysis/Ext/Analysis.Ext.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Core/Analysis/Ext/Analysis.Ext.cs b/src/contrib/Core/Analysis/Ext/Analysis.Ext.cs
index beec3fd..0903cfb 100644
--- a/src/contrib/Core/Analysis/Ext/Analysis.Ext.cs
+++ b/src/contrib/Core/Analysis/Ext/Analysis.Ext.cs
@@ -24,6 +24,7 @@ using System.IO;
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.Tokenattributes;
 using Lucene.Net.Util;
+using Lucene.Net.Analysis.Core;
 
 
 namespace Lucene.Net.Analysis.Ext

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/Core/Contrib.Core.csproj
----------------------------------------------------------------------
diff --git a/src/contrib/Core/Contrib.Core.csproj b/src/contrib/Core/Contrib.Core.csproj
index bced134..6e1bdb9 100644
--- a/src/contrib/Core/Contrib.Core.csproj
+++ b/src/contrib/Core/Contrib.Core.csproj
@@ -30,7 +30,8 @@
     <RootNamespace>Lucene.Net</RootNamespace>
     <AssemblyName>Lucene.Net.Contrib.Core</AssemblyName>
     <FileAlignment>512</FileAlignment>
-    <FileUpgradeFlags></FileUpgradeFlags>
+    <FileUpgradeFlags>
+    </FileUpgradeFlags>
     <OldToolsVersion>3.5</OldToolsVersion>
     <UpgradeBackupLocation />
     <PublishUrl>publish\</PublishUrl>
@@ -126,6 +127,10 @@
       <Project>{5D4AD9BE-1FFB-41AB-9943-25737971BF57}</Project>
       <Name>Lucene.Net</Name>
     </ProjectReference>
+    <ProjectReference Include="..\Analyzers\Contrib.Analyzers.csproj">
+      <Project>{4286e961-9143-4821-b46d-3d39d3736386}</Project>
+      <Name>Contrib.Analyzers</Name>
+    </ProjectReference>
   </ItemGroup>
   <ItemGroup>
     <BootstrapperPackage Include=".NETFramework,Version=v4.0">

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/QueryParsers/Classic/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/src/contrib/QueryParsers/Classic/FastCharStream.cs b/src/contrib/QueryParsers/Classic/FastCharStream.cs
new file mode 100644
index 0000000..6e3a39e
--- /dev/null
+++ b/src/contrib/QueryParsers/Classic/FastCharStream.cs
@@ -0,0 +1,134 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.QueryParsers.Classic
+{
+    public sealed class FastCharStream : ICharStream
+    {
+        internal char[] buffer = null;
+
+        internal int bufferLength = 0;          // end of valid chars
+        internal int bufferPosition = 0;        // next char to read
+
+        internal int tokenStart = 0;          // offset in buffer
+        internal int bufferStart = 0;          // position in file of buffer
+
+        internal TextReader input;            // source of chars
+
+        public FastCharStream(TextReader r)
+        {
+            input = r;
+        }
+
+        public char ReadChar()
+        {
+            if (bufferPosition >= bufferLength)
+                Refill();
+            return buffer[bufferPosition++];
+        }
+
+        private void Refill()
+        {
+            int newPosition = bufferLength - tokenStart;
+
+            if (tokenStart == 0)
+            {        // token won't fit in buffer
+                if (buffer == null)
+                {        // first time: alloc buffer
+                    buffer = new char[2048];
+                }
+                else if (bufferLength == buffer.Length)
+                { // grow buffer
+                    char[] newBuffer = new char[buffer.Length * 2];
+                    Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
+                    buffer = newBuffer;
+                }
+            }
+            else
+            {            // shift token to front
+                Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
+            }
+
+            bufferLength = newPosition;        // update state
+            bufferPosition = newPosition;
+            bufferStart += tokenStart;
+            tokenStart = 0;
+
+            int charsRead =          // fill space in buffer
+              input.Read(buffer, newPosition, buffer.Length - newPosition);
+            if (charsRead == -1)
+                throw new IOException("read past eof");
+            else
+                bufferLength += charsRead;
+        }
+        
+        public char BeginToken()
+        {
+            tokenStart = bufferPosition;
+            return ReadChar();
+        }
+        
+        public void Backup(int amount)
+        {
+            bufferPosition -= amount;
+        }
+
+        public string GetImage()
+        {
+            return new String(buffer, tokenStart, bufferPosition - tokenStart);
+        }
+
+        public char[] GetSuffix(int len)
+        {
+            char[] value = new char[len];
+            Array.Copy(buffer, bufferPosition - len, value, 0, len);
+            return value;
+        }
+
+        public void Done()
+        {
+            try
+            {
+                input.Close();
+            }
+            catch (IOException)
+            {
+            }
+        }
+
+        public int Column
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int Line
+        {
+            get { return 1; }
+        }
+
+        public int EndColumn
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int EndLine
+        {
+            get { return 1; }
+        }
+
+        public int BeginColumn
+        {
+            get { return bufferStart + tokenStart; }
+        }
+
+        public int BeginLine
+        {
+            get { return 1; }
+        }
+        
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/QueryParsers/Classic/ICharStream.cs
----------------------------------------------------------------------
diff --git a/src/contrib/QueryParsers/Classic/ICharStream.cs b/src/contrib/QueryParsers/Classic/ICharStream.cs
new file mode 100644
index 0000000..d68cfba
--- /dev/null
+++ b/src/contrib/QueryParsers/Classic/ICharStream.cs
@@ -0,0 +1,37 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.QueryParsers.Classic
+{
    /// <summary>
    /// Describes a character stream for the JavaCC-generated token manager:
    /// it yields characters one at a time, can back up a bounded number of
    /// characters, and reports line/column positions of the current token.
    /// (Port of JavaCC's CharStream contract as a .NET interface.)
    /// </summary>
    public interface ICharStream
    {
        /// <summary>Returns the next character from the input.</summary>
        char ReadChar();

        /// <summary>Column of the last character read; superseded by <see cref="EndColumn"/>.</summary>
        [Obsolete]
        int Column { get; }

        /// <summary>Line of the last character read; superseded by <see cref="EndLine"/>.</summary>
        [Obsolete]
        int Line { get; }

        /// <summary>Column of the last character of the current token.</summary>
        int EndColumn { get; }

        /// <summary>Line of the last character of the current token.</summary>
        int EndLine { get; }

        /// <summary>Column of the first character of the current token.</summary>
        int BeginColumn { get; }

        /// <summary>Line of the first character of the current token.</summary>
        int BeginLine { get; }

        /// <summary>Backs up the stream by <paramref name="amount"/> characters.</summary>
        void Backup(int amount);

        /// <summary>Starts a new token and returns its first character.</summary>
        char BeginToken();

        /// <summary>Returns the text of the current token.</summary>
        string GetImage();

        /// <summary>Returns the last <paramref name="len"/> characters read.</summary>
        char[] GetSuffix(int len);

        /// <summary>Releases any resources (e.g. closes the underlying reader).</summary>
        void Done();
    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/QueryParsers/Classic/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/contrib/QueryParsers/Classic/ParseException.cs b/src/contrib/QueryParsers/Classic/ParseException.cs
new file mode 100644
index 0000000..253c0cb
--- /dev/null
+++ b/src/contrib/QueryParsers/Classic/ParseException.cs
@@ -0,0 +1,153 @@
+using System;
+using System.Collections.Generic;
+using System.Configuration;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.QueryParsers.Classic
+{
+    public class ParseException : Exception
+    {
+        private const long serialVersionUID = 1L;
+
+        public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        String[] tokenImageVal
+                       )
+            : base(Initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
+        {
+            currentToken = currentTokenVal;
+            expectedTokenSequences = expectedTokenSequencesVal;
+            tokenImage = tokenImageVal;
+        }
+
+        public ParseException()
+            : base()
+        {
+        }
+
+        public ParseException(String message)
+            : base(message)
+        {
+        }
+
+        // .NET Port: not present in Java version but needed for inner exception
+        public ParseException(String message, Exception innerException)
+            : base(message, innerException)
+        {
+        }
+
+
+        public Token currentToken;
+
+        public int[][] expectedTokenSequences;
+
+        public String[] tokenImage;
+
+        private static String Initialise(Token currentToken,
+                           int[][] expectedTokenSequences,
+                           String[] tokenImage)
+        {
+            String eol = ConfigurationManager.AppSettings["line.separator"] ?? "\n";
+            StringBuilder expected = new StringBuilder();
+            int maxSize = 0;
+            for (int i = 0; i < expectedTokenSequences.Length; i++)
+            {
+                if (maxSize < expectedTokenSequences[i].Length)
+                {
+                    maxSize = expectedTokenSequences[i].Length;
+                }
+                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
+                {
+                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
+                }
+                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
+                {
+                    expected.Append("...");
+                }
+                expected.Append(eol).Append("    ");
+            }
+            String retval = "Encountered \"";
+            Token tok = currentToken.next;
+            for (int i = 0; i < maxSize; i++)
+            {
+                if (i != 0) retval += " ";
+                if (tok.kind == 0)
+                {
+                    retval += tokenImage[0];
+                    break;
+                }
+                retval += " " + tokenImage[tok.kind];
+                retval += " \"";
+                retval += Add_escapes(tok.image);
+                retval += " \"";
+                tok = tok.next;
+            }
+            retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+            retval += "." + eol;
+            if (expectedTokenSequences.Length == 1)
+            {
+                retval += "Was expecting:" + eol + "    ";
+            }
+            else
+            {
+                retval += "Was expecting one of:" + eol + "    ";
+            }
+            retval += expected.ToString();
+            return retval;
+        }
+
+        protected String eol = ConfigurationManager.AppSettings["line.separator"] ?? "\n";
+
+        static String Add_escapes(String str)
+        {
+            StringBuilder retval = new StringBuilder();
+            char ch;
+            for (int i = 0; i < str.Length; i++)
+            {
+                switch (str[i])
+                {
+                    case (char)0:
+                        continue;
+                    case '\b':
+                        retval.Append("\\b");
+                        continue;
+                    case '\t':
+                        retval.Append("\\t");
+                        continue;
+                    case '\n':
+                        retval.Append("\\n");
+                        continue;
+                    case '\f':
+                        retval.Append("\\f");
+                        continue;
+                    case '\r':
+                        retval.Append("\\r");
+                        continue;
+                    case '\"':
+                        retval.Append("\\\"");
+                        continue;
+                    case '\'':
+                        retval.Append("\\\'");
+                        continue;
+                    case '\\':
+                        retval.Append("\\\\");
+                        continue;
+                    default:
+                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
+                        {
+                            String s = "0000" + Convert.ToString(ch, 16);
+                            retval.Append("\\u" + s.Substring(s.Length - 4, s.Length));
+                        }
+                        else
+                        {
+                            retval.Append(ch);
+                        }
+                        continue;
+                }
+            }
+            return retval.ToString();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/QueryParsers/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/contrib/QueryParsers/Classic/QueryParser.cs b/src/contrib/QueryParsers/Classic/QueryParser.cs
new file mode 100644
index 0000000..ca76ac5
--- /dev/null
+++ b/src/contrib/QueryParsers/Classic/QueryParser.cs
@@ -0,0 +1,785 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.Search;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Version = Lucene.Net.Util.Version;
+
+namespace Lucene.Net.QueryParsers.Classic
+{
+    public class QueryParser : QueryParserBase, IQueryParserConstants
+    {
        /// <summary>
        /// The default boolean operator applied between query clauses when none
        /// is written explicitly: OR or AND.
        /// </summary>
        public enum Operator
        {
            OR,
            AND
        }
+
        /// <summary>
        /// Constructs a query parser for the given default field and analyzer.
        /// </summary>
        /// <param name="matchVersion">Lucene version compatibility to target</param>
        /// <param name="f">default field for query terms</param>
        /// <param name="a">analyzer used to tokenize query text</param>
        // Delegates to the generated ICharStream constructor (not visible in this
        // chunk) with an empty reader; Init — presumably inherited from
        // QueryParserBase, TODO confirm — stores the configuration.
        public QueryParser(Version matchVersion, String f, Analyzer a)
            : this(new FastCharStream(new StringReader("")))
        {
            Init(matchVersion, f, a);
        }
+
        /// <summary>
        /// Parses an optional conjunction operator (AND / OR) at the current
        /// position; returns CONJ_AND, CONJ_OR, or CONJ_NONE when absent.
        /// Hand-ported JavaCC-generated code: the jj_* members are parser
        /// bookkeeping, presumably defined in QueryParserBase or generated
        /// support code elsewhere in this file — not visible in this chunk.
        /// </summary>
        public int Conjunction()
        {
            int ret = CONJ_NONE;
            // (_jj_ntk == -1) ? jj_ntk() : _jj_ntk — peek at the next token's
            // kind, computing it lazily on first use (JavaCC idiom).
            switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
            {
                case QueryParserConstants.AND:
                case QueryParserConstants.OR:
                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
                    {
                        case QueryParserConstants.AND:
                            jj_consume_token(QueryParserConstants.AND);
                            ret = CONJ_AND;
                            break;
                        case QueryParserConstants.OR:
                            jj_consume_token(QueryParserConstants.OR);
                            ret = CONJ_OR;
                            break;
                        default:
                            // Unreachable given the outer case labels; kept verbatim
                            // from the generated source.
                            jj_la1[0] = jj_gen;
                            jj_consume_token(-1);
                            throw new ParseException();
                    }
                    break;
                default:
                    // No conjunction present — record lookahead failure state.
                    jj_la1[1] = jj_gen;
                    break;
            }
            // Generated-code idiom: the wrapped return keeps the trailing
            // "missing return" guard formally reachable for the Java compiler.
            { if (true) return ret; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Parses an optional clause modifier (+, -, or NOT) at the current
        /// position; returns MOD_REQ, MOD_NOT, or MOD_NONE when absent.
        /// Hand-ported JavaCC-generated code; jj_* members are parser
        /// bookkeeping defined elsewhere (not visible in this chunk).
        /// </summary>
        public int Modifiers()
        {
            int ret = MOD_NONE;
            // Lazily computed next-token-kind peek (JavaCC idiom).
            switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
            {
                case QueryParserConstants.NOT:
                case QueryParserConstants.PLUS:
                case QueryParserConstants.MINUS:
                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
                    {
                        case QueryParserConstants.PLUS:
                            jj_consume_token(QueryParserConstants.PLUS);
                            ret = MOD_REQ;
                            break;
                        case QueryParserConstants.MINUS:
                            jj_consume_token(QueryParserConstants.MINUS);
                            ret = MOD_NOT;
                            break;
                        case QueryParserConstants.NOT:
                            jj_consume_token(QueryParserConstants.NOT);
                            ret = MOD_NOT;
                            break;
                        default:
                            // Unreachable given the outer case labels; kept verbatim.
                            jj_la1[2] = jj_gen;
                            jj_consume_token(-1);
                            throw new ParseException();
                    }
                    break;
                default:
                    // No modifier present.
                    jj_la1[3] = jj_gen;
                    break;
            }
            // Generated-code idiom; see Conjunction().
            { if (true) return ret; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Top-level parser entry point: parses a full query and then requires
        /// end-of-input (token kind 0).
        /// </summary>
        /// <param name="field">default field for terms without an explicit field</param>
        public override Query TopLevelQuery(String field)
        {
            Query q;
            q = Query(field);
            jj_consume_token(0);   // require EOF after the query
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Parses a sequence of clauses separated by optional conjunctions and
        /// modifiers, accumulating them into BooleanClauses.  If exactly one
        /// unmodified clause was parsed, returns it directly; otherwise builds
        /// a boolean query via GetBooleanQuery (presumably in QueryParserBase —
        /// not visible in this chunk).
        /// </summary>
        /// <param name="field">default field for terms without an explicit field</param>
        public Query Query(String field)
        {
            IList<BooleanClause> clauses = new List<BooleanClause>();
            Query q, firstQuery = null;
            int conj, mods;
            mods = Modifiers();
            q = Clause(field);
            AddClause(clauses, CONJ_NONE, mods, q);
            if (mods == MOD_NONE)
                firstQuery = q;

            // .NET port of Java's labeled-break loop: shouldBreakOuter stands in
            // for "break label_1;" in the generated source.
            while (true)
            {
                bool shouldBreakOuter = false;
                // Continue only while the next token can start another clause.
                switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
                {
                    case QueryParserConstants.AND:
                    case QueryParserConstants.OR:
                    case QueryParserConstants.NOT:
                    case QueryParserConstants.PLUS:
                    case QueryParserConstants.MINUS:
                    case QueryParserConstants.BAREOPER:
                    case QueryParserConstants.LPAREN:
                    case QueryParserConstants.STAR:
                    case QueryParserConstants.QUOTED:
                    case QueryParserConstants.TERM:
                    case QueryParserConstants.PREFIXTERM:
                    case QueryParserConstants.WILDTERM:
                    case QueryParserConstants.REGEXPTERM:
                    case QueryParserConstants.RANGEIN_START:
                    case QueryParserConstants.RANGEEX_START:
                    case QueryParserConstants.NUMBER:
                        ;
                        break;
                    default:
                        jj_la1[4] = jj_gen;
                        shouldBreakOuter = true;
                        break;
                }

                if (shouldBreakOuter) break;
                conj = Conjunction();
                mods = Modifiers();
                q = Clause(field);
                AddClause(clauses, conj, mods, q);
            }
            if (clauses.Count == 1 && firstQuery != null)
            { if (true) return firstQuery; }
            else
            {
                { if (true) return GetBooleanQuery(clauses); }
            }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Parses a single clause: an optional "field:" or "*:" prefix (decided
        /// by a 2-token lookahead, jj_2_1), followed by either a term or a
        /// parenthesized sub-query with an optional ^boost.
        /// Hand-ported JavaCC-generated code; jj_* members and helpers such as
        /// DiscardEscapeChar / HandleBoost are defined elsewhere (presumably in
        /// QueryParserBase — not visible in this chunk).
        /// </summary>
        /// <param name="field">default field, overridden by an explicit "field:" prefix</param>
        public Query Clause(String field)
        {
            Query q;
            Token fieldToken = null, boost = null;
            // jj_2_1(2): syntactic lookahead — is the next input "TERM :" or "* :"?
            if (jj_2_1(2))
            {
                switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
                {
                    case QueryParserConstants.TERM:
                        fieldToken = jj_consume_token(QueryParserConstants.TERM);
                        jj_consume_token(QueryParserConstants.COLON);
                        field = DiscardEscapeChar(fieldToken.image);
                        break;
                    case QueryParserConstants.STAR:
                        // "*:" means match-all-fields.
                        jj_consume_token(QueryParserConstants.STAR);
                        jj_consume_token(QueryParserConstants.COLON);
                        field = "*";
                        break;
                    default:
                        jj_la1[5] = jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
            }
            else
            {
                ;
            }
            switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
            {
                case QueryParserConstants.BAREOPER:
                case QueryParserConstants.STAR:
                case QueryParserConstants.QUOTED:
                case QueryParserConstants.TERM:
                case QueryParserConstants.PREFIXTERM:
                case QueryParserConstants.WILDTERM:
                case QueryParserConstants.REGEXPTERM:
                case QueryParserConstants.RANGEIN_START:
                case QueryParserConstants.RANGEEX_START:
                case QueryParserConstants.NUMBER:
                    q = Term(field);
                    break;
                case QueryParserConstants.LPAREN:
                    // Parenthesized sub-query, optionally boosted: "( ... )^n".
                    jj_consume_token(QueryParserConstants.LPAREN);
                    q = Query(field);
                    jj_consume_token(QueryParserConstants.RPAREN);
                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
                    {
                        case QueryParserConstants.CARAT:
                            jj_consume_token(QueryParserConstants.CARAT);
                            boost = jj_consume_token(QueryParserConstants.NUMBER);
                            break;
                        default:
                            jj_la1[6] = jj_gen;
                            break;
                    }
                    break;
                default:
                    jj_la1[7] = jj_gen;
                    jj_consume_token(-1);
                    throw new ParseException();
            }
            { if (true) return HandleBoost(q, boost); }
            throw new Exception("Missing return statement in function");
        }
+
+        public Query Term(String field)
+        {
+            Token term, boost = null, fuzzySlop = null, goop1, goop2;
+            bool prefix = false;
+            bool wildcard = false;
+            bool fuzzy = false;
+            bool regexp = false;
+            bool startInc = false;
+            bool endInc = false;
+            Query q;
+            switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+            {
+                case QueryParserConstants.BAREOPER:
+                case QueryParserConstants.STAR:
+                case QueryParserConstants.TERM:
+                case QueryParserConstants.PREFIXTERM:
+                case QueryParserConstants.WILDTERM:
+                case QueryParserConstants.REGEXPTERM:
+                case QueryParserConstants.NUMBER:
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.TERM:
+                            term = jj_consume_token(QueryParserConstants.TERM);
+                            break;
+                        case QueryParserConstants.STAR:
+                            term = jj_consume_token(QueryParserConstants.STAR);
+                            wildcard = true;
+                            break;
+                        case QueryParserConstants.PREFIXTERM:
+                            term = jj_consume_token(QueryParserConstants.PREFIXTERM);
+                            prefix = true;
+                            break;
+                        case QueryParserConstants.WILDTERM:
+                            term = jj_consume_token(QueryParserConstants.WILDTERM);
+                            wildcard = true;
+                            break;
+                        case QueryParserConstants.REGEXPTERM:
+                            term = jj_consume_token(QueryParserConstants.REGEXPTERM);
+                            regexp = true;
+                            break;
+                        case QueryParserConstants.NUMBER:
+                            term = jj_consume_token(QueryParserConstants.NUMBER);
+                            break;
+                        case QueryParserConstants.BAREOPER:
+                            term = jj_consume_token(QueryParserConstants.BAREOPER);
+                            term.image = term.image.Substring(0, 1);
+                            break;
+                        default:
+                            jj_la1[8] = jj_gen;
+                            jj_consume_token(-1);
+                            throw new ParseException();
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.FUZZY_SLOP:
+                            fuzzySlop = jj_consume_token(QueryParserConstants.FUZZY_SLOP);
+                            fuzzy = true;
+                            break;
+                        default:
+                            jj_la1[9] = jj_gen;
+                            break;
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.CARAT:
+                            jj_consume_token(QueryParserConstants.CARAT);
+                            boost = jj_consume_token(QueryParserConstants.NUMBER);
+                            switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                            {
+                                case QueryParserConstants.FUZZY_SLOP:
+                                    fuzzySlop = jj_consume_token(QueryParserConstants.FUZZY_SLOP);
+                                    fuzzy = true;
+                                    break;
+                                default:
+                                    jj_la1[10] = jj_gen;
+                                    break;
+                            }
+                            break;
+                        default:
+                            jj_la1[11] = jj_gen;
+                            break;
+                    }
+                    q = HandleBareTokenQuery(field, term, fuzzySlop, prefix, wildcard, fuzzy, regexp);
+                    break;
+                case QueryParserConstants.RANGEIN_START:
+                case QueryParserConstants.RANGEEX_START:
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.RANGEIN_START:
+                            jj_consume_token(QueryParserConstants.RANGEIN_START);
+                            startInc = true;
+                            break;
+                        case QueryParserConstants.RANGEEX_START:
+                            jj_consume_token(QueryParserConstants.RANGEEX_START);
+                            break;
+                        default:
+                            jj_la1[12] = jj_gen;
+                            jj_consume_token(-1);
+                            throw new ParseException();
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.RANGE_GOOP:
+                            goop1 = jj_consume_token(QueryParserConstants.RANGE_GOOP);
+                            break;
+                        case QueryParserConstants.RANGE_QUOTED:
+                            goop1 = jj_consume_token(QueryParserConstants.RANGE_QUOTED);
+                            break;
+                        default:
+                            jj_la1[13] = jj_gen;
+                            jj_consume_token(-1);
+                            throw new ParseException();
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.RANGE_TO:
+                            jj_consume_token(QueryParserConstants.RANGE_TO);
+                            break;
+                        default:
+                            jj_la1[14] = jj_gen;
+                            break;
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.RANGE_GOOP:
+                            goop2 = jj_consume_token(QueryParserConstants.RANGE_GOOP);
+                            break;
+                        case QueryParserConstants.RANGE_QUOTED:
+                            goop2 = jj_consume_token(QueryParserConstants.RANGE_QUOTED);
+                            break;
+                        default:
+                            jj_la1[15] = jj_gen;
+                            jj_consume_token(-1);
+                            throw new ParseException();
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.RANGEIN_END:
+                            jj_consume_token(QueryParserConstants.RANGEIN_END);
+                            endInc = true;
+                            break;
+                        case QueryParserConstants.RANGEEX_END:
+                            jj_consume_token(QueryParserConstants.RANGEEX_END);
+                            break;
+                        default:
+                            jj_la1[16] = jj_gen;
+                            jj_consume_token(-1);
+                            throw new ParseException();
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.CARAT:
+                            jj_consume_token(QueryParserConstants.CARAT);
+                            boost = jj_consume_token(QueryParserConstants.NUMBER);
+                            break;
+                        default:
+                            jj_la1[17] = jj_gen;
+                            break;
+                    }
+                    bool startOpen = false;
+                    bool endOpen = false;
+                    if (goop1.kind == QueryParserConstants.RANGE_QUOTED)
+                    {
+                        goop1.image = goop1.image.Substring(1, goop1.image.Length - 1);
+                    }
+                    else if ("*".Equals(goop1.image))
+                    {
+                        startOpen = true;
+                    }
+                    if (goop2.kind == QueryParserConstants.RANGE_QUOTED)
+                    {
+                        goop2.image = goop2.image.Substring(1, goop2.image.Length - 1);
+                    }
+                    else if ("*".Equals(goop2.image))
+                    {
+                        endOpen = true;
+                    }
+                    q = GetRangeQuery(field, startOpen ? null : DiscardEscapeChar(goop1.image), endOpen ? null : DiscardEscapeChar(goop2.image), startInc, endInc);
+                    break;
+                case QueryParserConstants.QUOTED:
+                    term = jj_consume_token(QueryParserConstants.QUOTED);
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.FUZZY_SLOP:
+                            fuzzySlop = jj_consume_token(QueryParserConstants.FUZZY_SLOP);
+                            break;
+                        default:
+                            jj_la1[18] = jj_gen;
+                            break;
+                    }
+                    switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
+                    {
+                        case QueryParserConstants.CARAT:
+                            jj_consume_token(QueryParserConstants.CARAT);
+                            boost = jj_consume_token(QueryParserConstants.NUMBER);
+                            break;
+                        default:
+                            jj_la1[19] = jj_gen;
+                            break;
+                    }
+                    q = HandleQuotedTerm(field, term, fuzzySlop);
+                    break;
+                default:
+                    jj_la1[20] = jj_gen;
+                    jj_consume_token(-1);
+                    throw new ParseException();
+            }
+            { if (true) return HandleBoost(q, boost); }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Lookahead check for choice point 1: speculatively scans ahead up to
+        /// <paramref name="xla"/> tokens without consuming any of them.
+        /// </summary>
+        /// <param name="xla">Maximum lookahead distance, in tokens.</param>
+        /// <returns>True if the lookahead expansion matches.</returns>
+        private bool jj_2_1(int xla)
+        {
+            jj_la = xla; jj_lastpos = jj_scanpos = token;
+            // jj_3_1 returns true on mismatch; LookaheadSuccess is thrown when the
+            // lookahead budget is exhausted with everything matched so far.
+            try { return !jj_3_1(); }
+            catch (LookaheadSuccess) { return true; } // was "catch (LookaheadSuccess ls)": unused variable (compiler warning CS0168)
+            finally { jj_save(0, xla); }
+        }
+
+        /// <summary>Lookahead for the production "TERM COLON" (an explicit field prefix).</summary>
+        private bool jj_3R_2()
+        {
+            // Short-circuits exactly like the original guard sequence: fails
+            // (returns true) as soon as either token fails to scan.
+            return jj_scan_token(QueryParserConstants.TERM)
+                || jj_scan_token(QueryParserConstants.COLON);
+        }
+
+        // Lookahead expansion for choice point 1: matches either "TERM COLON"
+        // (jj_3R_2) or "STAR COLON" (jj_3R_3), rewinding jj_scanpos between the
+        // two alternatives. Returns true when neither alternative matches.
+        private bool jj_3_1()
+        {
+            Token xsp;
+            xsp = jj_scanpos;   // remember the scan position for backtracking
+            if (jj_3R_2())
+            {
+                jj_scanpos = xsp;   // first alternative failed: rewind, try the second
+                if (jj_3R_3()) return true;
+            }
+            return false;
+        }
+
+        /// <summary>Lookahead for the production "STAR COLON" (the all-fields "*:" prefix).</summary>
+        private bool jj_3R_3()
+        {
+            // Equivalent short-circuit form of the original two-guard sequence.
+            return jj_scan_token(QueryParserConstants.STAR)
+                || jj_scan_token(QueryParserConstants.COLON);
+        }
+
+        /** Generated Token Manager. */
+        public QueryParserTokenManager token_source;
+        /** Current token. */
+        public Token token;
+        /** Next token. */
+        public Token jj_nt;
+        // Kind of the next token, or -1 if not yet computed (see jj_ntk()).
+        private int _jj_ntk;
+        // Cursor and end marker for the speculative lookahead scan (jj_scan_token).
+        private Token jj_scanpos, jj_lastpos;
+        // Remaining lookahead budget during a jj_2_* call.
+        private int jj_la;
+        // Generation counter, incremented each time a token is consumed.
+        private int jj_gen;
+        // Generation stamps, one per choice point, used for error reporting.
+        private readonly int[] jj_la1 = new int[21];
+        // Bit masks of the token kinds expected at each choice point
+        // (kinds 0-31 in jj_la1_0, kinds 32+ in jj_la1_1).
+        static private int[] jj_la1_0;
+        static private int[] jj_la1_1;
+
+        // Populates the expected-token bit masks exactly once.
+        static QueryParser()
+        {
+            jj_la1_init_0();
+            jj_la1_init_1();
+        }
+
+        // Expected-token masks for token kinds 0-31, one entry per choice point.
+        // Machine-generated by JavaCC; do not edit the values by hand.
+        private static void jj_la1_init_0()
+        {
+            jj_la1_0 = new int[] { 0x300, 0x300, 0x1c00, 0x1c00, 0xfda7f00, 0x120000, 0x40000, 0xfda6000, 0x9d22000, 0x200000, 0x200000, 0x40000, 0x6000000, unchecked((int)0x80000000), 0x10000000, unchecked((int)0x80000000), 0x60000000, 0x40000, 0x200000, 0x40000, 0xfda2000, };
+        }
+        // Expected-token masks for token kinds 32 and above, one entry per
+        // choice point. Machine-generated by JavaCC; do not edit by hand.
+        private static void jj_la1_init_1()
+        {
+            jj_la1_1 = new int[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, };
+        }
+
+        // Bookkeeping for the single syntactic lookahead (jj_2_1): memoized
+        // attempts, the rescan flag used during error reporting, and a consume
+        // counter used to garbage-collect stale memo records.
+        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];
+        private bool jj_rescan = false;
+        private int jj_gc = 0;
+
+        /** Constructor with user supplied CharStream. */
+        protected QueryParser(ICharStream stream)
+        {
+            token_source = new QueryParserTokenManager(stream);
+            token = new Token();   // sentinel head of the lazily-built token chain
+            _jj_ntk = -1;          // next-token kind not yet computed
+            jj_gen = 0;
+            // -1 marks every choice point as "not yet reached" for error reporting
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /** Reinitialise with a new CharStream, resetting all parser state. */
+        public override void ReInit(ICharStream stream)
+        {
+            token_source.ReInit(stream);
+            token = new Token();   // fresh sentinel; discards the old token chain
+            _jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;   // clear error-report stamps
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /** Constructor with generated Token Manager. */
+        protected QueryParser(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();   // sentinel head of the lazily-built token chain
+            _jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;   // clear error-report stamps
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /** Reinitialise with a new Token Manager, resetting all parser state. */
+        public void ReInit(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();   // fresh sentinel; discards the old token chain
+            _jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;   // clear error-report stamps
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        // Consumes the next token, verifying it has the expected kind. On a
+        // mismatch the token position is restored and a ParseException listing
+        // the expected tokens is thrown.
+        private Token jj_consume_token(int kind)
+        {
+            Token oldToken;
+            // Advance along the chain, pulling a new token from the token
+            // manager only when the chain has not been extended yet.
+            if ((oldToken = token).next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            _jj_ntk = -1;   // cached next-token kind is now stale
+            if (token.kind == kind)
+            {
+                jj_gen++;
+                // Every 100 consumed tokens, drop lookahead memos from older
+                // generations so the token chain behind them can be reclaimed.
+                if (++jj_gc > 100)
+                {
+                    jj_gc = 0;
+                    for (int i = 0; i < jj_2_rtns.Length; i++)
+                    {
+                        JJCalls c = jj_2_rtns[i];
+                        while (c != null)
+                        {
+                            if (c.gen < jj_gen) c.first = null;
+                            c = c.next;
+                        }
+                    }
+                }
+                return token;
+            }
+            // Wrong kind: back out the advance and report what was expected.
+            token = oldToken;
+            jj_kind = kind;
+            throw GenerateParseException();
+        }
+
+        // Control-flow exception thrown by jj_scan_token when a lookahead
+        // succeeds within its budget; a single cached instance (jj_ls) is
+        // reused to avoid allocating on this hot path.
+        private sealed class LookaheadSuccess : Exception { }
+
+        private readonly LookaheadSuccess jj_ls = new LookaheadSuccess();
+
+        // Speculatively matches one token during lookahead: advances jj_scanpos
+        // (fetching new tokens from the token manager as needed), records the
+        // visited tokens when rescanning for an error report, and returns true
+        // on a kind mismatch. Throws the cached jj_ls to signal that the
+        // lookahead budget was satisfied.
+        private bool jj_scan_token(int kind)
+        {
+            if (jj_scanpos == jj_lastpos)
+            {
+                // At the frontier of scanned tokens: spend one unit of budget
+                // and extend the chain if necessary.
+                jj_la--;
+                if (jj_scanpos.next == null)
+                {
+                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
+                }
+                else
+                {
+                    jj_lastpos = jj_scanpos = jj_scanpos.next;
+                }
+            }
+            else
+            {
+                jj_scanpos = jj_scanpos.next;   // replaying already-scanned tokens
+            }
+            if (jj_rescan)
+            {
+                // During error-report rescans, note the position of this token
+                // relative to the current token for jj_add_error_token.
+                int i = 0; Token tok = token;
+                while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
+                if (tok != null) jj_add_error_token(kind, i);
+            }
+            if (jj_scanpos.kind != kind) return true;
+            // Budget exhausted at the frontier with everything matched: success.
+            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
+            return false;
+        }
+
+        /** Get the next Token, consuming it and advancing the parse position. */
+        public Token GetNextToken()
+        {
+            // Reuse a token already on the chain, or fetch a new one.
+            if (token.next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            _jj_ntk = -1;   // cached next-token kind is now stale
+            jj_gen++;
+            return token;
+        }
+
+        /** Get the Token `index` positions ahead of the current one, without consuming anything. */
+        public Token GetToken(int index)
+        {
+            // Walk (and lazily materialize) the token chain `index` steps forward.
+            Token current = token;
+            for (int step = 0; step < index; step++)
+            {
+                current = current.next ?? (current.next = token_source.GetNextToken());
+            }
+            return current;
+        }
+
+        // Returns the kind of the token after the current one, caching it in
+        // _jj_ntk and fetching from the token manager when the chain has not
+        // been extended yet. jj_nt keeps a reference to that next token.
+        private int jj_ntk()
+        {
+            if ((jj_nt = token.next) == null)
+                return (_jj_ntk = (token.next = token_source.GetNextToken()).kind);
+            else
+                return (_jj_ntk = jj_nt.kind);
+        }
+
+        // Error-reporting state: sequences of expected token kinds collected
+        // while replaying failed lookaheads (see GenerateParseException and
+        // jj_add_error_token).
+        private IList<int[]> jj_expentries = new List<int[]>();
+        private int[] jj_expentry;
+        private int jj_kind = -1;                 // kind jj_consume_token failed to match, or -1
+        private int[] jj_lasttokens = new int[100];   // rolling buffer of scanned kinds
+        private int jj_endpos;                    // number of valid entries in jj_lasttokens
+
+        // Accumulates token kinds seen while rescanning a failed lookahead into
+        // jj_lasttokens; when a non-contiguous position arrives, the buffered
+        // run is flushed into jj_expentries as one "expected sequence".
+        private void jj_add_error_token(int kind, int pos)
+        {
+            if (pos >= 100) return;   // buffer is fixed at 100 entries
+            if (pos == jj_endpos + 1)
+            {
+                // Contiguous with the current run: just append.
+                jj_lasttokens[jj_endpos++] = kind;
+            }
+            else if (jj_endpos != 0)
+            {
+                // Run broken: snapshot the buffered kinds into jj_expentry.
+                jj_expentry = new int[jj_endpos];
+                for (int i = 0; i < jj_endpos; i++)
+                {
+                    jj_expentry[i] = jj_lasttokens[i];
+                }
+
+                // The flag below emulates Java's labeled "continue jj_entries_loop"
+                // from the generated original: skip entries that differ, record
+                // the snapshot when a same-length entry matches element-for-element.
+                foreach (int[] oldentry in jj_expentries)
+                {
+                    bool shouldContinueOuter = false;
+                    if (oldentry.Length == jj_expentry.Length)
+                    {
+                        for (int i = 0; i < jj_expentry.Length; i++)
+                        {
+                            if (oldentry[i] != jj_expentry[i])
+                            {
+                                shouldContinueOuter = true;
+                                break;
+                            }
+                        }
+
+                        if (shouldContinueOuter)
+                            continue;
+                        jj_expentries.Add(jj_expentry);
+                        break;
+                    }
+                }
+                // Restart the run at the new position.
+                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+            }
+        }
+
+        /** Generate a ParseException describing the current token and every token kind that would have been legal here. */
+        public ParseException GenerateParseException()
+        {
+            jj_expentries.Clear();
+            // One flag per token kind (33 kinds in this grammar's token table).
+            bool[] la1tokens = new bool[33];
+            if (jj_kind >= 0)
+            {
+                // The kind jj_consume_token failed on is always expected.
+                la1tokens[jj_kind] = true;
+                jj_kind = -1;
+            }
+            // Any choice point stamped with the current generation contributes
+            // its expected-token masks (low 32 kinds from jj_la1_0, rest from jj_la1_1).
+            for (int i = 0; i < 21; i++)
+            {
+                if (jj_la1[i] == jj_gen)
+                {
+                    for (int j = 0; j < 32; j++)
+                    {
+                        if ((jj_la1_0[i] & (1 << j)) != 0)
+                        {
+                            la1tokens[j] = true;
+                        }
+                        if ((jj_la1_1[i] & (1 << j)) != 0)
+                        {
+                            la1tokens[32 + j] = true;
+                        }
+                    }
+                }
+            }
+            // Each expected kind becomes a one-element expected sequence.
+            for (int i = 0; i < 33; i++)
+            {
+                if (la1tokens[i])
+                {
+                    jj_expentry = new int[1];
+                    jj_expentry[0] = i;
+                    jj_expentries.Add(jj_expentry);
+                }
+            }
+            jj_endpos = 0;
+            // Replay memoized lookaheads to add multi-token expected sequences.
+            jj_rescan_token();
+            jj_add_error_token(0, 0);
+            int[][] exptokseq = new int[jj_expentries.Count][];
+            for (int i = 0; i < jj_expentries.Count; i++)
+            {
+                exptokseq[i] = jj_expentries[i];
+            }
+            return new ParseException(token, exptokseq, QueryParserConstants.tokenImage);
+        }
+
+        /** Enable tracing. No-op in this port: the parser was generated without debug tracing. */
+        public virtual void EnableTracing()
+        {
+        }
+
+        /** Disable tracing. No-op in this port: the parser was generated without debug tracing. */
+        public virtual void DisableTracing()
+        {
+        }
+
+        /// <summary>
+        /// Replays every still-valid memoized lookahead attempt so that tokens
+        /// inspected during speculation are folded into the "expected token"
+        /// error report (via jj_scan_token's jj_rescan path).
+        /// </summary>
+        private void jj_rescan_token()
+        {
+            jj_rescan = true;
+            for (int i = 0; i < 1; i++)   // one entry per jj_2_* method (this grammar has one)
+            {
+                try
+                {
+                    JJCalls p = jj_2_rtns[i];
+                    do
+                    {
+                        if (p.gen > jj_gen)   // memo still valid for the current position
+                        {
+                            jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
+                            switch (i)
+                            {
+                                case 0: jj_3_1(); break;
+                            }
+                        }
+                        p = p.next;
+                    } while (p != null);
+                }
+                // was "catch (LookaheadSuccess ls)": unused variable (compiler
+                // warning CS0168). A success simply ends this replay.
+                catch (LookaheadSuccess) { }
+            }
+            jj_rescan = false;
+        }
+
+        // Memoizes the lookahead attempt for choice point `index`: records the
+        // starting token, the lookahead limit `xla`, and a generation up to
+        // which the memo stays valid, so jj_rescan_token can replay it later.
+        private void jj_save(int index, int xla)
+        {
+            JJCalls p = jj_2_rtns[index];
+            // Find the first expired record in the list, appending if needed.
+            while (p.gen > jj_gen)
+            {
+                if (p.next == null) { p = p.next = new JJCalls(); break; }
+                p = p.next;
+            }
+            p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
+        }
+
+        // One memoized syntactic-lookahead attempt (linked list per choice
+        // point): the generation it stays valid until, the token it started at,
+        // and the lookahead limit that was used.
+        internal sealed class JJCalls
+        {
+            public int gen;      // valid while gen > jj_gen
+            public Token first;  // token at which the lookahead started
+            public int arg;      // lookahead limit (xla) passed to jj_2_*
+            public JJCalls next;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0e6eb14a/src/contrib/QueryParsers/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/src/contrib/QueryParsers/Classic/QueryParserBase.cs b/src/contrib/QueryParsers/Classic/QueryParserBase.cs
new file mode 100644
index 0000000..5425f0c
--- /dev/null
+++ b/src/contrib/QueryParsers/Classic/QueryParserBase.cs
@@ -0,0 +1,1033 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParsers.Flexible.Standard;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Operator = Lucene.Net.QueryParsers.Classic.QueryParser.Operator;
+using Version = Lucene.Net.Util.Version;
+
+namespace Lucene.Net.QueryParsers.Classic
+{
+    public abstract class QueryParserBase : ICommonQueryParserConfiguration
+    {
+        /** Do not catch this exception in your code, it means you are using methods that you should no longer use. Thrown to flag removed legacy APIs. */
+        public class MethodRemovedUseAnother : Exception { }
+
+        // Conjunction markers produced by the grammar for how a clause is
+        // joined to the previous one (none / AND / OR).
+        internal const int CONJ_NONE = 0;
+        internal const int CONJ_AND = 1;
+        internal const int CONJ_OR = 2;
+
+        // Modifier markers for a clause ("-"/NOT and "+"/required prefixes).
+        internal const int MOD_NONE = 0;
+        internal const int MOD_NOT = 10;
+        internal const int MOD_REQ = 11;
+
+        // make it possible to call setDefaultOperator() without accessing
+        // the nested class:
+        /** Alternative form of QueryParser.Operator.AND */
+        public static readonly Operator AND_OPERATOR = Operator.AND;
+        /** Alternative form of QueryParser.Operator.OR */
+        public static readonly Operator OR_OPERATOR = Operator.OR;
+
+        /** The actual operator that parser uses to combine query terms */
+        internal Operator operator_renamed = OR_OPERATOR;
+
+        // Expansion/parsing options; see the corresponding public properties.
+        internal bool lowercaseExpandedTerms = true;
+        internal MultiTermQuery.RewriteMethod multiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
+        internal bool allowLeadingWildcard = false;
+        internal bool enablePositionIncrements = true;
+
+        internal Analyzer analyzer;
+        internal String field;          // default field used when none is specified in the query
+        internal int phraseSlop = 0;
+        internal float fuzzyMinSim = FuzzyQuery.defaultMinSimilarity;
+        internal int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
+        internal CultureInfo locale = CultureInfo.InvariantCulture;
+        internal TimeZone timeZone = TimeZone.CurrentTimeZone;
+
+        // the default date resolution
+        internal DateTools.Resolution dateResolution = null;
+        // maps field names to date resolutions
+        internal IDictionary<String, DateTools.Resolution> fieldToDateResolution = null;
+
+        //Whether or not to analyze range terms when constructing RangeQuerys
+        // (For example, analyzing terms into collation keys for locale-sensitive RangeQuery)
+        internal bool analyzeRangeTerms = false;
+
+        // Pre-3.1 compatibility switch; see the AutoGeneratePhraseQueries property.
+        internal bool autoGeneratePhraseQueries;
+
+        // So the generated QueryParser(CharStream) won't error out.
+        // Callers must invoke Init(Version, String, Analyzer) before parsing.
+        protected QueryParserBase()
+        {
+        }
+
+        /// <summary>
+        /// Initializes the parser with its default field and analyzer.
+        /// Auto-generated phrase queries are enabled only for pre-3.1 match versions.
+        /// </summary>
+        public void Init(Version matchVersion, String f, Analyzer a)
+        {
+            analyzer = a;
+            field = f;
+            // Same branch as the original if/else, collapsed into one assignment:
+            // 3.1+ behavior does not auto-generate phrase queries.
+            AutoGeneratePhraseQueries = !matchVersion.OnOrAfter(Version.LUCENE_31);
+        }
+
+        // The generated parser (QueryParser) supplies these members:
+        // ReInit resets the parser on a new character stream; TopLevelQuery
+        // parses a complete query (followed by EOF) against the given field.
+        public abstract void ReInit(ICharStream stream);
+        public abstract Query TopLevelQuery(String field);
+
+        /// <summary>
+        /// Parses a query string using the default field set in Init.
+        /// Returns an empty BooleanQuery when the input yields no clauses.
+        /// </summary>
+        /// <param name="query">The query text to parse.</param>
+        /// <exception cref="ParseException">When the text cannot be parsed; wraps the underlying cause and includes the original query text.</exception>
+        public Query Parse(String query)
+        {
+            ReInit(new FastCharStream(new StringReader(query)));
+            try
+            {
+                // TopLevelQuery is a Query followed by the end-of-input (EOF)
+                Query res = TopLevelQuery(field);
+                return res != null ? res : NewBooleanQuery(false);
+            }
+            // Throw directly instead of assigning to a temporary first — the
+            // "var e = new ...; throw e;" pattern in the original is redundant.
+            catch (ParseException tme)
+            {
+                // rethrow to include the original query:
+                throw new ParseException("Cannot parse '" + query + "': " + tme.Message, tme);
+            }
+            catch (TokenMgrError tme)
+            {
+                throw new ParseException("Cannot parse '" + query + "': " + tme.Message, tme);
+            }
+            catch (BooleanQuery.TooManyClauses tmc)
+            {
+                throw new ParseException("Cannot parse '" + query + "': too many boolean clauses", tmc);
+            }
+        }
+
+        /// <summary>The analyzer used to tokenize query text (set via Init).</summary>
+        public Analyzer Analyzer
+        {
+            get { return analyzer; }
+        }
+
+        /// <summary>The default field searched when a query term has no explicit field (set via Init).</summary>
+        public string Field
+        {
+            get { return field; }
+        }
+
+        /// <summary>Whether multi-token terms are automatically turned into phrase queries (pre-3.1 behavior).</summary>
+        public bool AutoGeneratePhraseQueries
+        {
+            get { return autoGeneratePhraseQueries; }
+            set { autoGeneratePhraseQueries = value; }
+        }
+
+        /// <summary>Minimum similarity for fuzzy ("~") queries; defaults to FuzzyQuery.defaultMinSimilarity.</summary>
+        public float FuzzyMinSim
+        {
+            get { return fuzzyMinSim; }
+            set { fuzzyMinSim = value; }
+        }
+
+        /// <summary>Prefix length for fuzzy queries; defaults to FuzzyQuery.defaultPrefixLength.</summary>
+        public int FuzzyPrefixLength
+        {
+            get { return fuzzyPrefixLength; }
+            set { fuzzyPrefixLength = value; }
+        }
+
+        /// <summary>Default slop applied to phrase queries that have no explicit "~n".</summary>
+        public int PhraseSlop
+        {
+            get { return phraseSlop; }
+            set { phraseSlop = value; }
+        }
+
+        /// <summary>Whether wildcard/prefix terms may begin with "*" or "?".</summary>
+        public bool AllowLeadingWildcard
+        {
+            get { return allowLeadingWildcard; }
+            set { allowLeadingWildcard = value; }
+        }
+
+        /// <summary>Whether token position increments are honored when building phrase queries.</summary>
+        public bool EnablePositionIncrements
+        {
+            get { return enablePositionIncrements; }
+            set { enablePositionIncrements = value; }
+        }
+
+        /// <summary>The operator (AND/OR) used to combine clauses that have no explicit conjunction.</summary>
+        public Operator DefaultOperator
+        {
+            get { return operator_renamed; }
+            set { operator_renamed = value; }
+        }
+
+        /// <summary>Whether expanded terms (wildcard, prefix, range, fuzzy) are lowercased before matching.</summary>
+        public bool LowercaseExpandedTerms
+        {
+            get { return lowercaseExpandedTerms; }
+            set { lowercaseExpandedTerms = value; }
+        }
+
+        /// <summary>Rewrite method used for multi-term queries; defaults to CONSTANT_SCORE_AUTO_REWRITE_DEFAULT.</summary>
+        public MultiTermQuery.RewriteMethod MultiTermRewriteMethod
+        {
+            get { return multiTermRewriteMethod; }
+            set { multiTermRewriteMethod = value; }
+        }
+
+        /// <summary>Culture used for locale-sensitive parsing; defaults to the invariant culture.</summary>
+        public CultureInfo Locale
+        {
+            get { return locale; }
+            set { locale = value; }
+        }
+
+        /// <summary>Time zone used when parsing dates; defaults to the current time zone.</summary>
+        public TimeZone TimeZone
+        {
+            get { return timeZone; }
+            set { timeZone = value; }
+        }
+
+        /// <summary>Default date resolution for range queries; per-field overrides via SetDateResolution.</summary>
+        public DateTools.Resolution DateResolution
+        {
+            get { return dateResolution; }
+            set { dateResolution = value; }
+        }
+
+        /// <summary>
+        /// Sets the date resolution used for the given field when parsing date ranges,
+        /// overriding the parser-wide <see cref="DateResolution"/> default.
+        /// </summary>
+        /// <param name="fieldName">The field name; must not be null.</param>
+        /// <param name="dateResolution">The resolution to apply to that field.</param>
+        /// <exception cref="ArgumentNullException">If <paramref name="fieldName"/> is null.</exception>
+        public void SetDateResolution(string fieldName, DateTools.Resolution dateResolution)
+        {
+            if (fieldName == null)
+            {
+                // ArgumentNullException is the idiomatic .NET type for a null
+                // argument; it derives from ArgumentException, so existing
+                // catch blocks for the old type still match.
+                throw new ArgumentNullException("fieldName", "Field cannot be null.");
+            }
+
+            if (fieldToDateResolution == null)
+            {
+                // lazily initialize HashMap
+                fieldToDateResolution = new HashMap<String, DateTools.Resolution>();
+            }
+
+            fieldToDateResolution[fieldName] = dateResolution;
+        }
+
+        /// <summary>
+        /// Returns the date resolution set for <paramref name="fieldName"/>,
+        /// falling back to the parser-wide default when none was set.
+        /// </summary>
+        public DateTools.Resolution GetDateResolution(string fieldName)
+        {
+            if (fieldName == null)
+            {
+                throw new ArgumentException("Field cannot be null.");
+            }
+
+            if (fieldToDateResolution == null)
+            {
+                // no field specific date resolutions set; return default date resolution instead
+                return this.dateResolution;
+            }
+
+            // NOTE(review): relies on the Support.HashMap indexer returning null
+            // (rather than throwing) for a missing key — confirm that semantic.
+            DateTools.Resolution resolution = fieldToDateResolution[fieldName];
+            if (resolution == null)
+            {
+                // no date resolutions set for the given field; return default date resolution instead
+                resolution = this.dateResolution;
+            }
+
+            return resolution;
+        }
+
+        /// <summary>Whether range endpoints are passed through the analyzer when building range queries.</summary>
+        public bool AnalyzeRangeTerms
+        {
+            get { return analyzeRangeTerms; }
+            set { analyzeRangeTerms = value; }
+        }
+
+        /// <summary>
+        /// Adds the query <paramref name="q"/> to <paramref name="clauses"/> as a
+        /// BooleanClause whose occurrence (MUST / SHOULD / MUST_NOT) is derived
+        /// from the conjunction (<paramref name="conj"/>), the modifier
+        /// (<paramref name="mods"/>) and the configured default operator. May
+        /// also retroactively adjust the occurrence of the preceding clause.
+        /// A null <paramref name="q"/> (term filtered out by the analyzer) is ignored.
+        /// </summary>
+        protected void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
+        {
+            bool required, prohibited;
+
+            // If this term is introduced by AND, make the preceding term required,
+            // unless it's already prohibited
+            if (clauses.Count > 0 && conj == CONJ_AND)
+            {
+                BooleanClause c = clauses[clauses.Count - 1];
+                if (!c.IsProhibited)
+                    c.Occur = Occur.MUST;
+            }
+
+            if (clauses.Count > 0 && operator_renamed == AND_OPERATOR && conj == CONJ_OR)
+            {
+                // If this term is introduced by OR, make the preceding term optional,
+                // unless it's prohibited (that means we leave -a OR b but +a OR b-->a OR b)
+                // notice if the input is a OR b, first term is parsed as required; without
+                // this modification a OR b would parsed as +a OR b
+                BooleanClause c = clauses[clauses.Count - 1];
+                if (!c.IsProhibited)
+                    c.Occur = Occur.SHOULD;
+            }
+
+            // We might have been passed a null query; the term might have been
+            // filtered away by the analyzer.
+            if (q == null)
+                return;
+
+            if (operator_renamed == OR_OPERATOR)
+            {
+                // We set REQUIRED if we're introduced by AND or +; PROHIBITED if
+                // introduced by NOT or -; make sure not to set both.
+                prohibited = (mods == MOD_NOT);
+                required = (mods == MOD_REQ);
+                if (conj == CONJ_AND && !prohibited)
+                {
+                    required = true;
+                }
+            }
+            else
+            {
+                // We set PROHIBITED if we're introduced by NOT or -; We set REQUIRED
+                // if not PROHIBITED and not introduced by OR
+                prohibited = (mods == MOD_NOT);
+                required = (!prohibited && conj != CONJ_OR);
+            }
+            if (required && !prohibited)
+                clauses.Add(NewBooleanClause(q, Occur.MUST));
+            else if (!required && !prohibited)
+                clauses.Add(NewBooleanClause(q, Occur.SHOULD));
+            else if (!required && prohibited)
+                clauses.Add(NewBooleanClause(q, Occur.MUST_NOT));
+            else
+                // Defensive: unreachable given the assignments above. Was
+                // "throw new SystemException(...)" — SystemException should not
+                // be thrown directly (CA2201); InvalidOperationException is the
+                // correct type for an impossible internal state.
+                throw new InvalidOperationException("Clause cannot be both required and prohibited");
+        }
+
+        /// <summary>
+        /// Builds a query for a field/text pair by delegating to
+        /// <see cref="NewFieldQuery"/> with the configured analyzer.
+        /// <paramref name="quoted"/> indicates the text came from a quoted (phrase) term.
+        /// </summary>
+        protected Query GetFieldQuery(String field, String queryText, bool quoted)
+        {
+            return NewFieldQuery(analyzer, field, queryText, quoted);
+        }
+
+        protected Query NewFieldQuery(Analyzer analyzer, String field, String queryText, bool quoted)
+        {
+            // Use the analyzer to get all the tokens, and then build a TermQuery,
+            // PhraseQuery, or nothing based on the term count
+
+            TokenStream source;
+            try
+            {
+                source = analyzer.TokenStream(field, new StringReader(queryText));
+                source.Reset();
+            }
+            catch (IOException e)
+            {
+                ParseException p = new ParseException("Unable to initialize TokenStream to analyze query text", e);
+                throw p;
+            }
+            CachingTokenFilter buffer = new CachingTokenFilter(source);
+            ITermToBytesRefAttribute termAtt = null;
+            IPositionIncrementAttribute posIncrAtt = null;
+            int numTokens = 0;
+
+            buffer.Reset();
+
+            if (buffer.HasAttribute<ITermToBytesRefAttribute>())
+            {
+                termAtt = buffer.GetAttribute<ITermToBytesRefAttribute>();
+            }
+            if (buffer.HasAttribute<IPositionIncrementAttribute>())
+            {
+                posIncrAtt = buffer.GetAttribute<IPositionIncrementAttribute>();
+            }
+
+            int positionCount = 0;
+            bool severalTokensAtSamePosition = false;
+
+            bool hasMoreTokens = false;
+            if (termAtt != null)
+            {
+                try
+                {
+                    hasMoreTokens = buffer.IncrementToken();
+                    while (hasMoreTokens)
+                    {
+                        numTokens++;
+                        int positionIncrement = (posIncrAtt != null) ? posIncrAtt.PositionIncrement : 1;
+                        if (positionIncrement != 0)
+                        {
+                            positionCount += positionIncrement;
+                        }
+                        else
+                        {
+                            severalTokensAtSamePosition = true;
+                        }
+                        hasMoreTokens = buffer.IncrementToken();
+                    }
+                }
+                catch (IOException e)
+                {
+                    // ignore
+                }
+            }
+            try
+            {
+                // rewind the buffer stream
+                buffer.Reset();
+
+                // close original stream - all tokens buffered
+                source.Dispose();
+            }
+            catch (IOException e)
+            {
+                ParseException p = new ParseException("Cannot close TokenStream analyzing query text", e);
+                throw p;
+            }
+
+            BytesRef bytes = termAtt == null ? null : termAtt.BytesRef;
+
+            if (numTokens == 0)
+                return null;
+            else if (numTokens == 1)
+            {
+                try
+                {
+                    bool hasNext = buffer.IncrementToken();
+                    //assert hasNext == true;
+                    termAtt.FillBytesRef();
+                }
+                catch (IOException e)
+                {
+                    // safe to ignore, because we know the number of tokens
+                }
+                return NewTermQuery(new Term(field, BytesRef.DeepCopyOf(bytes)));
+            }
+            else
+            {
+                if (severalTokensAtSamePosition || (!quoted && !autoGeneratePhraseQueries))
+                {
+                    if (positionCount == 1 || (!quoted && !autoGeneratePhraseQueries))
+                    {
+                        // no phrase query:
+
+                        if (positionCount == 1)
+                        {
+                            // simple case: only one position, with synonyms
+                            BooleanQuery q = NewBooleanQuery(true);
+                            for (int i = 0; i < numTokens; i++)
+                            {
+                                try
+                                {
+                                    bool hasNext = buffer.IncrementToken();
+                                    //assert hasNext == true;
+                                    termAtt.FillBytesRef();
+                                }
+                                catch (IOException e)
+                                {
+                                    // safe to ignore, because we know the number of tokens
+                                }
+                                Query currentQuery = NewTermQuery(
+                                    new Term(field, BytesRef.DeepCopyOf(bytes)));
+                                q.Add(currentQuery, Occur.SHOULD);
+                            }
+                            return q;
+                        }
+                        else
+                        {
+                            // multiple positions
+                            BooleanQuery q = NewBooleanQuery(false);
+                            Occur occur = operator_renamed == Operator.AND ? Occur.MUST : Occur.SHOULD;
+                            Query currentQuery = null;
+                            for (int i = 0; i < numTokens; i++)
+                            {
+                                try
+                                {
+                                    bool hasNext = buffer.IncrementToken();
+                                    //assert hasNext == true;
+                                    termAtt.FillBytesRef();
+                                }
+                                catch (IOException e)
+                                {
+                                    // safe to ignore, because we know the number of tokens
+                                }
+                                if (posIncrAtt != null && posIncrAtt.PositionIncrement == 0)
+                                {
+                                    if (!(currentQuery is BooleanQuery))
+                                    {
+                                        Query t = currentQuery;
+                                        currentQuery = NewBooleanQuery(true);
+                                        ((BooleanQuery)currentQuery).Add(t, Occur.SHOULD);
+                                    }
+                                    ((BooleanQuery)currentQuery).Add(NewTermQuery(new Term(field, BytesRef.DeepCopyOf(bytes))), Occur.SHOULD);
+                                }
+                                else
+                                {
+                                    if (currentQuery != null)
+                                    {
+                                        q.Add(currentQuery, occur);
+                                    }
+                                    currentQuery = NewTermQuery(new Term(field, BytesRef.DeepCopyOf(bytes)));
+                                }
+                            }
+                            q.Add(currentQuery, occur);
+                            return q;
+                        }
+                    }
+                    else
+                    {
+                        // phrase query:
+                        MultiPhraseQuery mpq = NewMultiPhraseQuery();
+                        mpq.Slop = phraseSlop;
+                        List<Term> multiTerms = new List<Term>();
+                        int position = -1;
+                        for (int i = 0; i < numTokens; i++)
+                        {
+                            int positionIncrement = 1;
+                            try
+                            {
+                                bool hasNext = buffer.IncrementToken();
+                                //assert hasNext == true;
+                                termAtt.FillBytesRef();
+                                if (posIncrAtt != null)
+                                {
+                                    positionIncrement = posIncrAtt.PositionIncrement;
+                                }
+                            }
+                            catch (IOException e)
+                            {
+                                // safe to ignore, because we know the number of tokens
+                            }
+
+                            if (positionIncrement > 0 && multiTerms.Count > 0)
+                            {
+                                if (enablePositionIncrements)
+                                {
+                                    mpq.Add(multiTerms.ToArray(), position);
+                                }
+                                else
+                                {
+                                    mpq.Add(multiTerms.ToArray());
+                                }
+                                multiTerms.Clear();
+                            }
+                            position += positionIncrement;
+                            multiTerms.Add(new Term(field, BytesRef.DeepCopyOf(bytes)));
+                        }
+                        if (enablePositionIncrements)
+                        {
+                            mpq.Add(multiTerms.ToArray(), position);
+                        }
+                        else
+                        {
+                            mpq.Add(multiTerms.ToArray());
+                        }
+                        return mpq;
+                    }
+                }
+                else
+                {
+                    PhraseQuery pq = NewPhraseQuery();
+                    pq.Slop = phraseSlop;
+                    int position = -1;
+
+                    for (int i = 0; i < numTokens; i++)
+                    {
+                        int positionIncrement = 1;
+
+                        try
+                        {
+                            bool hasNext = buffer.IncrementToken();
+                            //assert hasNext == true;
+                            termAtt.FillBytesRef();
+                            if (posIncrAtt != null)
+                            {
+                                positionIncrement = posIncrAtt.PositionIncrement;
+                            }
+                        }
+                        catch (IOException e)
+                        {
+                            // safe to ignore, because we know the number of tokens
+                        }
+
+                        if (enablePositionIncrements)
+                        {
+                            position += positionIncrement;
+                            pq.Add(new Term(field, BytesRef.DeepCopyOf(bytes)), position);
+                        }
+                        else
+                        {
+                            pq.Add(new Term(field, BytesRef.DeepCopyOf(bytes)));
+                        }
+                    }
+                    return pq;
+                }
+            }
+        }
+
+        // Builds a field query via the quoted overload and, when the result is
+        // a (multi-)phrase query, applies the requested slop to it. Other query
+        // types pass through unchanged.
+        protected Query GetFieldQuery(String field, String queryText, int slop)
+        {
+            Query query = GetFieldQuery(field, queryText, true);
+
+            PhraseQuery phrase = query as PhraseQuery;
+            if (phrase != null)
+            {
+                phrase.Slop = slop;
+            }
+
+            MultiPhraseQuery multiPhrase = query as MultiPhraseQuery;
+            if (multiPhrase != null)
+            {
+                multiPhrase.Slop = slop;
+            }
+
+            return query;
+        }
+
+        // Builds a range query over [part1, part2]. Endpoints are lower-cased
+        // when lowercaseExpandedTerms is set; each endpoint that parses as a
+        // date in the parser's locale is converted to a DateTools string at the
+        // field's date resolution, otherwise the raw text is used verbatim.
+        protected Query GetRangeQuery(String field,
+                                String part1,
+                                String part2,
+                                bool startInclusive,
+                                bool endInclusive)
+        {
+            if (lowercaseExpandedTerms)
+            {
+                part1 = part1 == null ? null : part1.ToLower(locale);
+                part2 = part2 == null ? null : part2.ToLower(locale);
+            }
+
+
+            //DateTimeFormatInfo df = DateTimeFormatInfo.GetInstance(locale);
+            //df.setLenient(true);
+            DateTools.Resolution resolution = GetDateResolution(field);
+
+            // Best-effort date handling: when part1 is null or not a parsable
+            // date, the exception is deliberately swallowed and the raw text kept.
+            try
+            {
+                part1 = DateTools.DateToString(DateTime.Parse(part1, locale), resolution);
+            }
+            catch (Exception e) { }
+
+            // Same best-effort conversion for the upper endpoint.
+            try
+            {
+                DateTime d2 = DateTime.Parse(part2, locale);
+                if (endInclusive)
+                {
+                    // The user can only specify the date, not the time, so make sure
+                    // the time is set to the latest possible time of that date to really
+                    // include all documents:
+                    d2 = d2.AddHours(23);
+                    d2 = d2.AddMinutes(59);
+                    d2 = d2.AddSeconds(59);
+                    d2 = d2.AddMilliseconds(999);
+                    // .NET Port TODO: is this right?
+                }
+                part2 = DateTools.DateToString(d2, resolution);
+            }
+            catch (Exception e) { }
+
+            return NewRangeQuery(field, part1, part2, startInclusive, endInclusive);
+        }
+
+        // Factory hook: creates the BooleanQuery used to combine clauses.
+        protected BooleanQuery NewBooleanQuery(bool disableCoord)
+        {
+            BooleanQuery result = new BooleanQuery(disableCoord);
+            return result;
+        }
+
+        // Factory hook: wraps a query and its occurrence in a BooleanClause.
+        protected BooleanClause NewBooleanClause(Query q, Occur occur)
+        {
+            BooleanClause clause = new BooleanClause(q, occur);
+            return clause;
+        }
+
+        // Factory hook: creates the TermQuery for a single analyzed term.
+        protected Query NewTermQuery(Term term)
+        {
+            TermQuery result = new TermQuery(term);
+            return result;
+        }
+
+        // Factory hook: creates an empty PhraseQuery to be filled by the caller.
+        protected PhraseQuery NewPhraseQuery()
+        {
+            PhraseQuery result = new PhraseQuery();
+            return result;
+        }
+
+        // Factory hook: creates an empty MultiPhraseQuery to be filled by the caller.
+        protected MultiPhraseQuery NewMultiPhraseQuery()
+        {
+            MultiPhraseQuery result = new MultiPhraseQuery();
+            return result;
+        }
+
+        // Factory hook: creates a PrefixQuery using this parser's configured
+        // multi-term rewrite method.
+        protected Query NewPrefixQuery(Term prefix)
+        {
+            PrefixQuery result = new PrefixQuery(prefix);
+            result.SetRewriteMethod(multiTermRewriteMethod);
+            return result;
+        }
+
+        // Factory hook: creates a RegexpQuery using this parser's configured
+        // multi-term rewrite method.
+        protected Query NewRegexpQuery(Term regexp)
+        {
+            RegexpQuery result = new RegexpQuery(regexp);
+            result.SetRewriteMethod(multiTermRewriteMethod);
+            return result;
+        }
+
+        // Factory hook: creates a FuzzyQuery, translating the legacy minimum
+        // similarity value into an edit distance for the term's length.
+        protected Query NewFuzzyQuery(Term term, float minimumSimilarity, int prefixLength)
+        {
+            // FuzzyQuery doesn't yet allow constant score rewrite
+            String termText = term.Text;
+            int maxEdits = FuzzyQuery.FloatToEdits(minimumSimilarity, termText.Length);
+            return new FuzzyQuery(term, maxEdits, prefixLength);
+        }
+
+        // TODO: Should this be protected instead?
+        private BytesRef AnalyzeMultitermTerm(String field, String part)
+        {
+            return AnalyzeMultitermTerm(field, part, analyzer);
+        }
+
+        // Runs the given analyzer over one multi-term piece (e.g. a range
+        // endpoint) and returns a private copy of the resulting term bytes.
+        // Throws ArgumentException when the analyzer yields zero or more than
+        // one token; wraps analyzer I/O failures in SystemException.
+        protected BytesRef AnalyzeMultitermTerm(String field, String part, Analyzer analyzerIn)
+        {
+            TokenStream source;
+
+            // fall back to this parser's analyzer when none was supplied
+            if (analyzerIn == null) analyzerIn = analyzer;
+
+            try
+            {
+                source = analyzerIn.TokenStream(field, new StringReader(part));
+                source.Reset();
+            }
+            catch (IOException e)
+            {
+                throw new SystemException("Unable to initialize TokenStream to analyze multiTerm term: " + part, e);
+            }
+
+            // the attribute's BytesRef is filled in place by FillBytesRef below
+            ITermToBytesRefAttribute termAtt = source.GetAttribute<ITermToBytesRefAttribute>();
+            BytesRef bytes = termAtt.BytesRef;
+
+            try
+            {
+                // exactly one token is required: the piece must analyze to a single term
+                if (!source.IncrementToken())
+                    throw new ArgumentException("analyzer returned no terms for multiTerm term: " + part);
+                termAtt.FillBytesRef();
+                if (source.IncrementToken())
+                    throw new ArgumentException("analyzer returned too many terms for multiTerm term: " + part);
+            }
+            catch (IOException e)
+            {
+                throw new SystemException("error analyzing range part: " + part, e);
+            }
+
+            try
+            {
+                source.End();
+                source.Dispose();
+            }
+            catch (IOException e)
+            {
+                throw new SystemException("Unable to end & close TokenStream after analyzing multiTerm term: " + part, e);
+            }
+
+            // deep-copy: the underlying BytesRef is reused across tokens
+            return BytesRef.DeepCopyOf(bytes);
+        }
+
+        // Factory hook: builds a TermRangeQuery for the given bounds. A null
+        // bound means open-ended. Bounds are run through the analyzer when
+        // analyzeRangeTerms is enabled, otherwise used verbatim as bytes.
+        protected Query NewRangeQuery(String field, String part1, String part2, bool startInclusive, bool endInclusive)
+        {
+            BytesRef lowerBytes = null;
+            if (part1 != null)
+            {
+                lowerBytes = analyzeRangeTerms ? AnalyzeMultitermTerm(field, part1) : new BytesRef(part1);
+            }
+
+            BytesRef upperBytes = null;
+            if (part2 != null)
+            {
+                upperBytes = analyzeRangeTerms ? AnalyzeMultitermTerm(field, part2) : new BytesRef(part2);
+            }
+
+            TermRangeQuery rangeQuery = new TermRangeQuery(field, lowerBytes, upperBytes, startInclusive, endInclusive);
+            rangeQuery.SetRewriteMethod(multiTermRewriteMethod);
+            return rangeQuery;
+        }
+
+        // Factory hook: creates the query returned for "*:*".
+        protected Query NewMatchAllDocsQuery()
+        {
+            MatchAllDocsQuery result = new MatchAllDocsQuery();
+            return result;
+        }
+
+        // Factory hook: creates a WildcardQuery using this parser's configured
+        // multi-term rewrite method.
+        protected Query NewWildcardQuery(Term t)
+        {
+            WildcardQuery result = new WildcardQuery(t);
+            result.SetRewriteMethod(multiTermRewriteMethod);
+            return result;
+        }
+
+        // Coord-enabled variant; delegates to the two-argument overload.
+        protected Query GetBooleanQuery(IList<BooleanClause> clauses)
+        {
+            return GetBooleanQuery(clauses, disableCoord: false);
+        }
+
+        // Combines the parsed clauses into a single BooleanQuery, or returns
+        // null when nothing survived analysis.
+        protected Query GetBooleanQuery(IList<BooleanClause> clauses, bool disableCoord)
+        {
+            if (clauses.Count == 0)
+            {
+                return null; // all clause words were filtered away by the analyzer.
+            }
+
+            BooleanQuery combined = NewBooleanQuery(disableCoord);
+            foreach (BooleanClause clause in clauses)
+            {
+                combined.Add(clause);
+            }
+            return combined;
+        }
+
+        // Builds a wildcard query; "*:*" maps to MatchAllDocs. Rejects a
+        // leading '*'/'?' unless leading wildcards are enabled.
+        protected Query GetWildcardQuery(String field, String termStr)
+        {
+            if ("*".Equals(field) && "*".Equals(termStr))
+            {
+                return NewMatchAllDocsQuery();
+            }
+            if (!allowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
+            {
+                throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery");
+            }
+            if (lowercaseExpandedTerms)
+            {
+                termStr = termStr.ToLower(locale);
+            }
+            return NewWildcardQuery(new Term(field, termStr));
+        }
+
+        // Builds a regexp query from the text between the '/' delimiters.
+        protected Query GetRegexpQuery(String field, String termStr)
+        {
+            String normalized = lowercaseExpandedTerms ? termStr.ToLower(locale) : termStr;
+            return NewRegexpQuery(new Term(field, normalized));
+        }
+
+        // Builds a prefix query; rejects a leading '*' unless leading
+        // wildcards are enabled.
+        protected Query GetPrefixQuery(String field, String termStr)
+        {
+            if (!allowLeadingWildcard && termStr.StartsWith("*"))
+            {
+                throw new ParseException("'*' not allowed as first character in PrefixQuery");
+            }
+            String normalized = lowercaseExpandedTerms ? termStr.ToLower(locale) : termStr;
+            return NewPrefixQuery(new Term(field, normalized));
+        }
+
+        // Builds a fuzzy query for the term with the given minimum similarity
+        // and this parser's configured fuzzy prefix length.
+        protected Query GetFuzzyQuery(String field, String termStr, float minSimilarity)
+        {
+            String normalized = lowercaseExpandedTerms ? termStr.ToLower(locale) : termStr;
+            return NewFuzzyQuery(new Term(field, normalized), minSimilarity, fuzzyPrefixLength);
+        }
+
+        // Dispatches a bare (unquoted) token to the matching query builder
+        // based on which syntax flags the grammar detected.
+        internal Query HandleBareTokenQuery(String qfield, Token term, Token fuzzySlop, bool prefix, bool wildcard, bool fuzzy, bool regexp)
+        {
+            Query q;
+
+            String termImage = DiscardEscapeChar(term.image);
+            if (wildcard)
+            {
+                // wildcard terms keep their escape chars; GetWildcardQuery deals with them
+                q = GetWildcardQuery(qfield, term.image);
+            }
+            else if (prefix)
+            {
+                // drop the trailing '*' before unescaping
+                q = GetPrefixQuery(qfield,
+                    DiscardEscapeChar(term.image.Substring
+                        (0, term.image.Length - 1)));
+            }
+            else if (regexp)
+            {
+                // Strip the surrounding '/' delimiters. Java's substring(1, len - 1)
+                // removes the first AND last char; .NET's Substring takes
+                // (start, length), so the correct length is len - 2.
+                q = GetRegexpQuery(qfield, term.image.Substring(1, term.image.Length - 2));
+            }
+            else if (fuzzy)
+            {
+                q = HandleBareFuzzy(qfield, fuzzySlop, termImage);
+            }
+            else
+            {
+                q = GetFieldQuery(qfield, termImage, false);
+            }
+            return q;
+        }
+
+        // Builds a fuzzy query for "term~sim". The similarity after '~' is
+        // optional; when absent or unparsable the configured default is used.
+        internal Query HandleBareFuzzy(String qfield, Token fuzzySlop, String termImage)
+        {
+            float fms = fuzzyMinSim;  // default when no number follows '~'
+            try
+            {
+                // Query syntax always uses '.' as the decimal separator, so parse
+                // with the invariant culture rather than the current one (which
+                // would reject "0.5" in comma-decimal locales).
+                fms = float.Parse(fuzzySlop.image.Substring(1), System.Globalization.CultureInfo.InvariantCulture);
+            }
+            catch (Exception) { }  // bare '~': keep the default similarity
+            if (fms < 0.0f)
+            {
+                throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
+            }
+            else if (fms >= 1.0f && fms != (int)fms)
+            {
+                throw new ParseException("Fractional edit distances are not allowed!");
+            }
+            return GetFuzzyQuery(qfield, termImage, fms);
+        }
+
+        // Builds a phrase query for a quoted term, honoring an optional "~slop"
+        // suffix (defaults to the configured phrase slop).
+        internal Query HandleQuotedTerm(String qfield, Token term, Token fuzzySlop)
+        {
+            int s = phraseSlop;  // default
+            if (fuzzySlop != null)
+            {
+                try
+                {
+                    // Invariant culture: the query syntax always uses '.' as the
+                    // decimal separator, independent of the parser's locale.
+                    s = (int)float.Parse(fuzzySlop.image.Substring(1), System.Globalization.CultureInfo.InvariantCulture);
+                }
+                catch (Exception ignored) { }  // no number after '~': keep default slop
+            }
+            // Strip the surrounding quotes. Java's substring(1, len - 1) removes
+            // the first AND last char; .NET's Substring takes (start, length),
+            // so the correct length is len - 2.
+            return GetFieldQuery(qfield, DiscardEscapeChar(term.image.Substring(1, term.image.Length - 2)), s);
+        }
+
+        // Applies a "^boost" suffix to the query, if present. An unparsable
+        // boost silently falls back to 1.0; null queries (e.g. from stop-word
+        // removal) are passed through untouched.
+        internal Query HandleBoost(Query q, Token boost)
+        {
+            if (boost != null)
+            {
+                float f = (float)1.0;
+                try
+                {
+                    // Invariant culture: boost values in the query syntax always use
+                    // '.' as the decimal separator, independent of the parser's locale.
+                    f = float.Parse(boost.image, System.Globalization.CultureInfo.InvariantCulture);
+                }
+                catch (Exception)
+                {
+                    /* Should this be handled somehow? (defaults to "no boost", if
+                     * boost number is invalid)
+                     */
+                }
+
+                // avoid boosting null queries, such as those caused by stop words
+                if (q != null)
+                {
+                    q.Boost = f;
+                }
+            }
+            return q;
+        }
+
+        // Returns the input with query-syntax escape chars removed: "\x" becomes
+        // "x" and "\uXXXX" becomes the corresponding character. Throws
+        // ParseException for a truncated \u sequence or a trailing backslash.
+        internal String DiscardEscapeChar(String input)
+        {
+            // Create char array to hold unescaped char sequence
+            char[] output = new char[input.Length];
+
+            // The length of the output can be less than the input
+            // due to discarded escape chars. This variable holds
+            // the actual length of the output
+            int length = 0;
+
+            // We remember whether the last processed character was
+            // an escape character
+            bool lastCharWasEscapeChar = false;
+
+            // The multiplier the current unicode digit must be multiplied with.
+            // E. g. the first digit must be multiplied with 16^3, the second with 16^2...
+            int codePointMultiplier = 0;
+
+            // Used to calculate the codepoint of the escaped unicode character
+            int codePoint = 0;
+
+            for (int i = 0; i < input.Length; i++)
+            {
+                char curChar = input[i];
+                if (codePointMultiplier > 0)
+                {
+                    // inside a \uXXXX sequence: accumulate hex digits
+                    codePoint += HexToInt(curChar) * codePointMultiplier;
+                    codePointMultiplier = Number.URShift(codePointMultiplier, 4);
+                    if (codePointMultiplier == 0)
+                    {
+                        // all four digits consumed: emit the decoded character
+                        output[length++] = (char)codePoint;
+                        codePoint = 0;
+                    }
+                }
+                else if (lastCharWasEscapeChar)
+                {
+                    if (curChar == 'u')
+                    {
+                        // found an escaped unicode character
+                        codePointMultiplier = 16 * 16 * 16;
+                    }
+                    else
+                    {
+                        // this character was escaped
+                        output[length] = curChar;
+                        length++;
+                    }
+                    lastCharWasEscapeChar = false;
+                }
+                else
+                {
+                    if (curChar == '\\')
+                    {
+                        lastCharWasEscapeChar = true;
+                    }
+                    else
+                    {
+                        output[length] = curChar;
+                        length++;
+                    }
+                }
+            }
+
+            if (codePointMultiplier > 0)
+            {
+                throw new ParseException("Truncated unicode escape sequence.");
+            }
+
+            if (lastCharWasEscapeChar)
+            {
+                throw new ParseException("Term can not end with escape character.");
+            }
+
+            return new String(output, 0, length);
+        }
+
+        // Converts one hex digit (0-9, a-f, A-F) to its numeric value; throws
+        // ParseException for anything else.
+        internal static int HexToInt(char c)
+        {
+            if (c >= '0' && c <= '9')
+            {
+                return c - '0';
+            }
+            if (c >= 'a' && c <= 'f')
+            {
+                return c - 'a' + 10;
+            }
+            if (c >= 'A' && c <= 'F')
+            {
+                return c - 'A' + 10;
+            }
+            throw new ParseException("Non-hex character in Unicode escape sequence: " + c);
+        }
+
+        // Returns a version of s with every query-syntax metacharacter
+        // preceded by a backslash, so the result parses as a literal term.
+        public static String Escape(String s)
+        {
+            const string specialChars = "\\+-!():^[]\"{}~*?|&/";
+            StringBuilder escaped = new StringBuilder();
+            foreach (char c in s)
+            {
+                // These characters are part of the query syntax and must be escaped
+                if (specialChars.IndexOf(c) >= 0)
+                {
+                    escaped.Append('\\');
+                }
+                escaped.Append(c);
+            }
+            return escaped.ToString();
+        }
+    }
+}


Mime
View raw message