lucene-java-commits mailing list archives

From busc...@apache.org
Subject svn commit: r800191 [10/12] - in /lucene/java/trunk: ./ contrib/ contrib/queryparser/ contrib/queryparser/src/ contrib/queryparser/src/java/ contrib/queryparser/src/java/org/ contrib/queryparser/src/java/org/apache/ contrib/queryparser/src/java/org/apa...
Date Mon, 03 Aug 2009 03:38:50 GMT
Added: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiAnalyzerWrapper.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiAnalyzerWrapper.java?rev=800191&view=auto
==============================================================================
--- lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiAnalyzerWrapper.java (added)
+++ lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiAnalyzerWrapper.java Mon Aug  3 03:38:44 2009
@@ -0,0 +1,320 @@
+package org.apache.lucene.queryParser.original;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.Reader;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.LowerCaseFilter;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.standard.StandardTokenizer;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.original.QueryParserWrapper;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * This test case is a copy of the core Lucene query parser test; it was
+ * adapted to use the new QueryParserWrapper instead of the old query parser.
+ * 
+ * Test QueryParser's ability to deal with Analyzers that return more than one
+ * token per position or that return tokens with a position increment > 1.
+ * 
+ */
+public class TestMultiAnalyzerWrapper extends LuceneTestCase {
+
+  private static int multiToken = 0;
+
+  public void testMultiAnalyzer() throws ParseException {
+
+    QueryParserWrapper qp = new QueryParserWrapper("", new MultiAnalyzer());
+
+    // trivial, no multiple tokens:
+    assertEquals("foo", qp.parse("foo").toString());
+    assertEquals("foo", qp.parse("\"foo\"").toString());
+    assertEquals("foo foobar", qp.parse("foo foobar").toString());
+    assertEquals("\"foo foobar\"", qp.parse("\"foo foobar\"").toString());
+    assertEquals("\"foo foobar blah\"", qp.parse("\"foo foobar blah\"")
+        .toString());
+
+    // two tokens at the same position:
+    assertEquals("(multi multi2) foo", qp.parse("multi foo").toString());
+    assertEquals("foo (multi multi2)", qp.parse("foo multi").toString());
+    assertEquals("(multi multi2) (multi multi2)", qp.parse("multi multi")
+        .toString());
+    assertEquals("+(foo (multi multi2)) +(bar (multi multi2))", qp.parse(
+        "+(foo multi) +(bar multi)").toString());
+    assertEquals("+(foo (multi multi2)) field:\"bar (multi multi2)\"", qp
+        .parse("+(foo multi) field:\"bar multi\"").toString());
+
+    // phrases:
+    assertEquals("\"(multi multi2) foo\"", qp.parse("\"multi foo\"").toString());
+    assertEquals("\"foo (multi multi2)\"", qp.parse("\"foo multi\"").toString());
+    assertEquals("\"foo (multi multi2) foobar (multi multi2)\"", qp.parse(
+        "\"foo multi foobar multi\"").toString());
+
+    // fields:
+    assertEquals("(field:multi field:multi2) field:foo", qp.parse(
+        "field:multi field:foo").toString());
+    assertEquals("field:\"(multi multi2) foo\"", qp
+        .parse("field:\"multi foo\"").toString());
+
+    // three tokens at one position:
+    assertEquals("triplemulti multi3 multi2", qp.parse("triplemulti")
+        .toString());
+    assertEquals("foo (triplemulti multi3 multi2) foobar", qp.parse(
+        "foo triplemulti foobar").toString());
+
+    // phrase with non-default slop:
+    assertEquals("\"(multi multi2) foo\"~10", qp.parse("\"multi foo\"~10")
+        .toString());
+
+    // phrase with non-default boost:
+    assertEquals("\"(multi multi2) foo\"^2.0", qp.parse("\"multi foo\"^2")
+        .toString());
+
+    // phrase after changing default slop
+    qp.setPhraseSlop(99);
+    assertEquals("\"(multi multi2) foo\"~99 bar", qp.parse("\"multi foo\" bar")
+        .toString());
+    assertEquals("\"(multi multi2) foo\"~99 \"foo bar\"~2", qp.parse(
+        "\"multi foo\" \"foo bar\"~2").toString());
+    qp.setPhraseSlop(0);
+
+    // non-default operator:
+    qp.setDefaultOperator(QueryParserWrapper.AND_OPERATOR);
+    assertEquals("+(multi multi2) +foo", qp.parse("multi foo").toString());
+
+  }
+
+  // public void testMultiAnalyzerWithSubclassOfQueryParser() throws
+  // ParseException {
+  // this test doesn't make sense when using the new QueryParser API
+  // DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer());
+  // qp.setPhraseSlop(99); // modified default slop
+  //
+ // // direct call to (super's) getFieldQuery to demonstrate difference
+  // // between phrase and multiphrase with modified default slop
+  // assertEquals("\"foo bar\"~99",
+  // qp.getSuperFieldQuery("","foo bar").toString());
+  // assertEquals("\"(multi multi2) bar\"~99",
+  // qp.getSuperFieldQuery("","multi bar").toString());
+  //
+  //    
+ // // ask subclass to parse phrase with modified default slop
+  // assertEquals("\"(multi multi2) foo\"~99 bar",
+  // qp.parse("\"multi foo\" bar").toString());
+  //    
+  // }
+
+  public void testPosIncrementAnalyzer() throws ParseException {
+    QueryParserWrapper qp = new QueryParserWrapper("",
+        new PosIncrementAnalyzer());
+    assertEquals("quick brown", qp.parse("the quick brown").toString());
+    assertEquals("\"quick brown\"", qp.parse("\"the quick brown\"").toString());
+    assertEquals("quick brown fox", qp.parse("the quick brown fox").toString());
+    assertEquals("\"quick brown fox\"", qp.parse("\"the quick brown fox\"")
+        .toString());
+  }
+
+  /**
+   * Expands "multi" to "multi" and "multi2", both at the same position, and
+   * expands "triplemulti" to "triplemulti", "multi3", and "multi2".
+   */
+  private class MultiAnalyzer extends Analyzer {
+
+    public MultiAnalyzer() {
+    }
+
+    public TokenStream tokenStream(String fieldName, Reader reader) {
+      TokenStream result = new StandardTokenizer(reader);
+      result = new TestFilter(result);
+      result = new LowerCaseFilter(result);
+      return result;
+    }
+  }
+
+  private final class TestFilter extends TokenFilter {
+
+    private String prevType;
+    private int prevStartOffset;
+    private int prevEndOffset;
+
+    TermAttribute termAtt;
+    PositionIncrementAttribute posIncrAtt;
+    OffsetAttribute offsetAtt;
+    TypeAttribute typeAtt;
+
+    public TestFilter(TokenStream in) {
+      super(in);
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
+      posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
+      offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
+      typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
+
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see
+     * org.apache.lucene.analysis.TokenStream#next(org.apache.lucene.analysis
+     * .Token)
+     */
+    @Override
+    public Token next(Token reusableToken) throws IOException {
+
+      if (multiToken > 0) {
+        reusableToken.setTermBuffer("multi" + (multiToken + 1));
+        reusableToken.setStartOffset(prevStartOffset);
+        reusableToken.setEndOffset(prevEndOffset);
+        reusableToken.setType(prevType);
+        reusableToken.setPositionIncrement(0);
+        multiToken--;
+        return reusableToken;
+      } else {
+        boolean next = (reusableToken = input.next(token)) != null;
+        if (next == false) {
+          return null;
+        }
+        prevType = reusableToken.type();
+        prevStartOffset = reusableToken.startOffset();
+        prevEndOffset = reusableToken.endOffset();
+        String text = reusableToken.term();
+        if (text.equals("triplemulti")) {
+          multiToken = 2;
+          return reusableToken;
+        } else if (text.equals("multi")) {
+          multiToken = 1;
+          return reusableToken;
+        } else {
+          return reusableToken;
+        }
+      }
+
+    }
+
+    private Token token = new Token();
+
+    public final boolean incrementToken() throws java.io.IOException {
+      if (multiToken > 0) {
+        termAtt.setTermBuffer("multi" + (multiToken + 1));
+        offsetAtt.setOffset(prevStartOffset, prevEndOffset);
+        typeAtt.setType(prevType);
+        posIncrAtt.setPositionIncrement(0);
+        multiToken--;
+        return true;
+      } else {
+        boolean next = input.incrementToken();
+        if (next == false) {
+          return false;
+        }
+        prevType = typeAtt.type();
+        prevStartOffset = offsetAtt.startOffset();
+        prevEndOffset = offsetAtt.endOffset();
+        String text = termAtt.term();
+        if (text.equals("triplemulti")) {
+          multiToken = 2;
+          return true;
+        } else if (text.equals("multi")) {
+          multiToken = 1;
+          return true;
+        } else {
+          return true;
+        }
+      }
+    }
+
+  }
+
+  /**
+   * Analyzes "the quick brown" as: quick(incr=2) brown(incr=1). Does not work
+   * correctly for input other than "the quick brown ...".
+   */
+  private class PosIncrementAnalyzer extends Analyzer {
+
+    public PosIncrementAnalyzer() {
+    }
+
+    public TokenStream tokenStream(String fieldName, Reader reader) {
+      TokenStream result = new StandardTokenizer(reader);
+      result = new TestPosIncrementFilter(result);
+      result = new LowerCaseFilter(result);
+      return result;
+    }
+  }
+
+  private class TestPosIncrementFilter extends TokenFilter {
+
+    TermAttribute termAtt;
+    PositionIncrementAttribute posIncrAtt;
+
+    public TestPosIncrementFilter(TokenStream in) {
+      super(in);
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
+      posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
+    }
+
+    private Token token = new Token();
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see org.apache.lucene.analysis.TokenStream#next()
+     */
+    @Override
+    public Token next(Token reusableToken) throws IOException {
+      while (null != (reusableToken = input.next(token))) {
+        String term = reusableToken.term();
+        if (term.equals("the")) {
+          // stopword, do nothing
+        } else if (term.equals("quick")) {
+          reusableToken.setPositionIncrement(2);
+          return reusableToken;
+        } else {
+          reusableToken.setPositionIncrement(1);
+          return reusableToken;
+        }
+      }
+      return null;
+    }
+
+    public final boolean incrementToken() throws java.io.IOException {
+      while (input.incrementToken()) {
+        if (termAtt.term().equals("the")) {
+          // stopword, do nothing
+        } else if (termAtt.term().equals("quick")) {
+          posIncrAtt.setPositionIncrement(2);
+          return true;
+        } else {
+          posIncrAtt.setPositionIncrement(1);
+          return true;
+        }
+      }
+      return false;
+    }
+
+  }
+
+}

Propchange: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiAnalyzerWrapper.java
------------------------------------------------------------------------------
    svn:eol-style = native
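
For context, a minimal usage sketch of the QueryParserWrapper API exercised by the test above. This is illustrative only, not part of the commit; it assumes the 2.9-era contrib queryparser on the classpath and uses StandardAnalyzer so it compiles on its own:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.queryParser.ParseException;
    import org.apache.lucene.queryParser.original.QueryParserWrapper;
    import org.apache.lucene.search.Query;

    public class QueryParserWrapperSketch {
      public static void main(String[] args) throws ParseException {
        // "field" is the default field used for terms without a field prefix.
        QueryParserWrapper qp = new QueryParserWrapper("field", new StandardAnalyzer());
        Query q = qp.parse("foo +bar -baz");
        System.out.println(q.toString("field"));                        // foo +bar -baz
        qp.setPhraseSlop(2);                                            // default slop for phrases
        System.out.println(qp.parse("\"foo bar\"").toString("field"));  // "foo bar"~2
        // With an analyzer that stacks tokens at one position (like the
        // test's MultiAnalyzer), "multi foo" would instead parse to
        // "(multi multi2) foo", as the assertions above show.
      }
    }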

Added: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQPHelper.java?rev=800191&view=auto
==============================================================================
--- lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQPHelper.java (added)
+++ lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQPHelper.java Mon Aug  3 03:38:44 2009
@@ -0,0 +1,368 @@
+package org.apache.lucene.queryParser.original;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.queryParser.core.QueryNodeException;
+import org.apache.lucene.queryParser.original.config.OriginalQueryConfigHandler;
+import org.apache.lucene.queryParser.original.config.DefaultOperatorAttribute.Operator;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * This test case is a copy of the core Lucene query parser test; it was
+ * adapted to use the new QueryParserHelper instead of the old query parser.
+ * 
+ * Tests QueryParser.
+ */
+public class TestMultiFieldQPHelper extends LuceneTestCase {
+
+  /**
+   * Tests stop word parsing for both the non-static form and for the
+   * corresponding static form (qtxt, fields[]).
+   */
+  public void tesStopwordsParsing() throws Exception {
+    assertStopQueryEquals("one", "b:one t:one");
+    assertStopQueryEquals("one stop", "b:one t:one");
+    assertStopQueryEquals("one (stop)", "b:one t:one");
+    assertStopQueryEquals("one ((stop))", "b:one t:one");
+    assertStopQueryEquals("stop", null);
+    assertStopQueryEquals("(stop)", null);
+    assertStopQueryEquals("((stop))", null);
+  }
+
+  // verify parsing of query using a stopping analyzer
+  private void assertStopQueryEquals(String qtxt, String expectedRes)
+      throws Exception {
+    String[] fields = { "b", "t" };
+    Occur occur[] = { Occur.SHOULD, Occur.SHOULD };
+    TestQPHelper.QPTestAnalyzer a = new TestQPHelper.QPTestAnalyzer();
+    OriginalQueryParserHelper mfqp = new OriginalQueryParserHelper();
+    mfqp.setMultiFields(fields);
+    mfqp.setAnalyzer(a);
+
+    Query q = mfqp.parse(qtxt, null);
+    assertEquals(expectedRes, q.toString());
+
+    q = QueryParserUtil.parse(qtxt, fields, occur, a);
+    assertEquals(expectedRes, q.toString());
+  }
+
+  public void testSimple() throws Exception {
+    String[] fields = { "b", "t" };
+    OriginalQueryParserHelper mfqp = new OriginalQueryParserHelper();
+    mfqp.setMultiFields(fields);
+    mfqp.setAnalyzer(new StandardAnalyzer());
+
+    Query q = mfqp.parse("one", null);
+    assertEquals("b:one t:one", q.toString());
+
+    q = mfqp.parse("one two", null);
+    assertEquals("(b:one t:one) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("+one +two", null);
+    assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+
+    q = mfqp.parse("+one -two -three", null);
+    assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q
+        .toString());
+
+    q = mfqp.parse("one^2 two", null);
+    assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("one~ two", null);
+    assertEquals("(b:one~0.5 t:one~0.5) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("one~0.8 two^2", null);
+    assertEquals("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.toString());
+
+    q = mfqp.parse("one* two*", null);
+    assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString());
+
+    q = mfqp.parse("[a TO c] two", null);
+    assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("w?ldcard", null);
+    assertEquals("b:w?ldcard t:w?ldcard", q.toString());
+
+    q = mfqp.parse("\"foo bar\"", null);
+    assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString());
+
+    q = mfqp.parse("\"aa bb cc\" \"dd ee\"", null);
+    assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q
+        .toString());
+
+    q = mfqp.parse("\"foo bar\"~4", null);
+    assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString());
+
+    // LUCENE-1213: QueryParser was ignoring slop when phrase
+    // had a field.
+    q = mfqp.parse("b:\"foo bar\"~4", null);
+    assertEquals("b:\"foo bar\"~4", q.toString());
+
+    // make sure that terms which have a field are not touched:
+    q = mfqp.parse("one f:two", null);
+    assertEquals("(b:one t:one) f:two", q.toString());
+
+    // AND mode:
+    mfqp.setDefaultOperator(Operator.AND);
+    q = mfqp.parse("one two", null);
+    assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+    q = mfqp.parse("\"aa bb cc\" \"dd ee\"", null);
+    assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")",
+        q.toString());
+
+  }
+
+  public void testBoostsSimple() throws Exception {
+    Map boosts = new HashMap();
+    boosts.put("b", new Float(5));
+    boosts.put("t", new Float(10));
+    String[] fields = { "b", "t" };
+    OriginalQueryParserHelper mfqp = new OriginalQueryParserHelper();
+    mfqp.setMultiFields(fields);
+    mfqp.setFieldsBoost(boosts);
+    mfqp.setAnalyzer(new StandardAnalyzer());
+
+    // Check for simple
+    Query q = mfqp.parse("one", null);
+    assertEquals("b:one^5.0 t:one^10.0", q.toString());
+
+    // Check for AND
+    q = mfqp.parse("one AND two", null);
+    assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q
+        .toString());
+
+    // Check for OR
+    q = mfqp.parse("one OR two", null);
+    assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString());
+
+    // Check for AND and a field
+    q = mfqp.parse("one AND two AND foo:test", null);
+    assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q
+        .toString());
+
+    q = mfqp.parse("one^3 AND two^4", null);
+    assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)",
+        q.toString());
+  }
+
+  public void testStaticMethod1() throws QueryNodeException {
+    String[] fields = { "b", "t" };
+    String[] queries = { "one", "two" };
+    Query q = QueryParserUtil.parse(queries, fields, new StandardAnalyzer());
+    assertEquals("b:one t:two", q.toString());
+
+    String[] queries2 = { "+one", "+two" };
+    q = QueryParserUtil.parse(queries2, fields, new StandardAnalyzer());
+    assertEquals("(+b:one) (+t:two)", q.toString());
+
+    String[] queries3 = { "one", "+two" };
+    q = QueryParserUtil.parse(queries3, fields, new StandardAnalyzer());
+    assertEquals("b:one (+t:two)", q.toString());
+
+    String[] queries4 = { "one +more", "+two" };
+    q = QueryParserUtil.parse(queries4, fields, new StandardAnalyzer());
+    assertEquals("(b:one +b:more) (+t:two)", q.toString());
+
+    String[] queries5 = { "blah" };
+    try {
+      q = QueryParserUtil.parse(queries5, fields, new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+
+    // check also with stop words for this static form (qtxts[], fields[]).
+    TestQPHelper.QPTestAnalyzer stopA = new TestQPHelper.QPTestAnalyzer();
+
+    String[] queries6 = { "((+stop))", "+((stop))" };
+    q = QueryParserUtil.parse(queries6, fields, stopA);
+    assertEquals("", q.toString());
+
+    String[] queries7 = { "one ((+stop)) +more", "+((stop)) +two" };
+    q = QueryParserUtil.parse(queries7, fields, stopA);
+    assertEquals("(b:one +b:more) (+t:two)", q.toString());
+
+  }
+
+  public void testStaticMethod2() throws QueryNodeException {
+    String[] fields = { "b", "t" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT };
+    Query q = QueryParserUtil.parse("one", fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+b:one -t:one", q.toString());
+
+    q = QueryParserUtil.parse("one two", fields, flags, new StandardAnalyzer());
+    assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = QueryParserUtil.parse("blah", fields, flags2, new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testStaticMethod2Old() throws QueryNodeException {
+    String[] fields = { "b", "t" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT };
+    OriginalQueryParserHelper parser = new OriginalQueryParserHelper();
+    parser.setMultiFields(fields);
+    parser.setAnalyzer(new StandardAnalyzer());
+
+    Query q = QueryParserUtil.parse("one", fields, flags,
+        new StandardAnalyzer());// , fields, flags, new
+    // StandardAnalyzer());
+    assertEquals("+b:one -t:one", q.toString());
+
+    q = QueryParserUtil.parse("one two", fields, flags, new StandardAnalyzer());
+    assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = QueryParserUtil.parse("blah", fields, flags2, new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testStaticMethod3() throws QueryNodeException {
+    String[] queries = { "one", "two", "three" };
+    String[] fields = { "f1", "f2", "f3" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD };
+    Query q = QueryParserUtil.parse(queries, fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+f1:one -f2:two f3:three", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = QueryParserUtil
+          .parse(queries, fields, flags2, new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testStaticMethod3Old() throws QueryNodeException {
+    String[] queries = { "one", "two" };
+    String[] fields = { "b", "t" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT };
+    Query q = QueryParserUtil.parse(queries, fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+b:one -t:two", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = QueryParserUtil
+          .parse(queries, fields, flags2, new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testAnalyzerReturningNull() throws QueryNodeException {
+    String[] fields = new String[] { "f1", "f2", "f3" };
+    OriginalQueryParserHelper parser = new OriginalQueryParserHelper();
+    parser.setMultiFields(fields);
+    parser.setAnalyzer(new AnalyzerReturningNull());
+
+    Query q = parser.parse("bla AND blo", null);
+    assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
+    // the following queries are not affected as their terms are not
+    // analyzed anyway:
+    q = parser.parse("bla*", null);
+    assertEquals("f1:bla* f2:bla* f3:bla*", q.toString());
+    q = parser.parse("bla~", null);
+    assertEquals("f1:bla~0.5 f2:bla~0.5 f3:bla~0.5", q.toString());
+    q = parser.parse("[a TO c]", null);
+    assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString());
+  }
+
+  public void testStopWordSearching() throws Exception {
+    Analyzer analyzer = new StandardAnalyzer();
+    Directory ramDir = new RAMDirectory();
+    IndexWriter iw = new IndexWriter(ramDir, analyzer, true,
+        IndexWriter.MaxFieldLength.LIMITED);
+    Document doc = new Document();
+    doc.add(new Field("body", "blah the footest blah", Field.Store.NO,
+        Field.Index.ANALYZED));
+    iw.addDocument(doc);
+    iw.close();
+
+    OriginalQueryParserHelper mfqp = new OriginalQueryParserHelper();
+
+    mfqp.setMultiFields(new String[] { "body" });
+    mfqp.setAnalyzer(analyzer);
+    mfqp.setDefaultOperator(Operator.AND);
+    Query q = mfqp.parse("the footest", null);
+    IndexSearcher is = new IndexSearcher(ramDir);
+    ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
+    assertEquals(1, hits.length);
+    is.close();
+  }
+
+  /**
+   * Return empty tokens for field "f1".
+   */
+  private static class AnalyzerReturningNull extends Analyzer {
+    StandardAnalyzer stdAnalyzer = new StandardAnalyzer();
+
+    public AnalyzerReturningNull() {
+    }
+
+    public TokenStream tokenStream(String fieldName, Reader reader) {
+      if ("f1".equals(fieldName)) {
+        return new EmptyTokenStream();
+      } else {
+        return stdAnalyzer.tokenStream(fieldName, reader);
+      }
+    }
+
+    private static class EmptyTokenStream extends TokenStream {
+      public Token next(final Token reusableToken) {
+        return null;
+      }
+    }
+  }
+
+}

Propchange: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQPHelper.java
------------------------------------------------------------------------------
    svn:eol-style = native
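
For context, a minimal sketch of the multi-field expansion that TestMultiFieldQPHelper asserts on. Illustrative only; it is limited to calls that appear in the test above and assumes the contrib queryparser on the classpath:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.queryParser.core.QueryNodeException;
    import org.apache.lucene.queryParser.original.OriginalQueryParserHelper;
    import org.apache.lucene.search.Query;

    public class MultiFieldQPHelperSketch {
      public static void main(String[] args) throws QueryNodeException {
        String[] fields = { "b", "t" };
        OriginalQueryParserHelper mfqp = new OriginalQueryParserHelper();
        mfqp.setMultiFields(fields);
        mfqp.setAnalyzer(new StandardAnalyzer());
        // Unqualified terms are expanded across the configured fields...
        Query q = mfqp.parse("one two", null);
        System.out.println(q);                          // (b:one t:one) (b:two t:two)
        // ...while explicitly qualified terms are left untouched.
        System.out.println(mfqp.parse("one f:two", null));  // (b:one t:one) f:two
      }
    }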

Added: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQueryParserWrapper.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQueryParserWrapper.java?rev=800191&view=auto
==============================================================================
--- lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQueryParserWrapper.java (added)
+++ lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQueryParserWrapper.java Mon Aug  3 03:38:44 2009
@@ -0,0 +1,366 @@
+package org.apache.lucene.queryParser.original;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.original.MultiFieldQueryParserWrapper;
+import org.apache.lucene.queryParser.original.QueryParserWrapper;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Tests multi-field query parsing using the {@link MultiFieldQueryParserWrapper}.
+ */
+public class TestMultiFieldQueryParserWrapper extends LuceneTestCase {
+
+  /**
+   * Tests stop word parsing for both the non-static form and for the
+   * corresponding static form (qtxt, fields[]).
+   */
+  public void tesStopwordsParsing() throws Exception {
+    assertStopQueryEquals("one", "b:one t:one");
+    assertStopQueryEquals("one stop", "b:one t:one");
+    assertStopQueryEquals("one (stop)", "b:one t:one");
+    assertStopQueryEquals("one ((stop))", "b:one t:one");
+    assertStopQueryEquals("stop", "");
+    assertStopQueryEquals("(stop)", "");
+    assertStopQueryEquals("((stop))", "");
+  }
+
+  // verify parsing of query using a stopping analyzer
+  private void assertStopQueryEquals(String qtxt, String expectedRes)
+      throws Exception {
+    String[] fields = { "b", "t" };
+    Occur occur[] = { Occur.SHOULD, Occur.SHOULD };
+    TestQueryParserWrapper.QPTestAnalyzer a = new TestQueryParserWrapper.QPTestAnalyzer();
+    MultiFieldQueryParserWrapper mfqp = new MultiFieldQueryParserWrapper(
+        fields, a);
+
+    Query q = mfqp.parse(qtxt);
+    assertEquals(expectedRes, q.toString());
+
+    q = MultiFieldQueryParserWrapper.parse(qtxt, fields, occur, a);
+    assertEquals(expectedRes, q.toString());
+  }
+
+  public void testSimple() throws Exception {
+    String[] fields = { "b", "t" };
+    MultiFieldQueryParserWrapper mfqp = new MultiFieldQueryParserWrapper(
+        fields, new StandardAnalyzer());
+
+    Query q = mfqp.parse("one");
+    assertEquals("b:one t:one", q.toString());
+
+    q = mfqp.parse("one two");
+    assertEquals("(b:one t:one) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("+one +two");
+    assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+
+    q = mfqp.parse("+one -two -three");
+    assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q
+        .toString());
+
+    q = mfqp.parse("one^2 two");
+    assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("one~ two");
+    assertEquals("(b:one~0.5 t:one~0.5) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("one~0.8 two^2");
+    assertEquals("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.toString());
+
+    q = mfqp.parse("one* two*");
+    assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString());
+
+    q = mfqp.parse("[a TO c] two");
+    assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString());
+
+    q = mfqp.parse("w?ldcard");
+    assertEquals("b:w?ldcard t:w?ldcard", q.toString());
+
+    q = mfqp.parse("\"foo bar\"");
+    assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString());
+
+    q = mfqp.parse("\"aa bb cc\" \"dd ee\"");
+    assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q
+        .toString());
+
+    q = mfqp.parse("\"foo bar\"~4");
+    assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString());
+
+    // LUCENE-1213: MultiFieldQueryParserWrapper was ignoring slop when phrase
+    // had a field.
+    q = mfqp.parse("b:\"foo bar\"~4");
+    assertEquals("b:\"foo bar\"~4", q.toString());
+
+    // make sure that terms which have a field are not touched:
+    q = mfqp.parse("one f:two");
+    assertEquals("(b:one t:one) f:two", q.toString());
+
+    // AND mode:
+    mfqp.setDefaultOperator(QueryParserWrapper.AND_OPERATOR);
+    q = mfqp.parse("one two");
+    assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+    q = mfqp.parse("\"aa bb cc\" \"dd ee\"");
+    assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")",
+        q.toString());
+
+  }
+
+  public void testBoostsSimple() throws Exception {
+    Map boosts = new HashMap();
+    boosts.put("b", new Float(5));
+    boosts.put("t", new Float(10));
+    String[] fields = { "b", "t" };
+    MultiFieldQueryParserWrapper mfqp = new MultiFieldQueryParserWrapper(
+        fields, new StandardAnalyzer(), boosts);
+
+    // Check for simple
+    Query q = mfqp.parse("one");
+    assertEquals("b:one^5.0 t:one^10.0", q.toString());
+
+    // Check for AND
+    q = mfqp.parse("one AND two");
+    assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q
+        .toString());
+
+    // Check for OR
+    q = mfqp.parse("one OR two");
+    assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString());
+
+    // Check for AND and a field
+    q = mfqp.parse("one AND two AND foo:test");
+    assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q
+        .toString());
+
+    q = mfqp.parse("one^3 AND two^4");
+    assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)",
+        q.toString());
+  }
+
+  public void testStaticMethod1() throws ParseException {
+    String[] fields = { "b", "t" };
+    String[] queries = { "one", "two" };
+    Query q = MultiFieldQueryParserWrapper.parse(queries, fields,
+        new StandardAnalyzer());
+    assertEquals("b:one t:two", q.toString());
+
+    String[] queries2 = { "+one", "+two" };
+    q = MultiFieldQueryParserWrapper.parse(queries2, fields,
+        new StandardAnalyzer());
+    assertEquals("(+b:one) (+t:two)", q.toString());
+
+    String[] queries3 = { "one", "+two" };
+    q = MultiFieldQueryParserWrapper.parse(queries3, fields,
+        new StandardAnalyzer());
+    assertEquals("b:one (+t:two)", q.toString());
+
+    String[] queries4 = { "one +more", "+two" };
+    q = MultiFieldQueryParserWrapper.parse(queries4, fields,
+        new StandardAnalyzer());
+    assertEquals("(b:one +b:more) (+t:two)", q.toString());
+
+    String[] queries5 = { "blah" };
+    try {
+      q = MultiFieldQueryParserWrapper.parse(queries5, fields,
+          new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+
+    // check also with stop words for this static form (qtxts[], fields[]).
+    TestQueryParserWrapper.QPTestAnalyzer stopA = new TestQueryParserWrapper.QPTestAnalyzer();
+
+    String[] queries6 = { "((+stop))", "+((stop))" };
+    q = MultiFieldQueryParserWrapper.parse(queries6, fields, stopA);
+    assertEquals("", q.toString());
+
+    String[] queries7 = { "one ((+stop)) +more", "+((stop)) +two" };
+    q = MultiFieldQueryParserWrapper.parse(queries7, fields, stopA);
+    assertEquals("(b:one +b:more) (+t:two)", q.toString());
+
+  }
+
+  public void testStaticMethod2() throws ParseException {
+    String[] fields = { "b", "t" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT };
+    Query q = MultiFieldQueryParserWrapper.parse("one", fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+b:one -t:one", q.toString());
+
+    q = MultiFieldQueryParserWrapper.parse("one two", fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = MultiFieldQueryParserWrapper.parse("blah", fields, flags2,
+          new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testStaticMethod2Old() throws ParseException {
+    String[] fields = { "b", "t" };
+    // int[] flags = {MultiFieldQueryParserWrapper.REQUIRED_FIELD,
+    // MultiFieldQueryParserWrapper.PROHIBITED_FIELD};
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT };
+    MultiFieldQueryParserWrapper parser = new MultiFieldQueryParserWrapper(
+        fields, new StandardAnalyzer());
+
+    Query q = MultiFieldQueryParserWrapper.parse("one", fields, flags,
+        new StandardAnalyzer());// , fields, flags, new StandardAnalyzer());
+    assertEquals("+b:one -t:one", q.toString());
+
+    q = MultiFieldQueryParserWrapper.parse("one two", fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = MultiFieldQueryParserWrapper.parse("blah", fields, flags2,
+          new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testStaticMethod3() throws ParseException {
+    String[] queries = { "one", "two", "three" };
+    String[] fields = { "f1", "f2", "f3" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD };
+    Query q = MultiFieldQueryParserWrapper.parse(queries, fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+f1:one -f2:two f3:three", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = MultiFieldQueryParserWrapper.parse(queries, fields, flags2,
+          new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testStaticMethod3Old() throws ParseException {
+    String[] queries = { "one", "two" };
+    String[] fields = { "b", "t" };
+    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
+        BooleanClause.Occur.MUST_NOT };
+    Query q = MultiFieldQueryParserWrapper.parse(queries, fields, flags,
+        new StandardAnalyzer());
+    assertEquals("+b:one -t:two", q.toString());
+
+    try {
+      BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+      q = MultiFieldQueryParserWrapper.parse(queries, fields, flags2,
+          new StandardAnalyzer());
+      fail();
+    } catch (IllegalArgumentException e) {
+      // expected exception, array length differs
+    }
+  }
+
+  public void testAnalyzerReturningNull() throws ParseException {
+    String[] fields = new String[] { "f1", "f2", "f3" };
+    MultiFieldQueryParserWrapper parser = new MultiFieldQueryParserWrapper(
+        fields, new AnalyzerReturningNull());
+    Query q = parser.parse("bla AND blo");
+    assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
+    // the following queries are not affected as their terms are not analyzed
+    // anyway:
+    q = parser.parse("bla*");
+    assertEquals("f1:bla* f2:bla* f3:bla*", q.toString());
+    q = parser.parse("bla~");
+    assertEquals("f1:bla~0.5 f2:bla~0.5 f3:bla~0.5", q.toString());
+    q = parser.parse("[a TO c]");
+    assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString());
+  }
+
+  public void testStopWordSearching() throws Exception {
+    Analyzer analyzer = new StandardAnalyzer();
+    Directory ramDir = new RAMDirectory();
+    IndexWriter iw = new IndexWriter(ramDir, analyzer, true,
+        IndexWriter.MaxFieldLength.LIMITED);
+    Document doc = new Document();
+    doc.add(new Field("body", "blah the footest blah", Field.Store.NO,
+        Field.Index.ANALYZED));
+    iw.addDocument(doc);
+    iw.close();
+
+    MultiFieldQueryParserWrapper mfqp = new MultiFieldQueryParserWrapper(
+        new String[] { "body" }, analyzer);
+    mfqp.setDefaultOperator(QueryParserWrapper.Operator.AND);
+    Query q = mfqp.parse("the footest");
+    IndexSearcher is = new IndexSearcher(ramDir);
+    ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
+    assertEquals(1, hits.length);
+    is.close();
+  }
+
+  /**
+   * Return empty tokens for field "f1".
+   */
+  private static class AnalyzerReturningNull extends Analyzer {
+    StandardAnalyzer stdAnalyzer = new StandardAnalyzer();
+
+    public AnalyzerReturningNull() {
+    }
+
+    public TokenStream tokenStream(String fieldName, Reader reader) {
+      if ("f1".equals(fieldName)) {
+        return new EmptyTokenStream();
+      } else {
+        return stdAnalyzer.tokenStream(fieldName, reader);
+      }
+    }
+
+    private static class EmptyTokenStream extends TokenStream {
+      public Token next(final Token reusableToken) {
+        return null;
+      }
+    }
+  }
+
+}

Propchange: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestMultiFieldQueryParserWrapper.java
------------------------------------------------------------------------------
    svn:eol-style = native
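
For context, a minimal sketch of the static parse form with per-field occurrence flags, mirroring testStaticMethod2 above. Illustrative only:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.queryParser.ParseException;
    import org.apache.lucene.queryParser.original.MultiFieldQueryParserWrapper;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.Query;

    public class MultiFieldWrapperFlagsSketch {
      public static void main(String[] args) throws ParseException {
        String[] fields = { "b", "t" };
        // One Occur flag per field: "b" clauses required, "t" clauses prohibited.
        BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
            BooleanClause.Occur.MUST_NOT };
        Query q = MultiFieldQueryParserWrapper.parse("one two", fields, flags,
            new StandardAnalyzer());
        System.out.println(q);   // +(b:one b:two) -(t:one t:two)
        // fields.length and flags.length must match; otherwise an
        // IllegalArgumentException is thrown (see testStaticMethod2 above).
      }
    }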

Added: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestQPHelper.java?rev=800191&view=auto
==============================================================================
--- lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestQPHelper.java (added)
+++ lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestQPHelper.java Mon Aug  3 03:38:44 2009
@@ -0,0 +1,1130 @@
+package org.apache.lucene.queryParser.original;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.Reader;
+import java.text.Collator;
+import java.text.DateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.KeywordAnalyzer;
+import org.apache.lucene.analysis.LowerCaseTokenizer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.analysis.StopFilter;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.document.DateField;
+import org.apache.lucene.document.DateTools;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.messages.MessageImpl;
+import org.apache.lucene.queryParser.core.QueryNodeException;
+import org.apache.lucene.queryParser.core.messages.QueryParserMessages;
+import org.apache.lucene.queryParser.core.nodes.FuzzyQueryNode;
+import org.apache.lucene.queryParser.core.nodes.QueryNode;
+import org.apache.lucene.queryParser.core.nodes.WildcardQueryNode;
+import org.apache.lucene.queryParser.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryParser.core.processors.QueryNodeProcessorPipeline;
+import org.apache.lucene.queryParser.original.config.OriginalQueryConfigHandler;
+import org.apache.lucene.queryParser.original.config.DefaultOperatorAttribute.Operator;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.FuzzyQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * This test case is a copy of the core Lucene query parser test; it was
+ * adapted to use the new QueryParserHelper instead of the old query parser.
+ * 
+ * Tests QueryParser.
+ */
+public class TestQPHelper extends LuceneTestCase {
+
+  public static Analyzer qpAnalyzer = new QPTestAnalyzer();
+
+  public static class QPTestFilter extends TokenFilter {
+    TermAttribute termAtt;
+    OffsetAttribute offsetAtt;
+
+    /**
+     * Filter which discards the token 'stop' and which expands the token
+     * 'phrase' into 'phrase1 phrase2'
+     */
+    public QPTestFilter(TokenStream in) {
+      super(in);
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
+      offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
+    }
+
+    boolean inPhrase = false;
+    int savedStart = 0, savedEnd = 0;
+
+    public Token next(Token reusableToken) throws IOException {
+      Token token = reusableToken;
+
+      if (inPhrase) {
+        inPhrase = false;
+        token.setTermBuffer("phrase2");
+        token.setStartOffset(savedStart);
+        token.setEndOffset(savedEnd);
+        return reusableToken;
+      } else
+        while ((token = this.input.next(reusableToken)) != null) {
+          if (token.term().equals("phrase")) {
+            inPhrase = true;
+            savedStart = token.startOffset();
+            savedEnd = token.endOffset();
+            token.setTermBuffer("phrase1");
+            token.setStartOffset(savedStart);
+            token.setEndOffset(savedEnd);
+            return token;
+          } else if (!token.term().equals("stop"))
+            return token;
+        }
+
+      return null;
+
+    }
+
+    public boolean incrementToken() throws IOException {
+      if (inPhrase) {
+        inPhrase = false;
+        termAtt.setTermBuffer("phrase2");
+        offsetAtt.setOffset(savedStart, savedEnd);
+        return true;
+      } else
+        while (input.incrementToken()) {
+          if (termAtt.term().equals("phrase")) {
+            inPhrase = true;
+            savedStart = offsetAtt.startOffset();
+            savedEnd = offsetAtt.endOffset();
+            termAtt.setTermBuffer("phrase1");
+            offsetAtt.setOffset(savedStart, savedEnd);
+            return true;
+          } else if (!termAtt.term().equals("stop"))
+            return true;
+        }
+      return false;
+    }
+  }
+
+  public static class QPTestAnalyzer extends Analyzer {
+
+    /** Filters LowerCaseTokenizer with StopFilter. */
+    public final TokenStream tokenStream(String fieldName, Reader reader) {
+      return new QPTestFilter(new LowerCaseTokenizer(reader));
+    }
+  }
+
+  public static class QPTestParser extends OriginalQueryParserHelper {
+    public QPTestParser(Analyzer a) {
+      ((QueryNodeProcessorPipeline)getQueryNodeProcessor())
+          .addProcessor(new QPTestParserQueryNodeProcessor());
+      this.setAnalyzer(a);
+
+    }
+
+    private static class QPTestParserQueryNodeProcessor extends
+        QueryNodeProcessorImpl {
+
+      protected QueryNode postProcessNode(QueryNode node)
+          throws QueryNodeException {
+
+        return node;
+
+      }
+
+      protected QueryNode preProcessNode(QueryNode node)
+          throws QueryNodeException {
+
+        if (node instanceof WildcardQueryNode || node instanceof FuzzyQueryNode) {
+
+          throw new QueryNodeException(new MessageImpl(
+              QueryParserMessages.EMPTY_MESSAGE));
+
+        }
+
+        return node;
+
+      }
+
+      protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
+          throws QueryNodeException {
+
+        return children;
+
+      }
+
+    }
+
+  }
+
+  private int originalMaxClauses;
+
+  public void setUp() throws Exception {
+    super.setUp();
+    originalMaxClauses = BooleanQuery.getMaxClauseCount();
+  }
+
+  public OriginalQueryParserHelper getParser(Analyzer a) throws Exception {
+    if (a == null)
+      a = new SimpleAnalyzer();
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(a);
+
+    qp.setDefaultOperator(Operator.OR);
+
+    return qp;
+
+  }
+
+  public Query getQuery(String query, Analyzer a) throws Exception {
+    return getParser(a).parse(query, "field");
+  }
+
+  public void assertQueryEquals(String query, Analyzer a, String result)
+      throws Exception {
+    Query q = getQuery(query, a);
+    String s = q.toString("field");
+    if (!s.equals(result)) {
+      fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+          + "/");
+    }
+  }
+
+  public void assertQueryEquals(OriginalQueryParserHelper qp, String field,
+      String query, String result) throws Exception {
+    Query q = qp.parse(query, field);
+    String s = q.toString(field);
+    if (!s.equals(result)) {
+      fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+          + "/");
+    }
+  }
+
+  public void assertEscapedQueryEquals(String query, Analyzer a, String result)
+      throws Exception {
+    String escapedQuery = QueryParserUtil.escape(query);
+    if (!escapedQuery.equals(result)) {
+      fail("Query /" + query + "/ yielded /" + escapedQuery + "/, expecting /"
+          + result + "/");
+    }
+  }
+
+  public void assertWildcardQueryEquals(String query, boolean lowercase,
+      String result, boolean allowLeadingWildcard) throws Exception {
+    OriginalQueryParserHelper qp = getParser(null);
+    qp.setLowercaseExpandedTerms(lowercase);
+    qp.setAllowLeadingWildcard(allowLeadingWildcard);
+    Query q = qp.parse(query, "field");
+    String s = q.toString("field");
+    if (!s.equals(result)) {
+      fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
+          + result + "/");
+    }
+  }
+
+  public void assertWildcardQueryEquals(String query, boolean lowercase,
+      String result) throws Exception {
+    assertWildcardQueryEquals(query, lowercase, result, false);
+  }
+
+  public void assertWildcardQueryEquals(String query, String result)
+      throws Exception {
+    OriginalQueryParserHelper qp = getParser(null);
+    Query q = qp.parse(query, "field");
+    String s = q.toString("field");
+    if (!s.equals(result)) {
+      fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
+          + result + "/");
+    }
+  }
+
+  public Query getQueryDOA(String query, Analyzer a) throws Exception {
+    if (a == null)
+      a = new SimpleAnalyzer();
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(a);
+    qp.setDefaultOperator(Operator.AND);
+
+    return qp.parse(query, "field");
+
+  }
+
+  public void assertQueryEqualsDOA(String query, Analyzer a, String result)
+      throws Exception {
+    Query q = getQueryDOA(query, a);
+    String s = q.toString("field");
+    if (!s.equals(result)) {
+      fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+          + "/");
+    }
+  }
+
+  public void testCJK() throws Exception {
+    // Test Ideographic Space - As wide as a CJK character cell (fullwidth)
+    // used google to translate the word "term" to japanese -> 用語
+    assertQueryEquals("term\u3000term\u3000term", null,
+        "term\u0020term\u0020term");
+    assertQueryEquals("??\u3000??\u3000??", null, "??\u0020??\u0020??");
+  }
+
+  public void testSimple() throws Exception {
+    assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
+    assertQueryEquals("term term term", null, "term term term");
+    assertQueryEquals("t�rm term term", new WhitespaceAnalyzer(),
+        "t�rm term term");
+    assertQueryEquals("�mlaut", new WhitespaceAnalyzer(), "�mlaut");
+
+    assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
+    assertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
+
+    assertQueryEquals("a AND b", null, "+a +b");
+    assertQueryEquals("(a AND b)", null, "+a +b");
+    assertQueryEquals("c OR (a AND b)", null, "c (+a +b)");
+
+    assertQueryEquals("a AND NOT b", null, "+a -b");
+
+    assertQueryEquals("a AND -b", null, "+a -b");
+
+    assertQueryEquals("a AND !b", null, "+a -b");
+
+    assertQueryEquals("a && b", null, "+a +b");
+
+    assertQueryEquals("a && ! b", null, "+a -b");
+
+    assertQueryEquals("a OR b", null, "a b");
+    assertQueryEquals("a || b", null, "a b");
+
+    assertQueryEquals("a OR !b", null, "a -b");
+
+    assertQueryEquals("a OR ! b", null, "a -b");
+
+    assertQueryEquals("a OR -b", null, "a -b");
+
+    assertQueryEquals("+term -term term", null, "+term -term term");
+    assertQueryEquals("foo:term AND field:anotherTerm", null,
+        "+foo:term +anotherterm");
+    assertQueryEquals("term AND \"phrase phrase\"", null,
+        "+term +\"phrase phrase\"");
+    assertQueryEquals("\"hello there\"", null, "\"hello there\"");
+    assertTrue(getQuery("a AND b", null) instanceof BooleanQuery);
+    assertTrue(getQuery("hello", null) instanceof TermQuery);
+    assertTrue(getQuery("\"hello there\"", null) instanceof PhraseQuery);
+
+    assertQueryEquals("germ term^2.0", null, "germ term^2.0");
+    assertQueryEquals("(term)^2.0", null, "term^2.0");
+    assertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0");
+    assertQueryEquals("term^2.0", null, "term^2.0");
+    assertQueryEquals("term^2", null, "term^2.0");
+    assertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0");
+    assertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0");
+
+    assertQueryEquals("(foo OR bar) AND (baz OR boo)", null,
+        "+(foo bar) +(baz boo)");
+    assertQueryEquals("((a OR b) AND NOT c) OR d", null, "(+(a b) -c) d");
+    assertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null,
+        "+(apple \"steve jobs\") -(foo bar baz)");
+    assertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
+        "+(title:dog title:cat) -author:\"bob dole\"");
+
+  }
+
+  public void testPunct() throws Exception {
+    Analyzer a = new WhitespaceAnalyzer();
+    assertQueryEquals("a&b", a, "a&b");
+    assertQueryEquals("a&&b", a, "a&&b");
+    assertQueryEquals(".NET", a, ".NET");
+  }
+
+  public void testSlop() throws Exception {
+
+    assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
+    assertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
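+    // a one-term "phrase" parses to a plain TermQuery, so the slop is dropped: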
+    assertQueryEquals("\"term\"~2", null, "term");
+    assertQueryEquals("\" \"~2 germ", null, "germ");
+    assertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
+  }
+
+  public void testNumber() throws Exception {
+    // The numbers go away because SimpleAnalyzer ignores them
+    assertQueryEquals("3", null, "");
+    assertQueryEquals("term 1.0 1 2", null, "term");
+    assertQueryEquals("term term1 term2", null, "term term term");
+
+    Analyzer a = new StandardAnalyzer();
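+    // StandardAnalyzer keeps numeric tokens, so the numbers survive here: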
+    assertQueryEquals("3", a, "3");
+    assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
+    assertQueryEquals("term term1 term2", a, "term term1 term2");
+  }
+
+  public void testWildcard() throws Exception {
+    assertQueryEquals("term*", null, "term*");
+    assertQueryEquals("term*^2", null, "term*^2.0");
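+    // a bare '~' uses FuzzyQuery's default minimum similarity of 0.5,
+    // which is why it is printed as term~0.5: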
+    assertQueryEquals("term~", null, "term~0.5");
+    assertQueryEquals("term~0.7", null, "term~0.7");
+
+    assertQueryEquals("term~^2", null, "term~0.5^2.0");
+
+    assertQueryEquals("term^2~", null, "term~0.5^2.0");
+    assertQueryEquals("term*germ", null, "term*germ");
+    assertQueryEquals("term*germ^3", null, "term*germ^3.0");
+
+    assertTrue(getQuery("term*", null) instanceof PrefixQuery);
+    assertTrue(getQuery("term*^2", null) instanceof PrefixQuery);
+    assertTrue(getQuery("term~", null) instanceof FuzzyQuery);
+    assertTrue(getQuery("term~0.7", null) instanceof FuzzyQuery);
+    FuzzyQuery fq = (FuzzyQuery) getQuery("term~0.7", null);
+    assertEquals(0.7f, fq.getMinSimilarity(), 0.1f);
+    assertEquals(FuzzyQuery.defaultPrefixLength, fq.getPrefixLength());
+    fq = (FuzzyQuery) getQuery("term~", null);
+    assertEquals(0.5f, fq.getMinSimilarity(), 0.1f);
+    assertEquals(FuzzyQuery.defaultPrefixLength, fq.getPrefixLength());
+
+    assertQueryNodeException("term~1.1"); // value > 1, throws exception
+
+    assertTrue(getQuery("term*germ", null) instanceof WildcardQuery);
+
+    /*
+     * Tests to see that wildcard terms are (or are not) properly lower-cased
+     * with the proper parser configuration
+     */
+    // First prefix queries:
+    // by default, convert to lowercase:
+    assertWildcardQueryEquals("Term*", true, "term*");
+    // explicitly set lowercase:
+    assertWildcardQueryEquals("term*", true, "term*");
+    assertWildcardQueryEquals("Term*", true, "term*");
+    assertWildcardQueryEquals("TERM*", true, "term*");
+    // explicitly disable lowercase conversion:
+    assertWildcardQueryEquals("term*", false, "term*");
+    assertWildcardQueryEquals("Term*", false, "Term*");
+    assertWildcardQueryEquals("TERM*", false, "TERM*");
+    // Then 'full' wildcard queries:
+    // by default, convert to lowercase:
+    assertWildcardQueryEquals("Te?m", "te?m");
+    // explicitly set lowercase:
+    assertWildcardQueryEquals("te?m", true, "te?m");
+    assertWildcardQueryEquals("Te?m", true, "te?m");
+    assertWildcardQueryEquals("TE?M", true, "te?m");
+    assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
+    // explicitly disable lowercase conversion:
+    assertWildcardQueryEquals("te?m", false, "te?m");
+    assertWildcardQueryEquals("Te?m", false, "Te?m");
+    assertWildcardQueryEquals("TE?M", false, "TE?M");
+    assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+    // Fuzzy queries:
+    assertWildcardQueryEquals("Term~", "term~0.5");
+    assertWildcardQueryEquals("Term~", true, "term~0.5");
+    assertWildcardQueryEquals("Term~", false, "Term~0.5");
+    // Range queries:
+
+    // TODO: implement this on QueryParser
+    // Q0002E_INVALID_SYNTAX_CANNOT_PARSE: Syntax Error, cannot parse '[A TO
+    // C]': Lexical error at line 1, column 1. Encountered: "[" (91), after
+    // : ""
+    assertWildcardQueryEquals("[A TO C]", "[a TO c]");
+    assertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
+    assertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
+    // Test suffix queries: first disallow
+    try {
+      assertWildcardQueryEquals("*Term", true, "*term");
+      fail();
+    } catch (QueryNodeException pe) {
+      // expected exception
+    }
+    try {
+      assertWildcardQueryEquals("?Term", true, "?term");
+      fail();
+    } catch (QueryNodeException pe) {
+      // expected exception
+    }
+    // Test suffix queries: then allow
+    assertWildcardQueryEquals("*Term", true, "*term", true);
+    assertWildcardQueryEquals("?Term", true, "?term", true);
+  }
+
+  public void testLeadingWildcardType() throws Exception {
+    OriginalQueryParserHelper qp = getParser(null);
+    qp.setAllowLeadingWildcard(true);
+    assertEquals(WildcardQuery.class, qp.parse("t*erm*", "field").getClass());
+    assertEquals(WildcardQuery.class, qp.parse("?term*", "field").getClass());
+    assertEquals(WildcardQuery.class, qp.parse("*term*", "field").getClass());
+  }
+
+  public void testQPA() throws Exception {
+    assertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
+    assertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
+
+    assertQueryEquals("term term term", qpAnalyzer, "term term term");
+    assertQueryEquals("term +stop term", qpAnalyzer, "term term");
+    assertQueryEquals("term -stop term", qpAnalyzer, "term term");
+
+    assertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
+    assertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
+    assertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
+
+    assertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
+    assertQueryEquals("term phrase term", qpAnalyzer,
+        "term \"phrase1 phrase2\" term");
+
+    assertQueryEquals("term AND NOT phrase term", qpAnalyzer,
+        "+term -\"phrase1 phrase2\" term");
+
+    assertQueryEquals("stop^3", qpAnalyzer, "");
+    assertQueryEquals("stop", qpAnalyzer, "");
+    assertQueryEquals("(stop)^3", qpAnalyzer, "");
+    assertQueryEquals("((stop))^3", qpAnalyzer, "");
+    assertQueryEquals("(stop^3)", qpAnalyzer, "");
+    assertQueryEquals("((stop)^3)", qpAnalyzer, "");
+    assertQueryEquals("(stop)", qpAnalyzer, "");
+    assertQueryEquals("((stop))", qpAnalyzer, "");
+    assertTrue(getQuery("term term term", qpAnalyzer) instanceof BooleanQuery);
+    assertTrue(getQuery("term +stop", qpAnalyzer) instanceof TermQuery);
+  }
+
+  public void testRange() throws Exception {
+    assertQueryEquals("[ a TO z]", null, "[a TO z]");
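+    // the parser's default rewrite method for range queries is the
+    // constant-score auto rewrite: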
+    assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)getQuery("[ a TO z]", null)).getRewriteMethod());
+
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    
+    qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+    assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE,((TermRangeQuery)qp.parse("[ a TO z]", "field")).getRewriteMethod());
+
+    assertQueryEquals("[ a TO z ]", null, "[a TO z]");
+    assertQueryEquals("{ a TO z}", null, "{a TO z}");
+    assertQueryEquals("{ a TO z }", null, "{a TO z}");
+    assertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
+    assertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
+    assertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
+    assertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
+    assertQueryEquals("gack ( bar blar { a TO z}) ", null,
+        "gack (bar blar {a TO z})");
+  }
+
+  public void testFarsiRangeCollating() throws Exception {
+
+    RAMDirectory ramDir = new RAMDirectory();
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true,
+        IndexWriter.MaxFieldLength.LIMITED);
+    Document doc = new Document();
+    doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES,
+        Field.Index.UN_TOKENIZED));
+    iw.addDocument(doc);
+    iw.close();
+    IndexSearcher is = new IndexSearcher(ramDir);
+
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(new WhitespaceAnalyzer());
+
+    // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
+    // RuleBasedCollator. However, the Arabic Locale seems to order the Farsi
+    // characters properly.
+    Collator c = Collator.getInstance(new Locale("ar"));
+    qp.setRangeCollator(c);
+
+    // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
+    // orders the U+0698 character before the U+0633 character, so the single
+    // index Term below should NOT be returned by a ConstantScoreRangeQuery
+    // with a Farsi Collator (or an Arabic one for the case when Farsi is not
+    // supported).
+
+    // Test ConstantScoreRangeQuery
+    qp.setMultiTermRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+    ScoreDoc[] result = is.search(qp.parse("[ \u062F TO \u0698 ]", "content"),
+        null, 1000).scoreDocs;
+    assertEquals("The index Term should not be included.", 0, result.length);
+
+    result = is.search(qp.parse("[ \u0633 TO \u0638 ]", "content"), null, 1000).scoreDocs;
+    assertEquals("The index Term should be included.", 1, result.length);
+
+    // Test RangeQuery
+    qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+    result = is.search(qp.parse("[ \u062F TO \u0698 ]", "content"), null, 1000).scoreDocs;
+    assertEquals("The index Term should not be included.", 0, result.length);
+
+    result = is.search(qp.parse("[ \u0633 TO \u0638 ]", "content"), null, 1000).scoreDocs;
+    assertEquals("The index Term should be included.", 1, result.length);
+
+    is.close();
+  }
+
+  /** for testing legacy DateField support */
+  private String getLegacyDate(String s) throws Exception {
+    DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
+    return DateField.dateToString(df.parse(s));
+  }
+
+  /** for testing DateTools support */
+  private String getDate(String s, DateTools.Resolution resolution)
+      throws Exception {
+    DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
+    return getDate(df.parse(s), resolution);
+  }
+
+  /** for testing DateTools support */
+  private String getDate(Date d, DateTools.Resolution resolution)
+      throws Exception {
+    if (resolution == null) {
+      return DateField.dateToString(d);
+    } else {
+      return DateTools.dateToString(d, resolution);
+    }
+  }
+
+  private String getLocalizedDate(int year, int month, int day,
+      boolean extendLastDate) {
+    DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
+    Calendar calendar = Calendar.getInstance();
+    calendar.set(year, month, day);
+    if (extendLastDate) {
+      calendar.set(Calendar.HOUR_OF_DAY, 23);
+      calendar.set(Calendar.MINUTE, 59);
+      calendar.set(Calendar.SECOND, 59);
+      calendar.set(Calendar.MILLISECOND, 999);
+    }
+    return df.format(calendar.getTime());
+  }
+
+  /** for testing legacy DateField support */
+  public void testLegacyDateRange() throws Exception {
+    String startDate = getLocalizedDate(2002, 1, 1, false);
+    String endDate = getLocalizedDate(2002, 1, 4, false);
+    Calendar endDateExpected = Calendar.getInstance();
+    endDateExpected.set(2002, 1, 4, 23, 59, 59);
+    endDateExpected.set(Calendar.MILLISECOND, 999);
+    assertQueryEquals("[ " + startDate + " TO " + endDate + "]", null, "["
+        + getLegacyDate(startDate) + " TO "
+        + DateField.dateToString(endDateExpected.getTime()) + "]");
+    assertQueryEquals("{  " + startDate + "    " + endDate + "   }", null, "{"
+        + getLegacyDate(startDate) + " TO " + getLegacyDate(endDate) + "}");
+  }
+
+  public void testDateRange() throws Exception {
+    String startDate = getLocalizedDate(2002, 1, 1, false);
+    String endDate = getLocalizedDate(2002, 1, 4, false);
+    Calendar endDateExpected = Calendar.getInstance();
+    endDateExpected.set(2002, 1, 4, 23, 59, 59);
+    endDateExpected.set(Calendar.MILLISECOND, 999);
+    final String defaultField = "default";
+    final String monthField = "month";
+    final String hourField = "hour";
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+
+    // Don't set any date resolution and verify that DateField is used
+    assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
+        endDateExpected.getTime(), null);
+
+    Map<CharSequence, DateTools.Resolution> dateRes =  new HashMap<CharSequence, DateTools.Resolution>();
+    
+    // set a field specific date resolution    
+    dateRes.put(monthField, DateTools.Resolution.MONTH);
+    qp.setDateResolution(dateRes);
+
+    // DateField should still be used for defaultField
+    assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
+        endDateExpected.getTime(), null);
+
+    // set default date resolution to MILLISECOND
+    qp.setDateResolution(DateTools.Resolution.MILLISECOND);
+
+    // set second field specific date resolution
+    dateRes.put(hourField, DateTools.Resolution.HOUR);
+    qp.setDateResolution(dateRes);
+
+    // no field specific date resolution has been set for this field,
+    // so verify that the default resolution is used
+    assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
+        endDateExpected.getTime(), DateTools.Resolution.MILLISECOND);
+
+    // verify if field specific date resolutions are used for these two
+    // fields
+    assertDateRangeQueryEquals(qp, monthField, startDate, endDate,
+        endDateExpected.getTime(), DateTools.Resolution.MONTH);
+
+    assertDateRangeQueryEquals(qp, hourField, startDate, endDate,
+        endDateExpected.getTime(), DateTools.Resolution.HOUR);
+  }
+
+  public void assertDateRangeQueryEquals(OriginalQueryParserHelper qp,
+      String field, String startDate, String endDate, Date endDateInclusive,
+      DateTools.Resolution resolution) throws Exception {
+    assertQueryEquals(qp, field, field + ":[" + startDate + " TO " + endDate
+        + "]", "[" + getDate(startDate, resolution) + " TO "
+        + getDate(endDateInclusive, resolution) + "]");
+    assertQueryEquals(qp, field, field + ":{" + startDate + " TO " + endDate
+        + "}", "{" + getDate(startDate, resolution) + " TO "
+        + getDate(endDate, resolution) + "}");
+  }
+
+  public void testEscaped() throws Exception {
+    Analyzer a = new WhitespaceAnalyzer();
+
+    /*
+     * assertQueryEquals("\\[brackets", a, "\\[brackets");
+     * assertQueryEquals("\\[brackets", null, "brackets");
+     * assertQueryEquals("\\\\", a, "\\\\"); assertQueryEquals("\\+blah", a,
+     * "\\+blah"); assertQueryEquals("\\(blah", a, "\\(blah");
+     * 
+     * assertQueryEquals("\\-blah", a, "\\-blah"); assertQueryEquals("\\!blah",
+     * a, "\\!blah"); assertQueryEquals("\\{blah", a, "\\{blah");
+     * assertQueryEquals("\\}blah", a, "\\}blah"); assertQueryEquals("\\:blah",
+     * a, "\\:blah"); assertQueryEquals("\\^blah", a, "\\^blah");
+     * assertQueryEquals("\\[blah", a, "\\[blah"); assertQueryEquals("\\]blah",
+     * a, "\\]blah"); assertQueryEquals("\\\"blah", a, "\\\"blah");
+     * assertQueryEquals("\\(blah", a, "\\(blah"); assertQueryEquals("\\)blah",
+     * a, "\\)blah"); assertQueryEquals("\\~blah", a, "\\~blah");
+     * assertQueryEquals("\\*blah", a, "\\*blah"); assertQueryEquals("\\?blah",
+     * a, "\\?blah"); //assertQueryEquals("foo \\&\\& bar", a,
+     * "foo \\&\\& bar"); //assertQueryEquals("foo \\|| bar", a,
+     * "foo \\|| bar"); //assertQueryEquals("foo \\AND bar", a,
+     * "foo \\AND bar");
+     */
+
+    assertQueryEquals("\\a", a, "a");
+
+    assertQueryEquals("a\\-b:c", a, "a-b:c");
+    assertQueryEquals("a\\+b:c", a, "a+b:c");
+    assertQueryEquals("a\\:b:c", a, "a:b:c");
+    assertQueryEquals("a\\\\b:c", a, "a\\b:c");
+
+    assertQueryEquals("a:b\\-c", a, "a:b-c");
+    assertQueryEquals("a:b\\+c", a, "a:b+c");
+    assertQueryEquals("a:b\\:c", a, "a:b:c");
+    assertQueryEquals("a:b\\\\c", a, "a:b\\c");
+
+    assertQueryEquals("a:b\\-c*", a, "a:b-c*");
+    assertQueryEquals("a:b\\+c*", a, "a:b+c*");
+    assertQueryEquals("a:b\\:c*", a, "a:b:c*");
+
+    assertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
+
+    assertQueryEquals("a:b\\-?c", a, "a:b-?c");
+    assertQueryEquals("a:b\\+?c", a, "a:b+?c");
+    assertQueryEquals("a:b\\:?c", a, "a:b:?c");
+
+    assertQueryEquals("a:b\\\\?c", a, "a:b\\?c");
+
+    assertQueryEquals("a:b\\-c~", a, "a:b-c~0.5");
+    assertQueryEquals("a:b\\+c~", a, "a:b+c~0.5");
+    assertQueryEquals("a:b\\:c~", a, "a:b:c~0.5");
+    assertQueryEquals("a:b\\\\c~", a, "a:b\\c~0.5");
+
+    // TODO: implement Range queries on QueryParser
+    assertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]");
+    assertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
+    assertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
+
+    assertQueryEquals(
+        "[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]",
+        a, "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
+
+    assertQueryEquals("a\\\\\\+b", a, "a\\+b");
+
+    assertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
+    assertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
+    assertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
+
+    assertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
+
+    assertQueryNodeException("XY\\"); // there must be a character after the
+    // escape char
+
+    // test unicode escaping
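+    // the escaped code points below decode to plain characters (u0062 = 'b', u005A = 'Z'):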
+    assertQueryEquals("a\\u0062c", a, "abc");
+    assertQueryEquals("XY\\u005a", a, "XYZ");
+    assertQueryEquals("XY\\u005A", a, "XYZ");
+    assertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
+
+    assertQueryNodeException("XY\\u005G"); // test non-hex character in escaped
+    // unicode sequence
+    assertQueryNodeException("XY\\u005"); // test incomplete escaped unicode
+    // sequence
+
+    // Tests bug LUCENE-800
+    assertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
+    assertQueryNodeException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing
+    // parenthesis
+    assertQueryEquals("\\*", a, "*");
+    assertQueryEquals("\\\\", a, "\\"); // escaped backslash
+
+    assertQueryNodeException("\\"); // a backslash must always be escaped
+
+    // LUCENE-1189
+    assertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
+  }
+
+  public void testQueryStringEscaping() throws Exception {
+    Analyzer a = new WhitespaceAnalyzer();
+
+    assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
+    assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
+    assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+    assertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
+
+    assertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
+    assertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
+    assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+    assertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
+
+    assertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
+    assertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
+    assertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
+
+    assertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
+
+    assertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
+    assertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
+    assertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
+
+    assertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
+
+    assertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
+    assertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
+    assertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
+    assertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
+
+    assertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
+    assertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
+    assertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
+
+    // LUCENE-881
+    assertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
+    assertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
+  }
+
+  public void testTabNewlineCarriageReturn() throws Exception {
+    assertQueryEqualsDOA("+weltbank +worlbank", null, "+weltbank +worlbank");
+
+    assertQueryEqualsDOA("+weltbank\n+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \n+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \n +worlbank", null, "+weltbank +worlbank");
+
+    assertQueryEqualsDOA("+weltbank\r+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \r+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \r +worlbank", null, "+weltbank +worlbank");
+
+    assertQueryEqualsDOA("+weltbank\r\n+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \r\n+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \r\n +worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \r \n +worlbank", null,
+        "+weltbank +worlbank");
+
+    assertQueryEqualsDOA("+weltbank\t+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \t+worlbank", null, "+weltbank +worlbank");
+    assertQueryEqualsDOA("weltbank \t +worlbank", null, "+weltbank +worlbank");
+  }
+
+  public void testSimpleDAO() throws Exception {
+    assertQueryEqualsDOA("term term term", null, "+term +term +term");
+    assertQueryEqualsDOA("term +term term", null, "+term +term +term");
+    assertQueryEqualsDOA("term term +term", null, "+term +term +term");
+    assertQueryEqualsDOA("term +term +term", null, "+term +term +term");
+    assertQueryEqualsDOA("-term term term", null, "-term +term +term");
+  }
+
+  public void testBoost() throws Exception {
+    StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(
+        new String[] { "on" });
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(oneStopAnalyzer);
+
+    Query q = qp.parse("on^1.0", "field");
+    assertNotNull(q);
+    q = qp.parse("\"hello\"^2.0", "field");
+    assertNotNull(q);
+    assertEquals(q.getBoost(), (float) 2.0, (float) 0.5);
+    q = qp.parse("hello^2.0", "field");
+    assertNotNull(q);
+    assertEquals(q.getBoost(), (float) 2.0, (float) 0.5);
+    q = qp.parse("\"on\"^1.0", "field");
+    assertNotNull(q);
+
+    OriginalQueryParserHelper qp2 = new OriginalQueryParserHelper();
+    qp2.setAnalyzer(new StandardAnalyzer());
+
+    q = qp2.parse("the^3", "field");
+    // "the" is a stop word so the result is an empty query:
+    assertNotNull(q);
+    assertEquals("", q.toString());
+    assertEquals(1.0f, q.getBoost(), 0.01f);
+  }
+
+  public void assertQueryNodeException(String queryString) throws Exception {
+    try {
+      getQuery(queryString, null);
+    } catch (QueryNodeException expected) {
+      return;
+    }
+    fail("QueryNodeException expected, not thrown");
+  }
+
+  public void testException() throws Exception {
+    assertQueryNodeException("\"some phrase");
+    assertQueryNodeException("(foo bar");
+    assertQueryNodeException("foo bar))");
+    assertQueryNodeException("field:term:with:colon some more terms");
+    assertQueryNodeException("(sub query)^5.0^2.0 plus more");
+    assertQueryNodeException("secret AND illegal) AND access:confidential");
+  }
+
+  public void testCustomQueryParserWildcard() {
+    try {
+      new QPTestParser(new WhitespaceAnalyzer()).parse("a?t", "contents");
+      fail("Wildcard queries should not be allowed");
+    } catch (QueryNodeException expected) {
+      // expected exception
+    }
+  }
+
+  public void testCustomQueryParserFuzzy() throws Exception {
+    try {
+      new QPTestParser(new WhitespaceAnalyzer()).parse("xunit~", "contents");
+      fail("Fuzzy queries should not be allowed");
+    } catch (QueryNodeException expected) {
+      // expected exception
+    }
+  }
+
+  public void testBooleanQuery() throws Exception {
+    BooleanQuery.setMaxClauseCount(2);
+    try {
+      OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+      qp.setAnalyzer(new WhitespaceAnalyzer());
+
+      qp.parse("one two three", "field");
+      fail("QueryNodeException expected due to too many boolean clauses");
+    } catch (QueryNodeException expected) {
+      // too many boolean clauses, so a QueryNodeException is expected
+    }
+  }
+
+  /**
+   * This test differs from TestPrecedenceQueryParser
+   */
+  public void testPrecedence() throws Exception {
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(new WhitespaceAnalyzer());
+
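+    // unlike the precedence parser, AND does not bind tighter than OR here,
+    // so both strings parse to the same four required clauses: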
+    Query query1 = qp.parse("A AND B OR C AND D", "field");
+    Query query2 = qp.parse("+A +B +C +D", "field");
+
+    assertEquals(query1, query2);
+  }
+
+  public void testLocalDateFormat() throws IOException, QueryNodeException {
+
+    RAMDirectory ramDir = new RAMDirectory();
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true,
+        IndexWriter.MaxFieldLength.LIMITED);
+    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
+    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
+    iw.close();
+    IndexSearcher is = new IndexSearcher(ramDir);
+    assertHits(1, "[12/1/2005 TO 12/3/2005]", is);
+    assertHits(2, "[12/1/2005 TO 12/4/2005]", is);
+    assertHits(1, "[12/3/2005 TO 12/4/2005]", is);
+    assertHits(1, "{12/1/2005 TO 12/3/2005}", is);
+    assertHits(1, "{12/1/2005 TO 12/4/2005}", is);
+    assertHits(0, "{12/3/2005 TO 12/4/2005}", is);
+    is.close();
+  }
+
+  public void testStarParsing() throws Exception {
+    // final int[] type = new int[1];
+    // OriginalQueryParserHelper qp = new OriginalQueryParserHelper("field", new
+    // WhitespaceAnalyzer()) {
+    // protected Query getWildcardQuery(String field, String termStr) throws
+    // ParseException {
+    // // override error checking of superclass
+    // type[0]=1;
+    // return new TermQuery(new Term(field,termStr));
+    // }
+    // protected Query getPrefixQuery(String field, String termStr) throws
+    // ParseException {
+    // // override error checking of superclass
+    // type[0]=2;
+    // return new TermQuery(new Term(field,termStr));
+    // }
+    //
+    // protected Query getFieldQuery(String field, String queryText) throws
+    // ParseException {
+    // type[0]=3;
+    // return super.getFieldQuery(field, queryText);
+    // }
+    // };
+    //
+    // TermQuery tq;
+    //
+    // tq = (TermQuery)qp.parse("foo:zoo*");
+    // assertEquals("zoo",tq.getTerm().text());
+    // assertEquals(2,type[0]);
+    //
+    // tq = (TermQuery)qp.parse("foo:zoo*^2");
+    // assertEquals("zoo",tq.getTerm().text());
+    // assertEquals(2,type[0]);
+    // assertEquals(tq.getBoost(),2,0);
+    //
+    // tq = (TermQuery)qp.parse("foo:*");
+    // assertEquals("*",tq.getTerm().text());
+    // assertEquals(1,type[0]); // could be a valid prefix query in the
+    // future too
+    //
+    // tq = (TermQuery)qp.parse("foo:*^2");
+    // assertEquals("*",tq.getTerm().text());
+    // assertEquals(1,type[0]);
+    // assertEquals(tq.getBoost(),2,0);
+    //
+    // tq = (TermQuery)qp.parse("*:foo");
+    // assertEquals("*",tq.getTerm().field());
+    // assertEquals("foo",tq.getTerm().text());
+    // assertEquals(3,type[0]);
+    //
+    // tq = (TermQuery)qp.parse("*:*");
+    // assertEquals("*",tq.getTerm().field());
+    // assertEquals("*",tq.getTerm().text());
+    // assertEquals(1,type[0]); // could be handled as a prefix query in the
+    // future
+    //
+    // tq = (TermQuery)qp.parse("(*:*)");
+    // assertEquals("*",tq.getTerm().field());
+    // assertEquals("*",tq.getTerm().text());
+    // assertEquals(1,type[0]);
+
+  }
+
+  public void testStopwords() throws Exception {
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(
+        new StopAnalyzer(new String[] { "the", "foo" }));
+
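+    // "the" and "foo" are both stop words, so this query analyzes away to an
+    // empty BooleanQuery: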
+    Query result = qp.parse("a:the OR a:foo", "a");
+    assertNotNull("result is null and it shouldn't be", result);
+    assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
+    assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: "
+        + 0, ((BooleanQuery) result).clauses().size() == 0);
+    result = qp.parse("a:woo OR a:the", "a");
+    assertNotNull("result is null and it shouldn't be", result);
+    assertTrue("result is not a TermQuery", result instanceof TermQuery);
+    result = qp.parse(
+        "(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)",
+        "a");
+    assertNotNull("result is null and it shouldn't be", result);
+    assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
+    System.out.println("Result: " + result);
+    assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: "
+        + 2, ((BooleanQuery) result).clauses().size() == 2);
+  }
+
+  public void testPositionIncrement() throws Exception {
+    boolean dflt = StopFilter.getEnablePositionIncrementsDefault();
+    StopFilter.setEnablePositionIncrementsDefault(true);
+    try {
+      OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+      qp.setAnalyzer(
+          new StopAnalyzer(new String[] { "the", "in", "are", "this" }));
+
+      qp.setEnablePositionIncrements(true);
+
+      String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
+      // 0 2 5 7 8
+      int expectedPositions[] = { 1, 3, 4, 6, 9 };
+      PhraseQuery pq = (PhraseQuery) qp.parse(qtxt, "a");
+      // System.out.println("Query text: "+qtxt);
+      // System.out.println("Result: "+pq);
+      Term t[] = pq.getTerms();
+      int pos[] = pq.getPositions();
+      for (int i = 0; i < t.length; i++) {
+        // System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
+        assertEquals("term " + i + " = " + t[i] + " has wrong term-position!",
+            expectedPositions[i], pos[i]);
+      }
+
+    } finally {
+      StopFilter.setEnablePositionIncrementsDefault(dflt);
+    }
+  }
+
+  public void testMatchAllDocs() throws Exception {
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(new WhitespaceAnalyzer());
+
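+    // "*:*" is recognized specially and produces a MatchAllDocsQuery rather
+    // than a wildcard query: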
+    assertEquals(new MatchAllDocsQuery(), qp.parse("*:*", "field"));
+    assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)", "field"));
+    BooleanQuery bq = (BooleanQuery) qp.parse("+*:* -*:*", "field");
+    assertTrue(bq.getClauses()[0].getQuery() instanceof MatchAllDocsQuery);
+    assertTrue(bq.getClauses()[1].getQuery() instanceof MatchAllDocsQuery);
+  }
+
+  private void assertHits(int expected, String query, IndexSearcher is)
+      throws IOException, QueryNodeException {
+    OriginalQueryParserHelper qp = new OriginalQueryParserHelper();
+    qp.setAnalyzer(new WhitespaceAnalyzer());
+    qp.setLocale(Locale.ENGLISH);
+
+    Query q = qp.parse(query, "date");
+    ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
+    assertEquals(expected, hits.length);
+  }
+
+  private static void addDateDoc(String content, int year, int month, int day,
+      int hour, int minute, int second, IndexWriter iw) throws IOException {
+    Document d = new Document();
+    d.add(new Field("f", content, Field.Store.YES, Field.Index.ANALYZED));
+    Calendar cal = Calendar.getInstance();
+    cal.set(year, month - 1, day, hour, minute, second);
+    d.add(new Field("date", DateField.dateToString(cal.getTime()),
+        Field.Store.YES, Field.Index.NOT_ANALYZED));
+    iw.addDocument(d);
+  }
+
+  public void tearDown() throws Exception {
+    super.tearDown();
+    BooleanQuery.setMaxClauseCount(originalMaxClauses);
+  }
+
+}

Propchange: lucene/java/trunk/contrib/queryparser/src/test/org/apache/lucene/queryParser/original/TestQPHelper.java
------------------------------------------------------------------------------
    svn:eol-style = native


