lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [1/5] lucenenet git commit: BUG: Lucene.Net.Automaton (BasicAutomata + BasicOperations + MinimizationOperations + SpecialOperations): Corrected accessibility from internal to public and removed InternalsVisibleTo attributes unneeded as a result of these changes
Date Sat, 16 Sep 2017 06:26:44 GMT
Repository: lucenenet
Updated Branches:
  refs/heads/master b781e1178 -> 234c9b4c3


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
index f29882a..00df67c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
@@ -184,7 +184,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, false));
@@ -224,7 +224,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, false));
@@ -604,7 +604,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.ignoreCase = ignoreCase;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, ignoreCase));
@@ -679,7 +679,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.ignoreCase = ignoreCase;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
                 TokenStream syns = new SynonymFilter(tokenizer, map, ignoreCase);
@@ -724,7 +724,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.ignoreCase = ignoreCase;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, ignoreCase));
@@ -774,7 +774,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.ignoreCase = ignoreCase;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, ignoreCase));
@@ -813,7 +813,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
true);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -895,7 +895,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -928,7 +928,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -961,7 +961,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -992,7 +992,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -1025,7 +1025,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -1094,7 +1094,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -1125,7 +1125,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -1157,7 +1157,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));
@@ -1188,7 +1188,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, true));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestWordnetSynonymParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestWordnetSynonymParser.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestWordnetSynonymParser.cs
index f545114..cc372cc 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestWordnetSynonymParser.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestWordnetSynonymParser.cs
@@ -57,7 +57,7 @@ namespace Lucene.Net.Analysis.Synonym
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer,
map, false));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Tr/TestTurkishLowerCaseFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Tr/TestTurkishLowerCaseFilter.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Tr/TestTurkishLowerCaseFilter.cs
index e2cab3f..a49a557 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Tr/TestTurkishLowerCaseFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Tr/TestTurkishLowerCaseFilter.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Analysis.Tr
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new TurkishLowerCaseFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
index 79249c8..87b8c22 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
@@ -151,7 +151,7 @@ namespace Lucene.Net.Analysis.Util
 
         private sealed class AnalyzerAnonymousInnerClassHelper : Analyzer
         {
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new LetterTokenizerAnonymousInnerClassHelper(TEST_VERSION_CURRENT,
reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -214,7 +214,7 @@ namespace Lucene.Net.Analysis.Util
 
         private sealed class AnalyzerAnonymousInnerClassHelper2 : Analyzer
         {
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new LetterTokenizerAnonymousInnerClassHelper2(TEST_VERSION_CURRENT,
reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -260,7 +260,7 @@ namespace Lucene.Net.Analysis.Util
             public AnalyzerAnonymousInnerClassHelper3()
             { }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new NumberAndSurrogatePairTokenizer(TEST_VERSION_CURRENT,
reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
index 2bc4ab3..81c5b1d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
@@ -71,7 +71,7 @@ namespace Lucene.Net.Analysis.Util
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new ElisionFilter(tokenizer,
FrenchAnalyzer.DEFAULT_ARTICLES));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerTest.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerTest.cs
index 7f1b0ea..c234e32 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerTest.cs
@@ -126,7 +126,7 @@ namespace Lucene.Net.Analysis.Wikipedia
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new WikipediaTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -152,7 +152,7 @@ namespace Lucene.Net.Analysis.Wikipedia
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 Tokenizer tokenizer = new WikipediaTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
index 03abf65..2d3c668 100644
--- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
@@ -2060,7 +2060,7 @@ namespace Lucene.Net.Search.Highlight
          *      java.io.Reader)
          */
 
-        protected internal override TokenStreamComponents CreateComponents(String arg0, TextReader
arg1)
+        protected override TokenStreamComponents CreateComponents(String arg0, TextReader
arg1)
         {
             Tokenizer stream = new MockTokenizer(arg1, MockTokenizer.SIMPLE, true);
             stream.AddAttribute<ICharTermAttribute>();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
index c7a19cc..0e0d585 100644
--- a/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
@@ -56,7 +56,7 @@ namespace Lucene.Net.Search.Highlight
 
         internal class AnalyzerAnonymousHelper : Analyzer
         {
-            protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader
reader)
             {
                 MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE,
false);
                 tokenizer.EnableChecks = (false);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
index 21b4a98..4e8f696 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
@@ -209,7 +209,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
         internal sealed class BigramAnalyzer : Analyzer
         {
-            protected internal override TokenStreamComponents CreateComponents(String fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(String fieldName, TextReader
reader)
             {
                 return new TokenStreamComponents(new BasicNGramTokenizer(reader));
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
index 5dc704f..029fd3d 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
@@ -366,7 +366,7 @@ namespace Lucene.Net.Search.VectorHighlight
                 this.tokens = tokens;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(String fieldName,
TextReader reader)
+            protected override TokenStreamComponents CreateComponents(String fieldName, TextReader
reader)
             {
                 Tokenizer ts = new TokenizerAnonymousHelper(Token.TOKEN_ATTRIBUTE_FACTORY,
reader, tokens);
                 return new TokenStreamComponents(ts);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Properties/AssemblyInfo.cs b/src/Lucene.Net/Properties/AssemblyInfo.cs
index f04b339..29b4581 100644
--- a/src/Lucene.Net/Properties/AssemblyInfo.cs
+++ b/src/Lucene.Net/Properties/AssemblyInfo.cs
@@ -42,12 +42,7 @@ using System.Runtime.CompilerServices;
 // must update overridden methods to match.
 [assembly: InternalsVisibleTo("Lucene.Net.Tests")]
 [assembly: InternalsVisibleTo("Lucene.Net.TestFramework")]
-[assembly: InternalsVisibleTo("Lucene.Net.Highlighter")] // For Automaton
-[assembly: InternalsVisibleTo("Lucene.Net.ICU")] // For Automaton
 [assembly: InternalsVisibleTo("Lucene.Net.Misc")]
-[assembly: InternalsVisibleTo("Lucene.Net.Suggest")] // For Automaton
-[assembly: InternalsVisibleTo("Lucene.Net.Tests.Analysis.Common")] // For Automaton
-[assembly: InternalsVisibleTo("Lucene.Net.Tests.Highlighter")] // For Automaton
 [assembly: InternalsVisibleTo("Lucene.Net.Tests.ICU")] // For Analysis.Util.TestSegmentingTokenizerBase
 [assembly: InternalsVisibleTo("Lucene.Net.Tests.Misc")]
 [assembly: InternalsVisibleTo("Lucene.Net.Tests.QueryParser")]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net/Util/Automaton/BasicAutomata.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Util/Automaton/BasicAutomata.cs b/src/Lucene.Net/Util/Automaton/BasicAutomata.cs
index d7b50e0..fcebb46 100644
--- a/src/Lucene.Net/Util/Automaton/BasicAutomata.cs
+++ b/src/Lucene.Net/Util/Automaton/BasicAutomata.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Util.Automaton
     /// <para/>
     /// @lucene.experimental
     /// </summary>
-    internal sealed class BasicAutomata
+    public sealed class BasicAutomata
     {
         private BasicAutomata()
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net/Util/Automaton/BasicOperations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs b/src/Lucene.Net/Util/Automaton/BasicOperations.cs
index bc4b488..d6efcb9 100644
--- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs
+++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs
@@ -41,12 +41,8 @@ namespace Lucene.Net.Util.Automaton
     /// <para/>
     /// @lucene.experimental
     /// </summary>
-    internal sealed class BasicOperations
+    public static class BasicOperations // LUCENENET specific - made static since all members
are static
     {
-        private BasicOperations()
-        {
-        }
-
         /// <summary>
         /// Returns an automaton that accepts the concatenation of the languages of the
         /// given automata.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net/Util/Automaton/MinimizationOperations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Util/Automaton/MinimizationOperations.cs b/src/Lucene.Net/Util/Automaton/MinimizationOperations.cs
index 2d4490b..b71c468 100644
--- a/src/Lucene.Net/Util/Automaton/MinimizationOperations.cs
+++ b/src/Lucene.Net/Util/Automaton/MinimizationOperations.cs
@@ -38,12 +38,8 @@ namespace Lucene.Net.Util.Automaton
     /// <para/>
     /// @lucene.experimental
     /// </summary>
-    internal sealed class MinimizationOperations
+    public static class MinimizationOperations // LUCENENET specific - made static since
all members are static
     {
-        private MinimizationOperations()
-        {
-        }
-
         /// <summary>
         /// Minimizes (and determinizes if not already deterministic) the given
         /// automaton.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Util/Automaton/SpecialOperations.cs b/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
index d5f75db..b312fb9 100644
--- a/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
+++ b/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
@@ -1,5 +1,4 @@
 using Lucene.Net.Support;
-using System.Collections;
 using System.Collections.Generic;
 using System.Linq;
 using System.Text;
@@ -42,12 +41,8 @@ namespace Lucene.Net.Util.Automaton
     /// <para/>
     /// @lucene.experimental
     /// </summary>
-    internal sealed class SpecialOperations
+    public static class SpecialOperations // LUCENENET specific - made static since all members
are static
     {
-        private SpecialOperations()
-        {
-        }
-
         /// <summary>
         /// Finds the largest entry whose value is less than or equal to <paramref name="c"/>,
or 0 if
         /// there is no such entry.


Mime
View raw message