lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [3/5] lucenenet git commit: BUG: Lucene.Net.Automaton (BasicAutomata + BasicOperations + MinimizationOperations + SpecialOperations): Corrected accessibility from internal to public and removed InternalsVisibleTo attributes unneeded as a result of these changes
Date Sat, 16 Sep 2017 06:26:46 GMT
BUG: Lucene.Net.Automaton (BasicAutomata + BasicOperations + MinimizationOperations + SpecialOperations): Corrected accessibility from internal to public and removed InternalsVisibleTo attributes unneeded as a result of these changes.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e4f25624
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e4f25624
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e4f25624

Branch: refs/heads/master
Commit: e4f25624fca6b7bb2fc59afceac9b683a29a200d
Parents: b781e11
Author: Shad Storhaug <shad@shadstorhaug.com>
Authored: Fri Sep 15 15:29:38 2017 +0700
Committer: Shad Storhaug <shad@shadstorhaug.com>
Committed: Fri Sep 15 15:29:38 2017 +0700

----------------------------------------------------------------------
 .../Analysis/Th/ThaiAnalyzer.cs                 |  2 +-
 .../Collation/ICUCollationKeyAnalyzer.cs        |  2 +-
 .../Highlight/Highlighter.cs                    |  2 +-
 .../Spell/SuggestWordQueue.cs                   |  2 +-
 src/Lucene.Net.Suggest/Suggest/Lookup.cs        |  2 +-
 .../Ar/TestArabicNormalizationFilter.cs         |  2 +-
 .../Analysis/Ar/TestArabicStemFilter.cs         |  2 +-
 .../Analysis/Bg/TestBulgarianStemmer.cs         |  2 +-
 .../Analysis/Br/TestBrazilianStemmer.cs         |  2 +-
 .../CharFilters/HTMLStripCharFilterTest.cs      |  4 +--
 .../CharFilters/TestMappingCharFilter.cs        | 12 ++++----
 .../Analysis/Cjk/TestCJKAnalyzer.cs             |  8 ++---
 .../Analysis/Cjk/TestCJKBigramFilter.cs         | 10 +++---
 .../Analysis/Cjk/TestCJKWidthFilter.cs          |  4 +--
 .../Ckb/TestSoraniNormalizationFilter.cs        |  2 +-
 .../Analysis/Ckb/TestSoraniStemFilter.cs        |  2 +-
 .../Analysis/Cn/TestChineseTokenizer.cs         |  4 +--
 .../Commongrams/CommonGramsFilterTest.cs        |  8 ++---
 .../Compound/TestCompoundWordTokenFilter.cs     | 12 ++++----
 .../Analysis/Core/TestAnalyzers.cs              |  4 +--
 .../Analysis/Core/TestBugInSomething.cs         |  8 ++---
 .../Analysis/Core/TestDuelingAnalyzers.cs       | 12 ++++----
 .../Analysis/Core/TestFactories.cs              |  4 +--
 .../Analysis/Core/TestRandomChains.cs           |  4 +--
 .../Analysis/Core/TestStandardAnalyzer.cs       |  8 ++---
 .../Analysis/Core/TestStopFilter.cs             |  2 +-
 .../Analysis/Core/TestUAX29URLEmailTokenizer.cs | 14 ++++-----
 .../Analysis/Cz/TestCzechStemmer.cs             |  2 +-
 .../Analysis/De/TestGermanLightStemFilter.cs    |  6 ++--
 .../Analysis/De/TestGermanMinimalStemFilter.cs  |  6 ++--
 .../De/TestGermanNormalizationFilter.cs         |  4 +--
 .../Analysis/De/TestGermanStemFilter.cs         |  6 ++--
 .../Analysis/El/TestGreekStemmer.cs             |  2 +-
 .../Analysis/En/TestEnglishMinimalStemFilter.cs |  4 +--
 .../Analysis/En/TestKStemmer.cs                 |  4 +--
 .../Analysis/En/TestPorterStemFilter.cs         |  4 +--
 .../Analysis/Es/TestSpanishLightStemFilter.cs   |  4 +--
 .../Analysis/Fa/TestPersianCharFilter.cs        |  4 +--
 .../Fa/TestPersianNormalizationFilter.cs        |  2 +-
 .../Analysis/Fi/TestFinnishLightStemFilter.cs   |  6 ++--
 .../Analysis/Fr/TestFrenchLightStemFilter.cs    |  6 ++--
 .../Analysis/Fr/TestFrenchMinimalStemFilter.cs  |  6 ++--
 .../Analysis/Ga/TestIrishLowerCaseFilter.cs     |  2 +-
 .../Gl/TestGalicianMinimalStemFilter.cs         |  6 ++--
 .../Analysis/Gl/TestGalicianStemFilter.cs       |  4 +--
 .../Analysis/Hi/TestHindiNormalizer.cs          |  2 +-
 .../Analysis/Hi/TestHindiStemmer.cs             |  2 +-
 .../Analysis/Hu/TestHungarianLightStemFilter.cs |  6 ++--
 .../Analysis/Hunspell/TestHunspellStemFilter.cs |  6 ++--
 .../Analysis/Id/TestIndonesianStemmer.cs        |  6 ++--
 .../Analysis/In/TestIndicNormalizer.cs          |  2 +-
 .../Analysis/It/TestItalianLightStemFilter.cs   |  4 +--
 .../Analysis/Lv/TestLatvianStemmer.cs           |  4 +--
 .../Miscellaneous/TestASCIIFoldingFilter.cs     |  4 +--
 .../Miscellaneous/TestCapitalizationFilter.cs   |  4 +--
 .../Miscellaneous/TestCodepointCountFilter.cs   |  2 +-
 .../Miscellaneous/TestHyphenatedWordsFilter.cs  |  4 +--
 .../Miscellaneous/TestKeepWordFilter.cs         |  2 +-
 .../Analysis/Miscellaneous/TestLengthFilter.cs  |  2 +-
 .../TestLimitTokenPositionFilter.cs             |  2 +-
 .../TestLucene47WordDelimiterFilter.cs          | 10 +++---
 .../TestPerFieldAnalyzerWrapper.cs              |  4 +--
 .../TestRemoveDuplicatesTokenFilter.cs          |  4 +--
 .../TestScandinavianFoldingFilter.cs            |  4 +--
 .../TestScandinavianNormalizationFilter.cs      |  4 +--
 .../Analysis/Miscellaneous/TestTrimFilter.cs    |  6 ++--
 .../Miscellaneous/TestWordDelimiterFilter.cs    | 16 +++++-----
 .../Analysis/NGram/EdgeNGramTokenFilterTest.cs  | 10 +++---
 .../Analysis/NGram/EdgeNGramTokenizerTest.cs    |  4 +--
 .../Analysis/NGram/NGramTokenFilterTest.cs      |  6 ++--
 .../Analysis/NGram/NGramTokenizerTest.cs        |  2 +-
 .../Analysis/No/TestNorwegianLightStemFilter.cs |  8 ++---
 .../No/TestNorwegianMinimalStemFilter.cs        |  8 ++---
 .../Analysis/Path/TestPathHierarchyTokenizer.cs |  4 +--
 .../Path/TestReversePathHierarchyTokenizer.cs   |  4 +--
 .../TestPatternCaptureGroupTokenFilter.cs       |  2 +-
 .../Pattern/TestPatternReplaceCharFilter.cs     |  4 +--
 .../Pattern/TestPatternReplaceFilter.cs         |  6 ++--
 .../Analysis/Pattern/TestPatternTokenizer.cs    |  4 +--
 .../Pt/TestPortugueseLightStemFilter.cs         |  6 ++--
 .../Pt/TestPortugueseMinimalStemFilter.cs       |  6 ++--
 .../Analysis/Pt/TestPortugueseStemFilter.cs     |  6 ++--
 .../Analysis/Reverse/TestReverseStringFilter.cs |  4 +--
 .../Analysis/Ru/TestRussianLightStemFilter.cs   |  6 ++--
 .../Shingle/ShingleAnalyzerWrapperTest.cs       |  2 +-
 .../Analysis/Shingle/ShingleFilterTest.cs       |  6 ++--
 .../Analysis/Snowball/TestSnowball.cs           |  4 +--
 .../Analysis/Snowball/TestSnowballVocab.cs      |  2 +-
 .../Analysis/Sv/TestSwedishLightStemFilter.cs   |  6 ++--
 .../Analysis/Synonym/TestSolrSynonymParser.cs   |  4 +--
 .../Analysis/Synonym/TestSynonymMapFilter.cs    | 32 ++++++++++----------
 .../Synonym/TestWordnetSynonymParser.cs         |  2 +-
 .../Analysis/Tr/TestTurkishLowerCaseFilter.cs   |  2 +-
 .../Analysis/Util/TestCharTokenizers.cs         |  6 ++--
 .../Analysis/Util/TestElision.cs                |  2 +-
 .../Wikipedia/WikipediaTokenizerTest.cs         |  4 +--
 .../Highlight/HighlighterTest.cs                |  2 +-
 .../Highlight/OffsetLimitTokenFilterTest.cs     |  2 +-
 .../VectorHighlight/AbstractTestCase.cs         |  2 +-
 .../VectorHighlight/IndexTimeSynonymTest.cs     |  2 +-
 src/Lucene.Net/Properties/AssemblyInfo.cs       |  5 ---
 src/Lucene.Net/Util/Automaton/BasicAutomata.cs  |  2 +-
 .../Util/Automaton/BasicOperations.cs           |  6 +---
 .../Util/Automaton/MinimizationOperations.cs    |  6 +---
 .../Util/Automaton/SpecialOperations.cs         |  7 +----
 105 files changed, 248 insertions(+), 266 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
index 4371a7b..36de93b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
@@ -111,7 +111,7 @@ namespace Lucene.Net.Analysis.Th
         ///         built from a <see cref="StandardTokenizer"/> filtered with
         ///         <see cref="StandardFilter"/>, <see cref="LowerCaseFilter"/>, <see cref="ThaiWordFilter"/>, and
         ///         <see cref="StopFilter"/> </returns>
-        protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+        protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
         {
             if (m_matchVersion.OnOrAfter(LuceneVersion.LUCENE_48))
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyAnalyzer.cs b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyAnalyzer.cs
index 469417a..45add6b 100644
--- a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyAnalyzer.cs
@@ -79,7 +79,7 @@ namespace Lucene.Net.Collation
         {
         }
 
-        protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+        protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
         {
 #pragma warning disable 612, 618
             if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_40))

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
index 56b78d5..82c6a98 100644
--- a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
@@ -463,7 +463,7 @@ namespace Lucene.Net.Search.Highlight
     {
         public FragmentQueue(int size) : base(size) { }
 
-        protected internal override bool LessThan(TextFragment fragA, TextFragment fragB)
+        protected override bool LessThan(TextFragment fragA, TextFragment fragB)
         {
             if (fragA.Score == fragB.Score)
                 return fragA.FragNum > fragB.FragNum;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Suggest/Spell/SuggestWordQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Spell/SuggestWordQueue.cs b/src/Lucene.Net.Suggest/Spell/SuggestWordQueue.cs
index 18b683c..e6b95da 100644
--- a/src/Lucene.Net.Suggest/Spell/SuggestWordQueue.cs
+++ b/src/Lucene.Net.Suggest/Spell/SuggestWordQueue.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Search.Spell
             this.comparer = comparer;
         }
 
-        protected internal override bool LessThan(SuggestWord wa, SuggestWord wb)
+        protected override bool LessThan(SuggestWord wa, SuggestWord wb)
         {
             int val = comparer.Compare(wa, wb);
             return val < 0;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Suggest/Suggest/Lookup.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Lookup.cs b/src/Lucene.Net.Suggest/Suggest/Lookup.cs
index 93b55fb..dcb1cbf 100644
--- a/src/Lucene.Net.Suggest/Suggest/Lookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Lookup.cs
@@ -166,7 +166,7 @@ namespace Lucene.Net.Search.Suggest
             {
             }
 
-            protected internal override bool LessThan(LookupResult a, LookupResult b)
+            protected override bool LessThan(LookupResult a, LookupResult b)
             {
                 return a.Value < b.Value;
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
index 3bda1ee..a21aafd 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
@@ -136,7 +136,7 @@ namespace Lucene.Net.Analysis.Ar
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new ArabicNormalizationFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
index 024430f..4223fb2 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
@@ -194,7 +194,7 @@ namespace Lucene.Net.Analysis.Ar
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new ArabicStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Bg/TestBulgarianStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Bg/TestBulgarianStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Bg/TestBulgarianStemmer.cs
index 8f4a4d6..9374f71 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Bg/TestBulgarianStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Bg/TestBulgarianStemmer.cs
@@ -247,7 +247,7 @@ namespace Lucene.Net.Analysis.Bg
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new BulgarianStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Br/TestBrazilianStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Br/TestBrazilianStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Br/TestBrazilianStemmer.cs
index febc680..39b15f4 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Br/TestBrazilianStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Br/TestBrazilianStemmer.cs
@@ -187,7 +187,7 @@ namespace Lucene.Net.Analysis.Br
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new BrazilianStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/HTMLStripCharFilterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/HTMLStripCharFilterTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/HTMLStripCharFilterTest.cs
index 0db491d..e81038c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/HTMLStripCharFilterTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/HTMLStripCharFilterTest.cs
@@ -40,13 +40,13 @@ namespace Lucene.Net.Analysis.CharFilters
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new HTMLStripCharFilter(reader);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
index 699e6a2..0f29c28 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
@@ -226,13 +226,13 @@ namespace Lucene.Net.Analysis.CharFilters
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new MappingCharFilter(outerInstance.normMap, reader);
             }
@@ -267,13 +267,13 @@ namespace Lucene.Net.Analysis.CharFilters
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new MappingCharFilter(map, reader);
             }
@@ -305,13 +305,13 @@ namespace Lucene.Net.Analysis.CharFilters
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new MappingCharFilter(map, reader);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKAnalyzer.cs
index 572bb8c..2eebddf 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKAnalyzer.cs
@@ -157,13 +157,13 @@ namespace Lucene.Net.Analysis.Cjk
                 this.norm = norm;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, new CJKBigramFilter(tokenizer));
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new MappingCharFilter(norm, reader);
             }
@@ -211,7 +211,7 @@ namespace Lucene.Net.Analysis.Cjk
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenFilter filter = new FakeStandardTokenizer(tokenizer);
@@ -254,7 +254,7 @@ namespace Lucene.Net.Analysis.Cjk
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new CJKBigramFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKBigramFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKBigramFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKBigramFilter.cs
index b3f5066..fece30c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKBigramFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKBigramFilter.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Analysis.Cjk
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(t, new CJKBigramFilter(t));
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.Cjk
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(t, new CJKBigramFilter(t, (CJKScript)0xff, true));
@@ -76,7 +76,7 @@ namespace Lucene.Net.Analysis.Cjk
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(t, new CJKBigramFilter(t, CJKScript.HAN));
@@ -99,7 +99,7 @@ namespace Lucene.Net.Analysis.Cjk
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(t, new CJKBigramFilter(t, (CJKScript)0xff, false));
@@ -128,7 +128,7 @@ namespace Lucene.Net.Analysis.Cjk
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(t, new CJKBigramFilter(t, CJKScript.HAN, true));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKWidthFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKWidthFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKWidthFilter.cs
index 634d3ea..2e34d01 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKWidthFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cjk/TestCJKWidthFilter.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.Cjk
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new CJKWidthFilter(source));
@@ -85,7 +85,7 @@ namespace Lucene.Net.Analysis.Cjk
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new CJKWidthFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniNormalizationFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniNormalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniNormalizationFilter.cs
index 56fec10..7256e7d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniNormalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniNormalizationFilter.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.Ckb
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new SoraniNormalizationFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniStemFilter.cs
index b053b88..3457947 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ckb/TestSoraniStemFilter.cs
@@ -110,7 +110,7 @@ namespace Lucene.Net.Analysis.Ckb
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new SoraniStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cn/TestChineseTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cn/TestChineseTokenizer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cn/TestChineseTokenizer.cs
index c06e711..8a9d863 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cn/TestChineseTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cn/TestChineseTokenizer.cs
@@ -70,7 +70,7 @@ namespace Lucene.Net.Analysis.Cn
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 return new TokenStreamComponents(new ChineseTokenizer(reader));
             }
@@ -89,7 +89,7 @@ namespace Lucene.Net.Analysis.Cn
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new WhitespaceTokenizer(LuceneVersion.LUCENE_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, new ChineseFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Commongrams/CommonGramsFilterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Commongrams/CommonGramsFilterTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Commongrams/CommonGramsFilterTest.cs
index b4589f8..0bfaacb 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Commongrams/CommonGramsFilterTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Commongrams/CommonGramsFilterTest.cs
@@ -140,7 +140,7 @@ namespace Lucene.Net.Analysis.CommonGrams
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, new CommonGramsQueryFilter(new CommonGramsFilter(TEST_VERSION_CURRENT, tokenizer, commonWords)));
@@ -198,7 +198,7 @@ namespace Lucene.Net.Analysis.CommonGrams
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, new CommonGramsFilter(TEST_VERSION_CURRENT, tokenizer, commonWords));
@@ -306,7 +306,7 @@ namespace Lucene.Net.Analysis.CommonGrams
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 CommonGramsFilter cgf = new CommonGramsFilter(TEST_VERSION_CURRENT, t, commonWords);
@@ -324,7 +324,7 @@ namespace Lucene.Net.Analysis.CommonGrams
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 CommonGramsFilter cgf = new CommonGramsFilter(TEST_VERSION_CURRENT, t, commonWords);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs
index a9930b0..cc43831 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs
@@ -264,14 +264,14 @@ namespace Lucene.Net.Analysis.Compound
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenFilter filter = new DictionaryCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, dict);
                 return new TokenStreamComponents(tokenizer, filter);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new MappingCharFilter(normMap, reader);
             }
@@ -308,7 +308,7 @@ namespace Lucene.Net.Analysis.Compound
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, new DictionaryCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, dict));
@@ -328,7 +328,7 @@ namespace Lucene.Net.Analysis.Compound
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenFilter filter = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, hyphenator);
@@ -366,7 +366,7 @@ namespace Lucene.Net.Analysis.Compound
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new DictionaryCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, dict));
@@ -386,7 +386,7 @@ namespace Lucene.Net.Analysis.Compound
             }
 
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 TokenFilter filter = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, hyphenator);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
index ee09373..6c0ab3c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
@@ -114,7 +114,7 @@ namespace Lucene.Net.Analysis.Core
         private class LowerCaseWhitespaceAnalyzer : Analyzer
         {
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, new LowerCaseFilter(TEST_VERSION_CURRENT, tokenizer));
@@ -125,7 +125,7 @@ namespace Lucene.Net.Analysis.Core
         private class UpperCaseWhitespaceAnalyzer : Analyzer
         {
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, new UpperCaseFilter(TEST_VERSION_CURRENT, tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
index db54452..4e273c6 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
@@ -65,14 +65,14 @@ namespace Lucene.Net.Analysis.Core
                 this.map = map;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new MockTokenizer(new TestRandomChains.CheckThatYouDidntReadAnythingReaderWrapper(reader), MockTokenFilter.ENGLISH_STOPSET, false, -65);
                 TokenFilter f = new CommonGramsFilter(TEST_VERSION_CURRENT, t, cas);
                 return new TokenStreamComponents(t, f);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 reader = new MockCharFilter(reader, 0);
                 reader = new MappingCharFilter(map, reader);
@@ -321,7 +321,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new EdgeNGramTokenizer(TEST_VERSION_CURRENT, reader, 2, 94);
                 //TokenStream stream = new SopTokenFilter(tokenizer);
@@ -356,7 +356,7 @@ namespace Lucene.Net.Analysis.Core
                 this.table = table;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new WikipediaTokenizer(reader);
                 TokenStream stream = new SopTokenFilter(tokenizer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestDuelingAnalyzers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestDuelingAnalyzers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestDuelingAnalyzers.cs
index 38e5671..b79bee8 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestDuelingAnalyzers.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestDuelingAnalyzers.cs
@@ -78,7 +78,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new LetterTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -111,7 +111,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new LetterTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -140,7 +140,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new LetterTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -172,7 +172,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new LetterTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -201,7 +201,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new LetterTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
@@ -233,7 +233,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new LetterTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
index bcf2c3c..9614a34 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
@@ -211,7 +211,7 @@ namespace Lucene.Net.Analysis.Core
                 this.tokenfilter = tokenfilter;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tf = tokenizer.Create(reader);
                 if (tokenfilter != null)
@@ -225,7 +225,7 @@ namespace Lucene.Net.Analysis.Core
             }
 
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 if (charFilter != null)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
index d1a13a7..579a189 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
@@ -855,7 +855,7 @@ namespace Lucene.Net.Analysis.Core
                 }
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Random random = new Random(seed);
                 TokenizerSpec tokenizerSpec = NewTokenizer(random, reader);
@@ -865,7 +865,7 @@ namespace Lucene.Net.Analysis.Core
                 return new TokenStreamComponents(tokenizerSpec.tokenizer, filterSpec.stream);
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 Random random = new Random(seed);
                 CharFilterSpec charfilterspec = NewCharFilterChain(random, reader);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
index ec2d6a8..fb119ec 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Core
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer);
@@ -336,7 +336,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 #pragma warning disable 612, 618
                 Tokenizer tokenizer = new StandardTokenizer(LuceneVersion.LUCENE_36, reader);
@@ -365,7 +365,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 #pragma warning disable 612, 618
                 Tokenizer tokenizer = new StandardTokenizer(LuceneVersion.LUCENE_40, reader);
@@ -408,7 +408,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 TokenStream tokenStream = new MockGraphTokenFilter(Random(), tokenizer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
index 4f4b6ef..7387dd6 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
@@ -225,7 +225,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenFilter filter = new MockSynonymFilter(outerInstance, tokenizer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
index b168398..4a91e86 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Analysis.Core
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(TEST_VERSION_CURRENT, reader);
@@ -119,7 +119,7 @@ namespace Lucene.Net.Analysis.Core
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(TEST_VERSION_CURRENT, reader);
                 tokenizer.MaxTokenLength = int.MaxValue; // Tokenize arbitrary length URLs
@@ -136,7 +136,7 @@ namespace Lucene.Net.Analysis.Core
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(TEST_VERSION_CURRENT, reader);
                 TokenFilter filter = new EmailFilter(tokenizer);
@@ -587,7 +587,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 #pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_31, reader);
@@ -615,7 +615,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 #pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_34, reader);
@@ -642,7 +642,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 #pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_36, reader);
@@ -671,7 +671,7 @@ namespace Lucene.Net.Analysis.Core
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
 #pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_40, reader);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cz/TestCzechStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cz/TestCzechStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cz/TestCzechStemmer.cs
index 2bc754a..8a83dc0 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cz/TestCzechStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Cz/TestCzechStemmer.cs
@@ -312,7 +312,7 @@ namespace Lucene.Net.Analysis.Cz
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new CzechStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanLightStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanLightStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanLightStemFilter.cs
index 2443285..c57fe41 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanLightStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanLightStemFilter.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.De
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new GermanLightStemFilter(source));
@@ -71,7 +71,7 @@ namespace Lucene.Net.Analysis.De
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -103,7 +103,7 @@ namespace Lucene.Net.Analysis.De
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new GermanLightStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanMinimalStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanMinimalStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanMinimalStemFilter.cs
index fd5c0e9..357f148 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanMinimalStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanMinimalStemFilter.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.De
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new GermanMinimalStemFilter(source));
@@ -78,7 +78,7 @@ namespace Lucene.Net.Analysis.De
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -118,7 +118,7 @@ namespace Lucene.Net.Analysis.De
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new GermanMinimalStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanNormalizationFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanNormalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanNormalizationFilter.cs
index c1762f1..81915e3 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanNormalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanNormalizationFilter.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.De
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string field, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string field, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream stream = new GermanNormalizationFilter(tokenizer);
@@ -94,7 +94,7 @@ namespace Lucene.Net.Analysis.De
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new GermanNormalizationFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanStemFilter.cs
index 4767a00..94b4f00 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanStemFilter.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.De
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(t, new GermanStemFilter(new LowerCaseFilter(TEST_VERSION_CURRENT, t)));
@@ -82,7 +82,7 @@ namespace Lucene.Net.Analysis.De
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -114,7 +114,7 @@ namespace Lucene.Net.Analysis.De
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new GermanStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/El/TestGreekStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/El/TestGreekStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/El/TestGreekStemmer.cs
index 8127f55..92df565 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/El/TestGreekStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/El/TestGreekStemmer.cs
@@ -553,7 +553,7 @@ namespace Lucene.Net.Analysis.El
 			  this.outerInstance = outerInstance;
 		  }
 
-		  protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+		  protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
 		  {
 			Tokenizer tokenizer = new KeywordTokenizer(reader);
 			return new TokenStreamComponents(tokenizer, new GreekStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestEnglishMinimalStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestEnglishMinimalStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestEnglishMinimalStemFilter.cs
index 60bd1d7..9147d6b 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestEnglishMinimalStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestEnglishMinimalStemFilter.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.En
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new EnglishMinimalStemFilter(source));
@@ -81,7 +81,7 @@ namespace Lucene.Net.Analysis.En
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new EnglishMinimalStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestKStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestKStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestKStemmer.cs
index 5d73650..57a9537 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestKStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestKStemmer.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.En
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
                 return new TokenStreamComponents(tokenizer, new KStemFilter(tokenizer));
@@ -76,7 +76,7 @@ namespace Lucene.Net.Analysis.En
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new KStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestPorterStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestPorterStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestPorterStemFilter.cs
index 4df6e84..3535041 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestPorterStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/En/TestPorterStemFilter.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.En
             public AnalyzerAnonymousInnerClassHelper()
             {
             }
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer t = new MockTokenizer(reader, MockTokenizer.KEYWORD, false);
                 return new TokenStreamComponents(t, new PorterStemFilter(t));
@@ -79,7 +79,7 @@ namespace Lucene.Net.Analysis.En
 
         private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
         {
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new PorterStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Es/TestSpanishLightStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Es/TestSpanishLightStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Es/TestSpanishLightStemFilter.cs
index bffc6ec..767fd63 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Es/TestSpanishLightStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Es/TestSpanishLightStemFilter.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.Es
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new SpanishLightStemFilter(source));
@@ -73,7 +73,7 @@ namespace Lucene.Net.Analysis.Es
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new SpanishLightStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianCharFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianCharFilter.cs
index 39fa9de..c03b033 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianCharFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianCharFilter.cs
@@ -30,12 +30,12 @@ namespace Lucene.Net.Analysis.Fa
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 return new TokenStreamComponents(new MockTokenizer(reader));
             }
 
-            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            protected override TextReader InitReader(string fieldName, TextReader reader)
             {
                 return new PersianCharFilter(reader);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
index 8431df5..99a9b13 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
@@ -90,7 +90,7 @@ namespace Lucene.Net.Analysis.Fa
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new PersianNormalizationFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fi/TestFinnishLightStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fi/TestFinnishLightStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fi/TestFinnishLightStemFilter.cs
index 67b3025..e700453 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fi/TestFinnishLightStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fi/TestFinnishLightStemFilter.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.Fi
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new FinnishLightStemFilter(source));
@@ -71,7 +71,7 @@ namespace Lucene.Net.Analysis.Fi
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -103,7 +103,7 @@ namespace Lucene.Net.Analysis.Fi
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new FinnishLightStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchLightStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchLightStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchLightStemFilter.cs
index a1183b9..78cd4ed 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchLightStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchLightStemFilter.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.Fr
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new FrenchLightStemFilter(source));
@@ -205,7 +205,7 @@ namespace Lucene.Net.Analysis.Fr
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -237,7 +237,7 @@ namespace Lucene.Net.Analysis.Fr
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new FrenchLightStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchMinimalStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchMinimalStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchMinimalStemFilter.cs
index e3f65a7..ec93003 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchMinimalStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchMinimalStemFilter.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.Fr
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(source, new FrenchMinimalStemFilter(source));
@@ -80,7 +80,7 @@ namespace Lucene.Net.Analysis.Fr
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -120,7 +120,7 @@ namespace Lucene.Net.Analysis.Fr
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new FrenchMinimalStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ga/TestIrishLowerCaseFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ga/TestIrishLowerCaseFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ga/TestIrishLowerCaseFilter.cs
index fb54eda..c674f58 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ga/TestIrishLowerCaseFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ga/TestIrishLowerCaseFilter.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Analysis.Ga
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new IrishLowerCaseFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianMinimalStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianMinimalStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianMinimalStemFilter.cs
index f173c0e..1b9ae3c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianMinimalStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianMinimalStemFilter.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.Gl
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 return new TokenStreamComponents(tokenizer, new GalicianMinimalStemFilter(tokenizer));
@@ -79,7 +79,7 @@ namespace Lucene.Net.Analysis.Gl
                 this.exclusionSet = exclusionSet;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
@@ -111,7 +111,7 @@ namespace Lucene.Net.Analysis.Gl
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new GalicianMinimalStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianStemFilter.cs
index d70094c..a867d8d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Gl/TestGalicianStemFilter.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Gl
             {
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer source = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                 TokenStream result = new LowerCaseFilter(TEST_VERSION_CURRENT, source);
@@ -68,7 +68,7 @@ namespace Lucene.Net.Analysis.Gl
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new GalicianStemFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiNormalizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiNormalizer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiNormalizer.cs
index baf430c..5a4b59d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiNormalizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiNormalizer.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Analysis.Hi
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new HindiNormalizationFilter(tokenizer));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4f25624/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiStemmer.cs
index 26b2990..08cf43a 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hi/TestHindiStemmer.cs
@@ -108,7 +108,7 @@ namespace Lucene.Net.Analysis.Hi
                 this.outerInstance = outerInstance;
             }
 
-            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
                 return new TokenStreamComponents(tokenizer, new HindiStemFilter(tokenizer));


Mime
View raw message