lucenenet-commits mailing list archives

From nightowl...@apache.org
Subject [39/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory
Date Sun, 26 Feb 2017 23:37:27 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
new file mode 100644
index 0000000..da0395e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
@@ -0,0 +1,438 @@
+using System;
+using System.Diagnostics;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using Attributes;
+
+    [TestFixture]
+    public class TestConcurrentMergeScheduler : LuceneTestCase
+    {
+        private class FailOnlyOnFlush : MockDirectoryWrapper.Failure
+        {
+            private readonly TestConcurrentMergeScheduler OuterInstance;
+
+            public FailOnlyOnFlush(TestConcurrentMergeScheduler outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal new bool DoFail;
+            internal bool HitExc;
+
+            public override void SetDoFail()
+            {
+                this.DoFail = true;
+                HitExc = false;
+            }
+
+            public override void ClearDoFail()
+            {
+                this.DoFail = false;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail && TestThread() && Random().NextBoolean())
+                {
+                    bool isDoFlush = Util.StackTraceHelper.DoesStackTraceContainMethod("Flush");
+                    bool isClose = Util.StackTraceHelper.DoesStackTraceContainMethod("Close");
+
+                    if (isDoFlush && !isClose)
+                    {
+                        HitExc = true;
+                        throw new IOException(Thread.CurrentThread.Name + ": now failing during flush");
+                    }
+                }
+            }
+        }
+
+        // Make sure running BG merges still work fine even when
+        // we are hitting exceptions during flushing.
+        [Test]
+        public virtual void TestFlushExceptions()
+        {
+            MockDirectoryWrapper directory = NewMockDirectory();
+            FailOnlyOnFlush failure = new FailOnlyOnFlush(this);
+            directory.FailOn(failure);
+
+            IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+            int extraCount = 0;
+
+            for (int i = 0; i < 10; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + i);
+                }
+
+                for (int j = 0; j < 20; j++)
+                {
+                    idField.SetStringValue(Convert.ToString(i * 20 + j));
+                    writer.AddDocument(doc);
+                }
+
+                // Must cycle here because sometimes the merge flushes
+                // the doc we just added, leaving nothing to flush, so
+                // we never hit the exception.
+                while (true)
+                {
+                    writer.AddDocument(doc);
+                    failure.SetDoFail();
+                    try
+                    {
+                        writer.Flush(true, true);
+                        if (failure.HitExc)
+                        {
+                            Assert.Fail("failed to hit IOException");
+                        }
+                        extraCount++;
+                    }
+                    catch (IOException ioe)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(ioe.StackTrace);
+                        }
+                        failure.ClearDoFail();
+                        break;
+                    }
+                }
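+                // Only the extras whose flush succeeded count: the doc added on
+                // the pass where the flush threw is discarded by the aborting flush.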
+                Assert.AreEqual(20 * (i + 1) + extraCount, writer.NumDocs);
+            }
+
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(directory);
+            Assert.AreEqual(200 + extraCount, reader.NumDocs);
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        // Test that deletes committed after a merge has started,
+        // but before it finishes, are correctly merged back:
+        [Test]
+        public virtual void TestDeleteMerging()
+        {
+            Directory directory = NewDirectory();
+
+            LogDocMergePolicy mp = new LogDocMergePolicy();
+            // Force degenerate merging so we get a mix of merges
+            // over segments with and without deletes at the
+            // start:
+            mp.MinMergeDocs = 1000;
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(mp));
+
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+            for (int i = 0; i < 10; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: cycle");
+                }
+                for (int j = 0; j < 100; j++)
+                {
+                    idField.SetStringValue(Convert.ToString(i * 100 + j));
+                    writer.AddDocument(doc);
+                }
+
+                int delID = i;
+                while (delID < 100 * (1 + i))
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: del " + delID);
+                    }
+                    writer.DeleteDocuments(new Term("id", "" + delID));
+                    delID += 10;
+                }
+
+                writer.Commit();
+            }
+
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(directory);
+            // Verify that we did not lose any deletes...
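+            // 10 cycles x 100 docs = 1000 added; cycle i deletes the ids
+            // congruent to i (mod 10) below 100 * (i + 1), which removes
+            // 10 + 20 + ... + 100 = 550 distinct docs, leaving 450.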
+            Assert.AreEqual(450, reader.NumDocs);
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestNoExtraFiles()
+        {
+            Directory directory = NewDirectory();
+            IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+
+            for (int iter = 0; iter < 7; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+
+                for (int j = 0; j < 21; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("content", "a b c", Field.Store.NO));
+                    writer.AddDocument(doc);
+                }
+
+                writer.Dispose();
+                TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");
+
+                // Reopen
+                writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
+            }
+
+            writer.Dispose();
+
+            directory.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestNoWaitClose()
+        {
+            Directory directory = NewDirectory();
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(100)));
+
+            for (int iter = 0; iter < 10; iter++)
+            {
+                for (int j = 0; j < 201; j++)
+                {
+                    idField.SetStringValue(Convert.ToString(iter * 201 + j));
+                    writer.AddDocument(doc);
+                }
+
+                int delID = iter * 201;
+                for (int j = 0; j < 20; j++)
+                {
+                    writer.DeleteDocuments(new Term("id", Convert.ToString(delID)));
+                    delID += 5;
+                }
+
+                // Force a bunch of merge threads to kick off so we
+                // stress out aborting them on close:
+                ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 3;
+                writer.AddDocument(doc);
+                writer.Commit();
+
+                writer.Dispose(false);
+
+                IndexReader reader = DirectoryReader.Open(directory);
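+                // Each iteration adds 201 + 1 docs and deletes 20: 202 - 20 = 182 live docs per cycle.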
+                Assert.AreEqual((1 + iter) * 182, reader.NumDocs);
+                reader.Dispose();
+
+                // Reopen
+                writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy(100)));
+            }
+            writer.Dispose();
+
+            directory.Dispose();
+        }
+
+        // LUCENE-4544
+        [Test]
+        public virtual void TestMaxMergeCount()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            int maxMergeCount = TestUtil.NextInt(Random(), 1, 5);
+            int maxMergeThreads = TestUtil.NextInt(Random(), 1, maxMergeCount);
+            CountdownEvent enoughMergesWaiting = new CountdownEvent(maxMergeCount);
+            AtomicInt32 runningMergeCount = new AtomicInt32(0);
+            AtomicBoolean failed = new AtomicBoolean();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: maxMergeCount=" + maxMergeCount + " maxMergeThreads=" + maxMergeThreads);
+            }
+
+            ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousInnerClassHelper(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
+            cms.SetMaxMergesAndThreads(maxMergeCount, maxMergeThreads);
+            iwc.SetMergeScheduler(cms);
+            iwc.SetMaxBufferedDocs(2);
+
+            TieredMergePolicy tmp = new TieredMergePolicy();
+            iwc.SetMergePolicy(tmp);
+            tmp.MaxMergeAtOnce = 2;
+            tmp.SegmentsPerTier = 2;
+
+            IndexWriter w = new IndexWriter(dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewField("field", "field", TextField.TYPE_NOT_STORED));
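+            // Keep adding docs until DoMerge has stalled maxMergeCount merges
+            // (CurrentCount reaches 0) or a failure has been flagged.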
+            while (enoughMergesWaiting.CurrentCount != 0 && !failed.Get())
+            {
+                for (int i = 0; i < 10; i++)
+                {
+                    w.AddDocument(doc);
+                }
+            }
+            w.Dispose(false);
+            dir.Dispose();
+        }
+
+        private class ConcurrentMergeSchedulerAnonymousInnerClassHelper : ConcurrentMergeScheduler
+        {
+            private readonly TestConcurrentMergeScheduler OuterInstance;
+
+            private int maxMergeCount;
+            private CountdownEvent EnoughMergesWaiting;
+            private AtomicInt32 RunningMergeCount;
+            private AtomicBoolean Failed;
+
+            public ConcurrentMergeSchedulerAnonymousInnerClassHelper(TestConcurrentMergeScheduler outerInstance, int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed)
+            {
+                this.OuterInstance = outerInstance;
+                this.maxMergeCount = maxMergeCount;
+                this.EnoughMergesWaiting = enoughMergesWaiting;
+                this.RunningMergeCount = runningMergeCount;
+                this.Failed = failed;
+            }
+
+            protected override void DoMerge(MergePolicy.OneMerge merge)
+            {
+                try
+                {
+                    // Stall all incoming merges until we see
+                    // maxMergeCount:
+                    int count = RunningMergeCount.IncrementAndGet();
+                    try
+                    {
+                        Assert.IsTrue(count <= maxMergeCount, "count=" + count + " vs maxMergeCount=" + maxMergeCount);
+                        EnoughMergesWaiting.Signal();
+
+                        // Stall this merge until we see exactly
+                        // maxMergeCount merges waiting
+                        while (true)
+                        {
+                            // wait for 10 milliseconds
+                            if (EnoughMergesWaiting.Wait(new TimeSpan(0, 0, 0, 0, 10)) || Failed.Get())
+                            {
+                                break;
+                            }
+                        }
+                        // Then sleep a bit to give a chance for the bug
+                        // (too many pending merges) to appear:
+                        Thread.Sleep(20);
+                        base.DoMerge(merge);
+                    }
+                    finally
+                    {
+                        RunningMergeCount.DecrementAndGet();
+                    }
+                }
+                catch (Exception t)
+                {
+                    Failed.Set(true);
+                    m_writer.MergeFinish(merge);
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+
+        private class TrackingCMS : ConcurrentMergeScheduler
+        {
+            internal long TotMergedBytes;
+
+            public TrackingCMS()
+            {
+                SetMaxMergesAndThreads(5, 5);
+            }
+
+            protected override void DoMerge(MergePolicy.OneMerge merge)
+            {
+                TotMergedBytes += merge.TotalBytesSize;
+                base.DoMerge(merge);
+            }
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestTotalBytesSize()
+        {
+            Directory d = NewDirectory();
+            if (d is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)d).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMaxBufferedDocs(5);
+            iwc.SetMergeScheduler(new TrackingCMS());
+            if (TestUtil.GetPostingsFormat("id").Equals("SimpleText"))
+            {
+                // Don't use SimpleText here; force Lucene41 postings for the "id" field instead.
+                iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
+            }
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, iwc);
+            for (int i = 0; i < 1000; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "" + i, Field.Store.NO));
+                w.AddDocument(doc);
+
+                if (Random().NextBoolean())
+                {
+                    w.DeleteDocuments(new Term("id", "" + Random().Next(i + 1)));
+                }
+            }
+            Assert.IsTrue(((TrackingCMS)w.w.Config.MergeScheduler).TotMergedBytes != 0);
+            w.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs b/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
new file mode 100644
index 0000000..b397eb3
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
@@ -0,0 +1,421 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FailOnNonBulkMergesInfoStream = Lucene.Net.Util.FailOnNonBulkMergesInfoStream;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using StoredField = StoredField;
+    using StringField = StringField;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestConsistentFieldNumbers : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSameFieldNumbersAcrossSegments()
+        {
+            for (int i = 0; i < 2; i++)
+            {
+                Directory dir = NewDirectory();
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+
+                Document d1 = new Document();
+                d1.Add(new StringField("f1", "first field", Field.Store.YES));
+                d1.Add(new StringField("f2", "second field", Field.Store.YES));
+                writer.AddDocument(d1);
+
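+                // i == 0: commit and keep the writer open; i == 1: fully close and reopen the writer between the two segments.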
+                if (i == 1)
+                {
+                    writer.Dispose();
+                    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+                }
+                else
+                {
+                    writer.Commit();
+                }
+
+                Document d2 = new Document();
+                FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+                customType2.StoreTermVectors = true;
+                d2.Add(new TextField("f2", "second field", Field.Store.NO));
+                d2.Add(new Field("f1", "first field", customType2));
+                d2.Add(new TextField("f3", "third field", Field.Store.NO));
+                d2.Add(new TextField("f4", "fourth field", Field.Store.NO));
+                writer.AddDocument(d2);
+
+                writer.Dispose();
+
+                SegmentInfos sis = new SegmentInfos();
+                sis.Read(dir);
+                Assert.AreEqual(2, sis.Count);
+
+                FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+
+                Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                Assert.AreEqual("f1", fis2.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis2.FieldInfo(1).Name);
+                Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+                Assert.AreEqual("f4", fis2.FieldInfo(3).Name);
+
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                sis = new SegmentInfos();
+                sis.Read(dir);
+                Assert.AreEqual(1, sis.Count);
+
+                FieldInfos fis3 = SegmentReader.ReadFieldInfos(sis.Info(0));
+
+                Assert.AreEqual("f1", fis3.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis3.FieldInfo(1).Name);
+                Assert.AreEqual("f3", fis3.FieldInfo(2).Name);
+                Assert.AreEqual("f4", fis3.FieldInfo(3).Name);
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            Directory dir1 = NewDirectory();
+            Directory dir2 = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+
+            Document d1 = new Document();
+            d1.Add(new TextField("f1", "first field", Field.Store.YES));
+            d1.Add(new TextField("f2", "second field", Field.Store.YES));
+            writer.AddDocument(d1);
+
+            writer.Dispose();
+            writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+
+            Document d2 = new Document();
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.StoreTermVectors = true;
+            d2.Add(new TextField("f2", "second field", Field.Store.YES));
+            d2.Add(new Field("f1", "first field", customType2));
+            d2.Add(new TextField("f3", "third field", Field.Store.YES));
+            d2.Add(new TextField("f4", "fourth field", Field.Store.YES));
+            writer.AddDocument(d2);
+
+            writer.Dispose();
+
+            writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+            writer.AddIndexes(dir2);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir1);
+            Assert.AreEqual(2, sis.Count);
+
+            FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+            FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+
+            Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+            Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+            // make sure the ordering of the "external" segment is preserved
+            Assert.AreEqual("f2", fis2.FieldInfo(0).Name);
+            Assert.AreEqual("f1", fis2.FieldInfo(1).Name);
+            Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+            Assert.AreEqual("f4", fis2.FieldInfo(3).Name);
+
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFieldNumberGaps()
+        {
+            int numIters = AtLeast(13);
+            for (int i = 0; i < numIters; i++)
+            {
+                Directory dir = NewDirectory();
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+                    Document d = new Document();
+                    d.Add(new TextField("f1", "d1 first field", Field.Store.YES));
+                    d.Add(new TextField("f2", "d1 second field", Field.Store.YES));
+                    writer.AddDocument(d);
+                    writer.Dispose();
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    Assert.AreEqual(1, sis.Count);
+                    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                }
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+                    Document d = new Document();
+                    d.Add(new TextField("f1", "d2 first field", Field.Store.YES));
+                    d.Add(new StoredField("f3", new byte[] { 1, 2, 3 }));
+                    writer.AddDocument(d);
+                    writer.Dispose();
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    Assert.AreEqual(2, sis.Count);
+                    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                    FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+                    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                    Assert.AreEqual("f1", fis2.FieldInfo(0).Name);
+                    Assert.IsNull(fis2.FieldInfo(1));
+                    Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+                }
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+                    Document d = new Document();
+                    d.Add(new TextField("f1", "d3 first field", Field.Store.YES));
+                    d.Add(new TextField("f2", "d3 second field", Field.Store.YES));
+                    d.Add(new StoredField("f3", new byte[] { 1, 2, 3, 4, 5 }));
+                    writer.AddDocument(d);
+                    writer.Dispose();
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    Assert.AreEqual(3, sis.Count);
+                    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                    FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+                    FieldInfos fis3 = SegmentReader.ReadFieldInfos(sis.Info(2));
+                    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                    Assert.AreEqual("f1", fis2.FieldInfo(0).Name);
+                    Assert.IsNull(fis2.FieldInfo(1));
+                    Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+                    Assert.AreEqual("f1", fis3.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis3.FieldInfo(1).Name);
+                    Assert.AreEqual("f3", fis3.FieldInfo(2).Name);
+                }
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+                    writer.DeleteDocuments(new Term("f1", "d1"));
+                    // nuke the first segment entirely so that the segment with gaps is
+                    // loaded first!
+                    writer.ForceMergeDeletes();
+                    writer.Dispose();
+                }
+
+                IndexWriter writer_ = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(new LogByteSizeMergePolicy()).SetInfoStream(new FailOnNonBulkMergesInfoStream()));
+                writer_.ForceMerge(1);
+                writer_.Dispose();
+
+                SegmentInfos sis_ = new SegmentInfos();
+                sis_.Read(dir);
+                Assert.AreEqual(1, sis_.Count);
+                FieldInfos fis1_ = SegmentReader.ReadFieldInfos(sis_.Info(0));
+                Assert.AreEqual("f1", fis1_.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis1_.FieldInfo(1).Name);
+                Assert.AreEqual("f3", fis1_.FieldInfo(2).Name);
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestManyFields()
+        {
+            int NUM_DOCS = AtLeast(200);
+            int MAX_FIELDS = AtLeast(50);
+
+            int[][] docs = RectangularArrays.ReturnRectangularIntArray(NUM_DOCS, 4);
+            for (int i = 0; i < docs.Length; i++)
+            {
+                for (int j = 0; j < docs[i].Length; j++)
+                {
+                    docs[i][j] = Random().Next(MAX_FIELDS);
+                }
+            }
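+            // Every doc draws 4 random field numbers in [0, MAX_FIELDS); GetField
+            // maps each number to one of 16 index/store/term-vector configurations.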
+
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Document d = new Document();
+                for (int j = 0; j < docs[i].Length; j++)
+                {
+                    d.Add(GetField(docs[i][j]));
+                }
+
+                writer.AddDocument(d);
+            }
+
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            foreach (SegmentCommitInfo si in sis.Segments)
+            {
+                FieldInfos fis = SegmentReader.ReadFieldInfos(si);
+
+                foreach (FieldInfo fi in fis)
+                {
+                    Field expected = GetField(Convert.ToInt32(fi.Name));
+                    Assert.AreEqual(expected.FieldType.IsIndexed, fi.IsIndexed);
+                    Assert.AreEqual(expected.FieldType.StoreTermVectors, fi.HasVectors);
+                }
+            }
+
+            dir.Dispose();
+        }
+
+        private Field GetField(int number)
+        {
+            int mode = number % 16;
+            string fieldName = "" + number;
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.IsTokenized = false;
+
+            FieldType customType3 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType3.IsTokenized = false;
+
+            FieldType customType4 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType4.IsTokenized = false;
+            customType4.StoreTermVectors = true;
+            customType4.StoreTermVectorOffsets = true;
+
+            FieldType customType5 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType5.StoreTermVectors = true;
+            customType5.StoreTermVectorOffsets = true;
+
+            FieldType customType6 = new FieldType(TextField.TYPE_STORED);
+            customType6.IsTokenized = false;
+            customType6.StoreTermVectors = true;
+            customType6.StoreTermVectorOffsets = true;
+
+            FieldType customType7 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType7.IsTokenized = false;
+            customType7.StoreTermVectors = true;
+            customType7.StoreTermVectorOffsets = true;
+
+            FieldType customType8 = new FieldType(TextField.TYPE_STORED);
+            customType8.IsTokenized = false;
+            customType8.StoreTermVectors = true;
+            customType8.StoreTermVectorPositions = true;
+
+            FieldType customType9 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType9.StoreTermVectors = true;
+            customType9.StoreTermVectorPositions = true;
+
+            FieldType customType10 = new FieldType(TextField.TYPE_STORED);
+            customType10.IsTokenized = false;
+            customType10.StoreTermVectors = true;
+            customType10.StoreTermVectorPositions = true;
+
+            FieldType customType11 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType11.IsTokenized = false;
+            customType11.StoreTermVectors = true;
+            customType11.StoreTermVectorPositions = true;
+
+            FieldType customType12 = new FieldType(TextField.TYPE_STORED);
+            customType12.StoreTermVectors = true;
+            customType12.StoreTermVectorOffsets = true;
+            customType12.StoreTermVectorPositions = true;
+
+            FieldType customType13 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType13.StoreTermVectors = true;
+            customType13.StoreTermVectorOffsets = true;
+            customType13.StoreTermVectorPositions = true;
+
+            FieldType customType14 = new FieldType(TextField.TYPE_STORED);
+            customType14.IsTokenized = false;
+            customType14.StoreTermVectors = true;
+            customType14.StoreTermVectorOffsets = true;
+            customType14.StoreTermVectorPositions = true;
+
+            FieldType customType15 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType15.IsTokenized = false;
+            customType15.StoreTermVectors = true;
+            customType15.StoreTermVectorOffsets = true;
+            customType15.StoreTermVectorPositions = true;
+
+            switch (mode)
+            {
+                case 0:
+                    return new Field(fieldName, "some text", customType);
+
+                case 1:
+                    return new TextField(fieldName, "some text", Field.Store.NO);
+
+                case 2:
+                    return new Field(fieldName, "some text", customType2);
+
+                case 3:
+                    return new Field(fieldName, "some text", customType3);
+
+                case 4:
+                    return new Field(fieldName, "some text", customType4);
+
+                case 5:
+                    return new Field(fieldName, "some text", customType5);
+
+                case 6:
+                    return new Field(fieldName, "some text", customType6);
+
+                case 7:
+                    return new Field(fieldName, "some text", customType7);
+
+                case 8:
+                    return new Field(fieldName, "some text", customType8);
+
+                case 9:
+                    return new Field(fieldName, "some text", customType9);
+
+                case 10:
+                    return new Field(fieldName, "some text", customType10);
+
+                case 11:
+                    return new Field(fieldName, "some text", customType11);
+
+                case 12:
+                    return new Field(fieldName, "some text", customType12);
+
+                case 13:
+                    return new Field(fieldName, "some text", customType13);
+
+                case 14:
+                    return new Field(fieldName, "some text", customType14);
+
+                case 15:
+                    return new Field(fieldName, "some text", customType15);
+
+                default:
+                    return null;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCrash.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCrash.cs b/src/Lucene.Net.Tests/Index/TestCrash.cs
new file mode 100644
index 0000000..ee1db50
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCrash.cs
@@ -0,0 +1,229 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using NoLockFactory = Lucene.Net.Store.NoLockFactory;
+
+    [TestFixture]
+    public class TestCrash : LuceneTestCase
+    {
+        private IndexWriter InitIndex(IConcurrentMergeScheduler scheduler, Random random, bool initialCommit)
+        {
+            return InitIndex(scheduler, random, NewMockDirectory(random), initialCommit);
+        }
+
+        private IndexWriter InitIndex(IConcurrentMergeScheduler scheduler, Random random, MockDirectoryWrapper dir, bool initialCommit)
+        {
+            dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
+
+            scheduler.SetSuppressExceptions();
+
+            IndexWriter writer = new IndexWriter(dir,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+                .SetMaxBufferedDocs(10)
+                .SetMergeScheduler(scheduler));
+
+            if (initialCommit)
+            {
+                writer.Commit();
+            }
+
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            doc.Add(NewTextField("id", "0", Field.Store.NO));
+            for (int i = 0; i < 157; i++)
+            {
+                writer.AddDocument(doc);
+            }
+
+            return writer;
+        }
+
+        private void Crash(IndexWriter writer)
+        {
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+            var cms = (IConcurrentMergeScheduler)writer.Config.MergeScheduler;
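+            // Wait for running merges, simulate a machine crash (un-synced
+            // writes are dropped), wait again, then clear the crash state.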
+            cms.Sync();
+            dir.Crash();
+            cms.Sync();
+            dir.ClearCrash();
+        }
+
+        [Test]
+        public virtual void TestCrashWhileIndexing(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            // this test relies on being able to open a reader before any commit
+            // happened, so we must create an initial commit just to allow that, but
+            // before any documents were added.
+            IndexWriter writer = InitIndex(scheduler, Random(), true);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            // We create leftover files because merging could be
+            // running when we crash:
+            dir.AssertNoUnrefencedFilesOnClose = false;
+
+            Crash(writer);
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs < 157);
+            reader.Dispose();
+
+            // Make a new dir, copying from the crashed dir, and
+            // open IW on it, to confirm IW "recovers" after a
+            // crash:
+            Directory dir2 = NewDirectory(dir);
+            dir.Dispose();
+
+            (new RandomIndexWriter(Random(), dir2, Similarity, TimeZone)).Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestWriterAfterCrash(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            // this test relies on being able to open a reader before any commit
+            // happened, so we must create an initial commit just to allow that, but
+            // before any documents were added.
+            Console.WriteLine("TEST: initIndex");
+            IndexWriter writer = InitIndex(scheduler, Random(), true);
+            Console.WriteLine("TEST: done initIndex");
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            // We create leftover files because merging could be
+            // running / store files could be open when we crash:
+            dir.AssertNoUnrefencedFilesOnClose = false;
+
+            dir.PreventDoubleWrite = false;
+            Console.WriteLine("TEST: now crash");
+            Crash(writer);
+            writer = InitIndex(scheduler, Random(), dir, false);
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs < 314);
+            reader.Dispose();
+
+            // Make a new dir, copying from the crashed dir, and
+            // open IW on it, to confirm IW "recovers" after a
+            // crash:
+            Directory dir2 = NewDirectory(dir);
+            dir.Dispose();
+
+            (new RandomIndexWriter(Random(), dir2, Similarity, TimeZone)).Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCrashAfterReopen(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            IndexWriter writer = InitIndex(scheduler, Random(), false);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            // We create leftover files because merging could be
+            // running when we crash:
+            dir.AssertNoUnrefencedFilesOnClose = false;
+
+            writer.Dispose();
+            writer = InitIndex(scheduler, Random(), dir, false);
+            Assert.AreEqual(314, writer.MaxDoc);
+            Crash(writer);
+
+            /*
+            System.out.println("\n\nTEST: open reader");
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.Length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " +
+            dir.FileLength(l[i]) + " bytes");
+            */
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs >= 157);
+            reader.Dispose();
+
+            // Make a new dir, copying from the crashed dir, and
+            // open IW on it, to confirm IW "recovers" after a
+            // crash:
+            Directory dir2 = NewDirectory(dir);
+            dir.Dispose();
+
+            (new RandomIndexWriter(Random(), dir2, Similarity, TimeZone)).Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCrashAfterClose(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            IndexWriter writer = InitIndex(scheduler, Random(), false);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            writer.Dispose();
+            dir.Crash();
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.Length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.FileLength(l[i]) + " bytes");
+            */
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCrashAfterCloseNoWait(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            IndexWriter writer = InitIndex(scheduler, Random(), false);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            writer.Dispose(false);
+
+            dir.Crash();
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.Length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.FileLength(l[i]) + " bytes");
+            */
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs b/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
new file mode 100644
index 0000000..e53c933
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
@@ -0,0 +1,201 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FilterDirectory = Lucene.Net.Store.FilterDirectory;
+    using FSDirectory = Lucene.Net.Store.FSDirectory;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestCrashCausesCorruptIndex : LuceneTestCase
+    {
+        internal DirectoryInfo Path;
+
+        /// <summary>
+        /// LUCENE-3627: this test fails.
+        /// </summary>
+        [Test]
+        public virtual void TestCrashCorruptsIndexing()
+        {
+            Path = CreateTempDir("testCrashCorruptsIndexing");
+
+            IndexAndCrashOnCreateOutputSegments2();
+
+            SearchForFleas(2);
+
+            IndexAfterRestart();
+
+            SearchForFleas(3);
+        }
+
+        /// <summary>
+        /// Index 1 document and commit.
+        /// Prepare for crashing.
+        /// Index 1 more document; upon commit, creation of segments_2 will crash.
+        /// </summary>
+        private void IndexAndCrashOnCreateOutputSegments2()
+        {
+            Directory realDirectory = FSDirectory.Open(Path);
+            CrashAfterCreateOutput crashAfterCreateOutput = new CrashAfterCreateOutput(realDirectory);
+
+            // NOTE: cannot use RandomIndexWriter because it
+            // sometimes commits:
+            IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            indexWriter.AddDocument(Document);
+            // writes segments_1:
+            indexWriter.Commit();
+
+            crashAfterCreateOutput.GetCrashAfterCreateOutput = "segments_2";
+            indexWriter.AddDocument(Document);
+            try
+            {
+                // tries to write segments_2 but hits fake exc:
+                indexWriter.Commit();
+                Assert.Fail("should have hit CrashingException");
+            }
+#pragma warning disable 168
+            catch (CrashingException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            // writes segments_3
+            indexWriter.Dispose();
+            Assert.IsFalse(SlowFileExists(realDirectory, "segments_2"));
+            crashAfterCreateOutput.Dispose();
+        }
+
+        /// <summary>
+        /// Attempts to index another 1 document.
+        /// </summary>
+        private void IndexAfterRestart()
+        {
+            Directory realDirectory = NewFSDirectory(Path);
+
+            // LUCENE-3627 (before the fix): this line fails because
+            // it doesn't know what to do with the created but empty
+            // segments_2 file
+            IndexWriter indexWriter = new IndexWriter(realDirectory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // Currently the test fails above.
+            // However, to test the fix, the following lines should pass as well.
+            indexWriter.AddDocument(Document);
+            indexWriter.Dispose();
+            Assert.IsFalse(SlowFileExists(realDirectory, "segments_2"));
+            realDirectory.Dispose();
+        }
+
+        /// <summary>
+        /// Run an example search.
+        /// </summary>
+        private void SearchForFleas(int expectedTotalHits)
+        {
+            Directory realDirectory = NewFSDirectory(Path);
+            IndexReader indexReader = DirectoryReader.Open(realDirectory);
+            IndexSearcher indexSearcher = NewSearcher(indexReader);
+            TopDocs topDocs = indexSearcher.Search(new TermQuery(new Term(TEXT_FIELD, "fleas")), 10);
+            Assert.IsNotNull(topDocs);
+            Assert.AreEqual(expectedTotalHits, topDocs.TotalHits);
+            indexReader.Dispose();
+            realDirectory.Dispose();
+        }
+
+        private const string TEXT_FIELD = "text";
+
+        /// <summary>
+        /// Gets a document with content "my dog has fleas".
+        /// </summary>
+        private Document Document
+        {
+            get
+            {
+                Document document = new Document();
+                document.Add(NewTextField(TEXT_FIELD, "my dog has fleas", Field.Store.NO));
+                return document;
+            }
+        }
+
+        /// <summary>
+        /// The marker exception that we use in lieu of an
+        /// actual machine crash.
+        /// </summary>
+        private class CrashingException : Exception
+        {
+            public CrashingException(string msg)
+                : base(msg)
+            {
+            }
+        }
+
+        /// <summary>
+        /// This helper Directory simulates a crash right after
+        /// realDirectory.CreateOutput(..) has been called on a certain specified name.
+        /// </summary>
+        private class CrashAfterCreateOutput : FilterDirectory
+        {
+            internal string CrashAfterCreateOutput_Renamed;
+
+            public CrashAfterCreateOutput(Directory realDirectory)
+                : base(realDirectory)
+            {
+                SetLockFactory(realDirectory.LockFactory);
+            }
+
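+            // Write-only property naming the file whose CreateOutput call should trigger the simulated crash.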
+            public virtual string GetCrashAfterCreateOutput
+            {
+                set
+                {
+                    this.CrashAfterCreateOutput_Renamed = value;
+                }
+            }
+
+            public override IndexOutput CreateOutput(string name, IOContext cxt)
+            {
+                IndexOutput indexOutput = m_input.CreateOutput(name, cxt);
+                if (null != CrashAfterCreateOutput_Renamed && name.Equals(CrashAfterCreateOutput_Renamed))
+                {
+                    // CRASH!
+                    indexOutput.Dispose();
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now crash");
+                        Console.WriteLine(new Exception().StackTrace);
+                    }
+                    throw new CrashingException("crashAfterCreateOutput " + CrashAfterCreateOutput_Renamed);
+                }
+                return indexOutput;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCustomNorms.cs b/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
new file mode 100644
index 0000000..0db78f9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
@@ -0,0 +1,144 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using CollectionStatistics = Lucene.Net.Search.CollectionStatistics;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using PerFieldSimilarityWrapper = Lucene.Net.Search.Similarities.PerFieldSimilarityWrapper;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using TermStatistics = Lucene.Net.Search.TermStatistics;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestCustomNorms : LuceneTestCase
+    {
+        internal readonly string FloatTestField = "normsTestFloat";
+        internal readonly string ExceptionTestField = "normsTestExcp";
+
+        [Test]
+        public virtual void TestFloatNorms()
+        {
+            Directory dir = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            Similarity provider = new MySimProvider(this);
+            config.SetSimilarity(provider);
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
+            LineFileDocs docs = new LineFileDocs(Random());
+            int num = AtLeast(100);
+            for (int i = 0; i < num; i++)
+            {
+                Document doc = docs.NextDoc();
+                float nextFloat = (float)Random().NextDouble();
+                // Cast to double so more of the float's precision is written to the string.
+                Field f = new TextField(FloatTestField, "" + (double)nextFloat, Field.Store.YES);
+                f.Boost = nextFloat;
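+                // MySimProvider below encodes this boost verbatim into the norm for FloatTestField.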
+
+                doc.Add(f);
+                writer.AddDocument(doc);
+                doc.RemoveField(FloatTestField);
+                if (Rarely())
+                {
+                    writer.Commit();
+                }
+            }
+            writer.Commit();
+            writer.Dispose();
+            AtomicReader open = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            NumericDocValues norms = open.GetNormValues(FloatTestField);
+            Assert.IsNotNull(norms);
+            for (int i = 0; i < open.MaxDoc; i++)
+            {
+                Document document = open.Document(i);
+                float expected = Convert.ToSingle(document.Get(FloatTestField));
+                Assert.AreEqual(expected, Number.Int32BitsToSingle((int)norms.Get(i)), 0.0f);
+            }
+            open.Dispose();
+            dir.Dispose();
+            docs.Dispose();
+        }
+
+        public class MySimProvider : PerFieldSimilarityWrapper
+        {
+            private readonly TestCustomNorms OuterInstance;
+
+            public MySimProvider(TestCustomNorms outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal Similarity @delegate = new DefaultSimilarity();
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return @delegate.QueryNorm(sumOfSquaredWeights);
+            }
+
+            public override Similarity Get(string field)
+            {
+                if (OuterInstance.FloatTestField.Equals(field))
+                {
+                    return new FloatEncodingBoostSimilarity();
+                }
+                else
+                {
+                    return @delegate;
+                }
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return @delegate.Coord(overlap, maxOverlap);
+            }
+        }
+
+        public class FloatEncodingBoostSimilarity : Similarity
+        {
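+            // Encodes the boost's raw IEEE-754 bit pattern as the norm; the
+            // test decodes it with Number.Int32BitsToSingle.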
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                return Number.SingleToInt32Bits(state.Boost);
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                throw new System.NotSupportedException();
+            }
+
+            public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException();
+            }
+        }
+    }
+}
\ No newline at end of file

