diff --git a/src/Lucene.Net.TestFramework/Support/TestFramework/Assert.cs b/src/Lucene.Net.TestFramework/Support/TestFramework/Assert.cs
index 5e717c59d4..567504d98f 100644
--- a/src/Lucene.Net.TestFramework/Support/TestFramework/Assert.cs
+++ b/src/Lucene.Net.TestFramework/Support/TestFramework/Assert.cs
@@ -930,7 +930,7 @@ public static void IsTrue(bool condition)
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void NotNull(object anObject)
{
- if (!(anObject is null))
+ if (anObject is not null)
_NUnit.Assert.NotNull(anObject);
}
//
@@ -973,7 +973,7 @@ public static void NotNull(object anObject, string message, params object[] args
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Null(object anObject, string message, params object[] args)
{
- if (!(anObject is null))
+ if (anObject is not null)
_NUnit.Assert.Null(anObject, message, args);
}
//
@@ -988,7 +988,7 @@ public static void Null(object anObject, string message, params object[] args)
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Null(object anObject)
{
- if (!(anObject is null))
+ if (anObject is not null)
_NUnit.Assert.Null(anObject);
}
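
[Editor's note] The pattern rewrite above is purely syntactic: `x is not null` (C# 9) is exactly equivalent to `!(x is null)`. Both test the reference against null directly and ignore any user-defined `==`/`!=` operator overloads, which is why these guards use `is` rather than `!= null`. A minimal standalone sketch; the `Widget` type is hypothetical, not part of Lucene.NET:

using System;

class Widget
{
    // Deliberately misbehaving overloads: report every comparison as equal.
    public static bool operator ==(Widget a, Widget b) => true;
    public static bool operator !=(Widget a, Widget b) => false;
    public override bool Equals(object obj) => base.Equals(obj);
    public override int GetHashCode() => base.GetHashCode();
}

class PatternDemo
{
    static void Main()
    {
        Widget w = new Widget();
        Console.WriteLine(w != null);     // False: the overload wins
        Console.WriteLine(!(w is null));  // True: patterns ignore overloads
        Console.WriteLine(w is not null); // True: identical meaning, C# 9 syntax
    }
}
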
diff --git a/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs b/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs
index 4f8197c9ab..4bd5db63ce 100644
--- a/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs
+++ b/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs
@@ -30,7 +30,7 @@ namespace Lucene.Net.Index
/// A binary tokenstream that lets you index a single
/// binary token (BytesRef value).
///
- /// <seealso cref= CannedBinaryTokenStream </seealso>
+ /// <seealso cref="CannedBinaryTokenStream"/>
public sealed class BinaryTokenStream : TokenStream
{
private readonly IByteTermAttribute bytesAtt;// = addAttribute(typeof(ByteTermAttribute));
@@ -91,4 +91,4 @@ public override void CopyTo(IAttribute target)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
index f400c47841..a212007f2a 100644
--- a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
@@ -49,13 +49,14 @@ public virtual void TestFixedBinary()
{
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
Document doc = new Document();
var bytes = new byte[4];
@@ -116,13 +117,13 @@ public virtual void TestVariableBinary()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
Document doc = new Document();
var bytes = new byte[4];
@@ -172,4 +173,4 @@ public virtual void TestVariableBinary()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
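
[Editor's note] The recurring rewrite here and in the files below folds the temporary `config`/`iwc` local into the `IndexWriter` constructor argument. The chain reads naturally because every `Set*` method on `IndexWriterConfig` returns the config itself, which is also what lets later hunks drop the old `(IndexWriterConfig)` casts. A minimal sketch of the same call shape, assuming the Lucene.NET 4.8 API surface and an illustrative analyzer/directory choice:

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

class FluentConfigSketch
{
    static void Main()
    {
        using Directory dir = new RAMDirectory();
        // Each Set* call returns the same IndexWriterConfig, so the whole
        // configuration can be written inline at the constructor call site.
        using IndexWriter writer = new IndexWriter(dir,
            new IndexWriterConfig(LuceneVersion.LUCENE_48, new StandardAnalyzer(LuceneVersion.LUCENE_48))
                .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) // flush by RAM usage only
                .SetRAMBufferSizeMB(256.0)
                .SetOpenMode(OpenMode.CREATE));
    }
}
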
diff --git a/src/Lucene.Net.Tests/Index/Test2BDocs.cs b/src/Lucene.Net.Tests/Index/Test2BDocs.cs
index 8b9d224fca..aa689a6022 100644
--- a/src/Lucene.Net.Tests/Index/Test2BDocs.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BDocs.cs
@@ -63,7 +63,7 @@ public virtual void TestOverflow()
Arrays.Fill(subReaders, ir);
try
{
- new MultiReader(subReaders);
+ _ = new MultiReader(subReaders); // LUCENENET-specific: discard result
Assert.Fail();
}
catch (Exception expected) when (expected.IsIllegalArgumentException())
@@ -97,4 +97,4 @@ public virtual void TestExactlyAtLimit()
dir2.Dispose();
}
}
-}
\ No newline at end of file
+}
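
[Editor's note] The `_ =` discard above makes the intent explicit: the `MultiReader` is constructed only so its constructor can throw when the combined document count overflows `int.MaxValue`; the instance itself is never used. A standalone sketch of the same idiom using a BCL type:

using System;

class DiscardSketch
{
    static void Main()
    {
        try
        {
            // Constructed purely to provoke the argument check; the discard
            // documents that the result is intentionally unused.
            _ = new string('x', -1);
            Console.WriteLine("unreachable");
        }
        catch (ArgumentOutOfRangeException)
        {
            Console.WriteLine("negative length rejected, as expected");
        }
    }
}
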
diff --git a/src/Lucene.Net.Tests/Index/Test2BPositions.cs b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
index aeed283dd3..fadad9dffb 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPositions.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
@@ -42,8 +42,8 @@ namespace Lucene.Net.Index
[SuppressCodecs("SimpleText", "Memory", "Direct")]
[TestFixture]
public class Test2BPositions : LuceneTestCase
- // uses lots of space and takes a few minutes
{
+ // uses lots of space and takes a few minutes
[Ignore("Very slow. Enable manually by removing Ignore.")]
[Test]
public virtual void Test()
@@ -75,7 +75,7 @@ public virtual void Test()
Field field = new Field("field", new MyTokenStream(), ft);
doc.Add(field);
- int numDocs = (int.MaxValue / 26) + 1;
+ const int numDocs = (int.MaxValue / 26) + 1;
for (int i = 0; i < numDocs; i++)
{
w.AddDocument(doc);
@@ -91,10 +91,11 @@ public virtual void Test()
public sealed class MyTokenStream : TokenStream
{
- internal readonly ICharTermAttribute termAtt;
- internal readonly IPositionIncrementAttribute posIncAtt;
+ private readonly ICharTermAttribute termAtt;
+ private readonly IPositionIncrementAttribute posIncAtt;
internal int index;
+ // LUCENENET-specific: must call AddAttribute from ctor in .NET
public MyTokenStream()
{
termAtt = AddAttribute<ICharTermAttribute>();
@@ -121,4 +122,4 @@ public override void Reset()
}
}
}
-}
\ No newline at end of file
+}
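
[Editor's note] The `must call AddAttribute from ctor` comment reflects an initialization-order difference between the platforms: a Java instance initializer runs after the superclass constructor, so the Java original can assign `termAtt` at the field declaration, but C# runs derived-class field initializers before the base constructor body, so the attribute registry set up by the `TokenStream`/`AttributeSource` base constructor would not exist yet. A minimal ordering demonstration with illustrative class names:

using System;

class BaseSource
{
    public BaseSource() => Console.WriteLine("2: base constructor body");
}

class Derived : BaseSource
{
    // Derived field initializers run FIRST, before the base constructor,
    // so they must not depend on state the base constructor sets up.
    private readonly int order = Report();

    public Derived() => Console.WriteLine($"3: derived constructor body (order={order})");

    private static int Report()
    {
        Console.WriteLine("1: derived field initializer");
        return 0;
    }
}

class InitOrderDemo
{
    static void Main() => _ = new Derived(); // prints 1, 2, 3
}
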
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostings.cs b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
index 19640a9e3b..a192373560 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPostings.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
@@ -54,14 +54,14 @@ public virtual void Test()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
+ var iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir, iwc);
MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -77,7 +77,7 @@ public virtual void Test()
Field field = new Field("field", new MyTokenStream(), ft);
doc.Add(field);
- int numDocs = (int.MaxValue / 26) + 1;
+ const int numDocs = (int.MaxValue / 26) + 1;
for (int i = 0; i < numDocs; i++)
{
w.AddDocument(doc);
@@ -93,9 +93,10 @@ public virtual void Test()
public sealed class MyTokenStream : TokenStream
{
- internal readonly ICharTermAttribute termAtt;
+ private readonly ICharTermAttribute termAtt;
internal int index;
+ // LUCENENET-specific: must call AddAttribute from ctor in .NET
public MyTokenStream()
{
termAtt = AddAttribute<ICharTermAttribute>();
@@ -119,4 +120,4 @@ public override void Reset()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
index b79695a287..d525138939 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
@@ -41,13 +41,13 @@ namespace Lucene.Net.Index
/// so you get > Integer.MAX_VALUE postings data for the term
/// @lucene.experimental
///
+ // disable Lucene3x: older lucene formats always had this issue.
[SuppressCodecs("SimpleText", "Memory", "Direct", "Lucene3x")]
[TestFixture]
public class Test2BPostingsBytes : LuceneTestCase
- // disable Lucene3x: older lucene formats always had this issue.
- // @Absurd @Ignore takes ~20GB-30GB of space and 10 minutes.
- // with some codecs needs more heap space as well.
{
+ // @Absurd @Ignore takes ~20GB-30GB of space and 10 minutes.
+ // with some codecs needs more heap space as well.
[Ignore("Very slow. Enable manually by removing Ignore.")]
[Test]
public virtual void Test()
@@ -58,13 +58,13 @@ public virtual void Test()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -106,7 +106,8 @@ public virtual void Test()
{
((MockDirectoryWrapper)dir2).Throttling = Throttling.NEVER;
}
- IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+ IndexWriter w2 = new IndexWriter(dir2,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, null));
w2.AddIndexes(mr);
w2.ForceMerge(1);
w2.Dispose();
@@ -121,7 +122,8 @@ public virtual void Test()
{
((MockDirectoryWrapper)dir3).Throttling = Throttling.NEVER;
}
- IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+ IndexWriter w3 = new IndexWriter(dir3,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, null));
w3.AddIndexes(mr);
w3.ForceMerge(1);
w3.Dispose();
@@ -134,10 +136,11 @@ public virtual void Test()
public sealed class MyTokenStream : TokenStream
{
- internal readonly ICharTermAttribute termAtt;
+ private readonly ICharTermAttribute termAtt;
internal int index;
internal int n;
+ // LUCENENET-specific: must call AddAttribute from ctor in .NET
public MyTokenStream()
{
termAtt = AddAttribute<ICharTermAttribute>();
@@ -162,4 +165,4 @@ public override void Reset()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
index 5af76e5c36..70cd900834 100644
--- a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
@@ -48,12 +48,13 @@ public virtual void TestFixedSorted()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
- IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE));
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
Document doc = new Document();
var bytes = new byte[2];
@@ -110,13 +111,13 @@ public virtual void Test2BOrds()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
Document doc = new Document();
var bytes = new byte[4];
@@ -169,4 +170,4 @@ public virtual void Test2BOrds()
// TODO: variable
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BTerms.cs b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
index 698ad79fe6..e55c2735fd 100644
--- a/src/Lucene.Net.Tests/Index/Test2BTerms.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
@@ -59,12 +59,12 @@ public class Test2BTerms : LuceneTestCase
private sealed class MyTokenStream : TokenStream
{
- internal readonly int tokensPerDoc;
- internal int tokenCount;
+ private readonly int tokensPerDoc;
+ private int tokenCount;
public readonly IList<BytesRef> savedTerms = new JCG.List<BytesRef>();
- internal int nextSave;
- internal long termCounter;
- internal readonly Random random;
+ private int nextSave;
+ private long termCounter;
+ private readonly Random random;
public MyTokenStream(Random random, int tokensPerDoc)
: base(new MyAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY))
@@ -140,7 +140,7 @@ public override object Clone()
private sealed class MyAttributeFactory : AttributeFactory
{
- internal readonly AttributeFactory @delegate;
+ private readonly AttributeFactory @delegate;
public MyAttributeFactory(AttributeFactory @delegate)
{
@@ -172,7 +172,7 @@ public virtual void Test2BTerms_Mem()
throw RuntimeException.Create("this test cannot run with PreFlex codec");
}
Console.WriteLine("Starting Test2B");
- long TERM_COUNT = ((long)int.MaxValue) + 100000000;
+ const long TERM_COUNT = ((long)int.MaxValue) + 100000000;
int TERMS_PER_DOC = TestUtil.NextInt32(Random, 100000, 1000000);
@@ -188,12 +188,13 @@ public virtual void Test2BTerms_Mem()
if (true)
{
- IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE));
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -202,7 +203,7 @@ public virtual void Test2BTerms_Mem()
((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
}
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
MyTokenStream ts = new MyTokenStream(Random, TERMS_PER_DOC);
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
@@ -311,4 +312,4 @@ private void TestSavedTerms(IndexReader r, IList<BytesRef> terms)
Assert.IsFalse(failed);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
index f4702c9e45..7a4ae203f8 100644
--- a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
+++ b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
@@ -55,13 +55,13 @@ public virtual void Test()
MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new MMapDirectory(CreateTempDir("4GBStoredFields")));
dir.Throttling = Throttling.NEVER;
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -129,4 +129,4 @@ public virtual void Test()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
index 0e3bd371b4..fe49a296de 100644
--- a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
+++ b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
@@ -68,14 +68,21 @@ public virtual void TestSimpleCase()
IndexWriter writer = null;
- writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
+ writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT,
+ new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE));
// add 100 documents
AddDocs(writer, 100);
Assert.AreEqual(100, writer.MaxDoc);
writer.Dispose();
TestUtil.CheckIndex(dir);
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy(false)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMergePolicy(NewLogMergePolicy(false))
+ );
// add 40 documents in separate files
AddDocs(writer, 40);
Assert.AreEqual(40, writer.MaxDoc);
@@ -295,11 +302,23 @@ public virtual void TestAddSelf()
Assert.AreEqual(100, writer.MaxDoc);
writer.Dispose();
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(1000)
+ .SetMergePolicy(NewLogMergePolicy(false))
+ );
// add 140 documents in separate files
AddDocs(writer, 40);
writer.Dispose();
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(1000)
+ .SetMergePolicy(NewLogMergePolicy(false))
+ );
AddDocs(writer, 100);
writer.Dispose();
@@ -335,7 +354,13 @@ public virtual void TestNoTailSegments()
SetUpDirs(dir, aux);
- IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
+ IndexWriter writer = NewWriter(
+ dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(10)
+ .SetMergePolicy(NewLogMergePolicy(4))
+ );
AddDocs(writer, 10);
writer.AddIndexes(aux);
@@ -360,7 +385,12 @@ public virtual void TestNoCopySegments()
SetUpDirs(dir, aux);
- IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(9).SetMergePolicy(NewLogMergePolicy(4)));
+ IndexWriter writer = NewWriter(
+ dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(9)
+ .SetMergePolicy(NewLogMergePolicy(4)));
AddDocs(writer, 2);
writer.AddIndexes(aux);
@@ -385,7 +415,13 @@ public virtual void TestNoMergeAfterCopy()
SetUpDirs(dir, aux);
- IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
+ IndexWriter writer = NewWriter(
+ dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(10)
+ .SetMergePolicy(NewLogMergePolicy(4))
+ );
writer.AddIndexes(aux, new MockDirectoryWrapper(Random, new RAMDirectory(aux, NewIOContext(Random))));
Assert.AreEqual(1060, writer.MaxDoc);
@@ -409,7 +445,8 @@ public virtual void TestMergeAfterCopy()
SetUpDirs(dir, aux, true);
- IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+ IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
IndexWriter writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 20; i++)
{
@@ -420,7 +457,13 @@ public virtual void TestMergeAfterCopy()
Assert.AreEqual(10, reader.NumDocs);
reader.Dispose();
- writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(4).SetMergePolicy(NewLogMergePolicy(4)));
+ writer = NewWriter(
+ dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(4)
+ .SetMergePolicy(NewLogMergePolicy(4))
+ );
if (Verbose)
{
@@ -446,13 +489,20 @@ public virtual void TestMoreMerges()
SetUpDirs(dir, aux, true);
- IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = NewWriter(
+ aux2,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(100)
+ .SetMergePolicy(NewLogMergePolicy(10))
+ );
writer.AddIndexes(aux);
Assert.AreEqual(30, writer.MaxDoc);
Assert.AreEqual(3, writer.SegmentCount);
writer.Dispose();
- IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+ IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 27; i++)
{
@@ -463,7 +513,8 @@ public virtual void TestMoreMerges()
Assert.AreEqual(3, reader.NumDocs);
reader.Dispose();
- dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+ dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux2, dontMergeConfig);
for (int i = 0; i < 8; i++)
{
@@ -474,7 +525,13 @@ public virtual void TestMoreMerges()
Assert.AreEqual(22, reader.NumDocs);
reader.Dispose();
- writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4)));
+ writer = NewWriter(
+ dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(6)
+ .SetMergePolicy(NewLogMergePolicy(4))
+ );
writer.AddIndexes(aux, aux2);
Assert.AreEqual(1040, writer.MaxDoc);
@@ -542,7 +599,7 @@ private void SetUpDirs(Directory dir, Directory aux, bool withID)
{
IndexWriter writer = null;
- writer = NewWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000));
+ writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
if (withID)
{
@@ -556,7 +613,13 @@ private void SetUpDirs(Directory dir, Directory aux, bool withID)
Assert.AreEqual(1, writer.SegmentCount);
writer.Dispose();
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(1000)
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ );
// add 30 documents in 3 segments
for (int i = 0; i < 3; i++)
{
@@ -569,7 +632,13 @@ private void SetUpDirs(Directory dir, Directory aux, bool withID)
AddDocs(writer, 10);
}
writer.Dispose();
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(1000)
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ );
}
Assert.AreEqual(30, writer.MaxDoc);
Assert.AreEqual(3, writer.SegmentCount);
@@ -584,7 +653,9 @@ public virtual void TestHangOnClose()
LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
lmp.NoCFSRatio = 0.0;
lmp.MergeFactor = 100;
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(5).SetMergePolicy(lmp));
+ IndexWriter writer = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(5).SetMergePolicy(lmp));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -615,7 +686,9 @@ public virtual void TestHangOnClose()
lmp.MinMergeMB = 0.0001;
lmp.NoCFSRatio = 0.0;
lmp.MergeFactor = 4;
- writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp));
+ writer = new IndexWriter(dir2,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp));
writer.AddIndexes(dir);
writer.Dispose();
dir.Dispose();
@@ -624,7 +697,8 @@ public virtual void TestHangOnClose()
// TODO: these are also in TestIndexWriter... add a simple doc-writing method
// like this to LuceneTestCase?
- private void AddDoc(IndexWriter writer)
+ // LUCENENET specific - made static
+ private static void AddDoc(IndexWriter writer)
{
Document doc = new Document();
doc.Add(NewTextField("content", "aaa", Field.Store.NO));
@@ -643,14 +717,16 @@ private abstract class RunAddIndexesThreads
internal const int NUM_THREADS = 5;
internal readonly ThreadJob[] threads = new ThreadJob[NUM_THREADS];
- public RunAddIndexesThreads(TestAddIndexes outerInstance, int numCopy)
+ public RunAddIndexesThreads(int numCopy)
{
NUM_COPY = numCopy;
dir = new MockDirectoryWrapper(Random, new RAMDirectory());
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++)
{
- outerInstance.AddDoc(writer);
+ AddDoc(writer);
}
writer.Dispose();
@@ -748,8 +824,8 @@ internal virtual void CloseDir()
private class CommitAndAddIndexes : RunAddIndexesThreads
{
- public CommitAndAddIndexes(TestAddIndexes outerInstance, int numCopy)
- : base(outerInstance, numCopy)
+ public CommitAndAddIndexes(int numCopy)
+ : base(numCopy)
{
}
@@ -824,7 +900,7 @@ public virtual void TestAddIndexesWithThreads()
{
int NUM_ITER = TestNightly ? 15 : 5;
const int NUM_COPY = 3;
- CommitAndAddIndexes c = new CommitAndAddIndexes(this, NUM_COPY);
+ CommitAndAddIndexes c = new CommitAndAddIndexes(NUM_COPY);
c.LaunchThreads(NUM_ITER);
for (int i = 0; i < 100; i++)
@@ -851,8 +927,8 @@ public virtual void TestAddIndexesWithThreads()
private class CommitAndAddIndexes2 : CommitAndAddIndexes
{
- public CommitAndAddIndexes2(TestAddIndexes outerInstance, int numCopy)
- : base(outerInstance, numCopy)
+ public CommitAndAddIndexes2(int numCopy)
+ : base(numCopy)
{
}
@@ -879,7 +955,7 @@ internal override void Handle(Exception t)
public virtual void TestAddIndexesWithClose()
{
const int NUM_COPY = 3;
- CommitAndAddIndexes2 c = new CommitAndAddIndexes2(this, NUM_COPY);
+ CommitAndAddIndexes2 c = new CommitAndAddIndexes2(NUM_COPY);
//c.writer2.setInfoStream(System.out);
c.LaunchThreads(-1);
@@ -896,8 +972,8 @@ public virtual void TestAddIndexesWithClose()
private class CommitAndAddIndexes3 : RunAddIndexesThreads
{
- public CommitAndAddIndexes3(TestAddIndexes outerInstance, int numCopy)
- : base(outerInstance, numCopy)
+ public CommitAndAddIndexes3(int numCopy)
+ : base(numCopy)
{
}
@@ -990,7 +1066,7 @@ internal override void Handle(Exception t)
public virtual void TestAddIndexesWithCloseNoWait()
{
const int NUM_COPY = 50;
- CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY);
+ CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
c.LaunchThreads(-1);
Thread.Sleep(TestUtil.NextInt32(Random, 10, 500));
@@ -1019,7 +1095,7 @@ public virtual void TestAddIndexesWithCloseNoWait()
public virtual void TestAddIndexesWithRollback()
{
int NUM_COPY = TestNightly ? 50 : 5;
- CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY);
+ CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
c.LaunchThreads(-1);
Thread.Sleep(TestUtil.NextInt32(Random, 10, 500));
@@ -1055,6 +1131,7 @@ public virtual void TestExistingDeletes()
writer.Dispose();
}
+ // LUCENENET-specific: renamed to avoid conflict with variables above
IndexWriterConfig conf_ = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
IndexWriter writer_ = new IndexWriter(dirs[0], conf_);
@@ -1102,7 +1179,8 @@ public virtual void TestSimpleCaseCustomCodec()
Codec codec = new CustomPerFieldCodec();
IndexWriter writer = null;
- writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec));
+ writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE).SetCodec(codec));
// add 100 documents
AddDocsWithID(writer, 100, 0);
Assert.AreEqual(100, writer.MaxDoc);
@@ -1110,14 +1188,26 @@ public virtual void TestSimpleCaseCustomCodec()
writer.Dispose();
TestUtil.CheckIndex(dir);
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetCodec(codec)
+ .SetMaxBufferedDocs(10)
+ .SetMergePolicy(NewLogMergePolicy(false))
+ );
// add 40 documents in separate files
AddDocs(writer, 40);
Assert.AreEqual(40, writer.MaxDoc);
writer.Commit();
writer.Dispose();
- writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec));
+ writer = NewWriter(
+ aux2,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetCodec(codec)
+ );
// add 40 documents in compound files
AddDocs2(writer, 50);
Assert.AreEqual(50, writer.MaxDoc);
@@ -1125,7 +1215,12 @@ public virtual void TestSimpleCaseCustomCodec()
writer.Dispose();
// test doc count before segments are merged
- writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetCodec(codec));
+ writer = NewWriter(
+ dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetCodec(codec)
+ );
Assert.AreEqual(100, writer.MaxDoc);
writer.AddIndexes(aux, aux2);
Assert.AreEqual(190, writer.MaxDoc);
@@ -1138,16 +1233,9 @@ public virtual void TestSimpleCaseCustomCodec()
private sealed class CustomPerFieldCodec : Lucene46Codec
{
- internal readonly PostingsFormat simpleTextFormat;
- internal readonly PostingsFormat defaultFormat;
- internal readonly PostingsFormat mockSepFormat;
-
- public CustomPerFieldCodec()
- {
- simpleTextFormat = Codecs.PostingsFormat.ForName("SimpleText");
- defaultFormat = Codecs.PostingsFormat.ForName("Lucene41");
- mockSepFormat = Codecs.PostingsFormat.ForName("MockSep");
- }
+ private readonly PostingsFormat simpleTextFormat = PostingsFormat.ForName("SimpleText");
+ private readonly PostingsFormat defaultFormat = PostingsFormat.ForName("Lucene41");
+ private readonly PostingsFormat mockSepFormat = PostingsFormat.ForName("MockSep");
public override PostingsFormat GetPostingsFormatForField(string field)
{
@@ -1186,7 +1274,7 @@ public virtual void TestNonCFSLeftovers()
IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dirs[0]), DirectoryReader.Open(dirs[1]) };
Directory dir = new MockDirectoryWrapper(Random, new RAMDirectory());
- IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NewLogMergePolicy(true));
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(true));
MergePolicy lmp = conf.MergePolicy;
// Force creation of CFS:
lmp.NoCFSRatio = 1.0;
@@ -1380,9 +1468,7 @@ public virtual void TestLocksBlock()
w2.AddIndexes(src);
Assert.Fail("did not hit expected exception");
}
-#pragma warning disable 168
- catch (LockObtainFailedException lofe)
-#pragma warning restore 168
+ catch (LockObtainFailedException /*lofe*/)
{
// expected
}
@@ -1390,4 +1476,4 @@ public virtual void TestLocksBlock()
IOUtils.Dispose(w1, w2, src, dest);
}
}
-}
\ No newline at end of file
+}
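
[Editor's note] Replacing the `#pragma warning disable 168` pair with `catch (LockObtainFailedException /*lofe*/)` removes the unused local while keeping its name in a comment. C# also allows dropping the identifier altogether, which avoids warning CS0168 with no pragma or comment needed; a minimal sketch:

using System;

class CatchDemo
{
    static void Main()
    {
        try
        {
            throw new InvalidOperationException("expected");
        }
        catch (InvalidOperationException) // no variable, no CS0168 warning
        {
            Console.WriteLine("expected exception observed");
        }
    }
}
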
diff --git a/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs b/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs
index 9ac3c443ae..dbb4efc9d9 100644
--- a/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs
+++ b/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs
@@ -111,4 +111,4 @@ private void CheckHeaders(Directory dir)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs b/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs
index 4c86228d8d..7c643422cb 100644
--- a/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs
+++ b/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs
@@ -117,4 +117,4 @@ private void CheckHeaders(Directory dir)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs b/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs
index c45c620b75..48410fe188 100644
--- a/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs
+++ b/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs
@@ -37,8 +37,8 @@ private abstract class TimedThread : ThreadJob
{
internal volatile bool failed;
internal int count;
- internal static float RUN_TIME_MSEC = AtLeast(500);
- internal TimedThread[] allThreads;
+ private static float RUN_TIME_MSEC = AtLeast(500);
+ private TimedThread[] allThreads;
public abstract void DoWork();
@@ -73,7 +73,7 @@ public override void Run()
}
}
- internal virtual bool AnyErrors()
+ private bool AnyErrors()
{
for (int i = 0; i < allThreads.Length; i++)
{
@@ -101,7 +101,7 @@ public override void DoWork()
// Update all 100 docs...
for (int i = 0; i < 100; i++)
{
- Documents.Document d = new Documents.Document();
+ Document d = new Document();
d.Add(new StringField("id", Convert.ToString(i), Field.Store.YES));
d.Add(new TextField("contents", English.Int32ToEnglish(i + 10 * count), Field.Store.NO));
writer.UpdateDocument(new Term("id", Convert.ToString(i)), d);
@@ -136,14 +136,15 @@ public virtual void RunTest(Directory directory)
{
TimedThread[] threads = new TimedThread[4];
- IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(7);
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(7);
((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3;
IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random);
// Establish a base index of 100 docs:
for (int i = 0; i < 100; i++)
{
- Documents.Document d = new Documents.Document();
+ Document d = new Document();
d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
d.Add(NewTextField("contents", English.Int32ToEnglish(i), Field.Store.NO));
if ((i - 1) % 7 == 0)
@@ -213,7 +214,7 @@ public virtual void TestAtomicUpdates()
{
RunTest(directory);
}
- System.IO.Directory.Delete(dirPath.FullName, true);
+ TestUtil.Rm(dirPath);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
index cc8213f30b..ad0cf561c9 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
@@ -158,19 +158,39 @@ public void testCreateMoreTermsIndex() throws Exception {
}
*/
- internal static readonly string[] oldNames = new string[] {
- "40.cfs", "40.nocfs", "41.cfs", "41.nocfs", "42.cfs",
- "42.nocfs", "45.cfs", "45.nocfs", "461.cfs", "461.nocfs"
+ internal static readonly string[] oldNames = {
+ "40.cfs",
+ "40.nocfs",
+ "41.cfs",
+ "41.nocfs",
+ "42.cfs",
+ "42.nocfs",
+ "45.cfs",
+ "45.nocfs",
+ "461.cfs",
+ "461.nocfs"
};
- internal readonly string[] unsupportedNames = new string[] {
- "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs",
- "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs",
- "24.cfs", "24.nocfs", "29.cfs", "29.nocfs"
+ internal readonly string[] unsupportedNames = {
+ "19.cfs",
+ "19.nocfs",
+ "20.cfs",
+ "20.nocfs",
+ "21.cfs",
+ "21.nocfs",
+ "22.cfs",
+ "22.nocfs",
+ "23.cfs",
+ "23.nocfs",
+ "24.cfs",
+ "24.nocfs",
+ "29.cfs",
+ "29.nocfs"
};
- internal static readonly string[] oldSingleSegmentNames = new string[] {
- "40.optimized.cfs", "40.optimized.nocfs"
+ internal static readonly string[] oldSingleSegmentNames = {
+ "40.optimized.cfs",
+ "40.optimized.nocfs"
};
internal static IDictionary<string, Directory> oldIndexDirs;
@@ -178,7 +198,7 @@ public void testCreateMoreTermsIndex() throws Exception {
///
/// Randomizes the use of some of hte constructor variations
///
- private IndexUpgrader NewIndexUpgrader(Directory dir)
+ private static IndexUpgrader NewIndexUpgrader(Directory dir)
{
bool streamType = Random.NextBoolean();
int choice = TestUtil.NextInt32(Random, 0, 2);
@@ -256,9 +276,7 @@ public virtual void TestUnsupportedOldIndexes()
reader = DirectoryReader.Open(dir);
Assert.Fail("DirectoryReader.open should not pass for " + unsupportedNames[i]);
}
-#pragma warning disable 168
- catch (IndexFormatTooOldException e)
-#pragma warning restore 168
+ catch (IndexFormatTooOldException /*e*/)
{
// pass
}
@@ -301,16 +319,16 @@ public virtual void TestUnsupportedOldIndexes()
writer = null;
}
- StringBuilder sb = new StringBuilder(1024);
+ StringBuilder bos = new StringBuilder(512); // LUCENENET specific: allocating 512 chars instead of 1024 bytes
CheckIndex checker = new CheckIndex(dir);
CheckIndex.Status indexStatus;
- using (var infoStream = new StringWriter(sb))
+ using (var infoStream = new StringWriter(bos))
{
checker.InfoStream = infoStream;
indexStatus = checker.DoCheckIndex();
}
Assert.IsFalse(indexStatus.Clean);
- Assert.IsTrue(sb.ToString().Contains(typeof(IndexFormatTooOldException).Name));
+ Assert.IsTrue(bos.ToString().Contains(nameof(IndexFormatTooOldException)));
dir.Dispose();
TestUtil.Rm(oldIndxeDir);
@@ -656,7 +674,8 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM
mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
mp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
// TODO: remove randomness
- IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp);
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp);
IndexWriter writer = new IndexWriter(dir, conf);
for (int i = 0; i < 35; i++)
@@ -676,12 +695,14 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM
mp = new LogByteSizeMergePolicy();
mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
// TODO: remove randomness
- conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp);
+ conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp);
writer = new IndexWriter(dir, conf);
AddNoProxDoc(writer);
writer.Dispose();
- conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
+ conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
writer = new IndexWriter(dir, conf);
Term searchTerm = new Term("id", "7");
writer.DeleteDocuments(searchTerm);
@@ -854,7 +875,6 @@ public virtual void TestNumericFields()
{
foreach (string name in oldNames)
{
-
Directory dir = oldIndexDirs[name];
IndexReader reader = DirectoryReader.Open(dir);
IndexSearcher searcher = NewSearcher(reader);
@@ -873,6 +893,7 @@ public virtual void TestNumericFields()
}
// check that also lower-precision fields are ok
+ // LUCENENET-specific: renamed to hits_ to avoid conflict with local variable
ScoreDoc[] hits_ = searcher.Search(NumericRangeQuery.NewInt32Range("trieInt", 4, int.MinValue, int.MaxValue, false, false), 100).ScoreDocs;
Assert.AreEqual(34, hits_.Length, "wrong number of hits");
@@ -947,7 +968,6 @@ public virtual void TestUpgradeOldIndex()
[Slow]
public virtual void TestCommandLineArgs()
{
-
foreach (string name in oldIndexDirs.Keys)
{
DirectoryInfo dir = CreateTempDir(name);
@@ -1022,8 +1042,9 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
for (int i = 0; i < 3; i++)
{
// only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
- MergePolicy mp = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
- IndexWriterConfig iwc = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(mp);
+ MergePolicy mp = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy();
+ IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(mp);
IndexWriter w = new IndexWriter(ramDir, iwc);
// add few more docs:
for (int j = 0; j < RandomMultiplier * Random.Next(30); j++)
@@ -1035,8 +1056,10 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
// add dummy segments (which are all in current
// version) to single segment index
- MergePolicy mp_ = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
- IndexWriterConfig iwc_ = (new IndexWriterConfig(TEST_VERSION_CURRENT, null)).SetMergePolicy(mp_);
+ // LUCENENET-specific: renamed variables to avoid conflict with ones above
+ MergePolicy mp_ = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy();
+ IndexWriterConfig iwc_ = new IndexWriterConfig(TEST_VERSION_CURRENT, null)
+ .SetMergePolicy(mp_);
IndexWriter iw = new IndexWriter(dir, iwc_);
iw.AddIndexes(ramDir);
iw.Dispose(false);
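
[Editor's note] The switch from `typeof(IndexFormatTooOldException).Name` to `nameof(IndexFormatTooOldException)` (in the hunks above and again in the 3x test below) trades a run-time reflection call for a compile-time string constant; for a non-generic type the two produce the same text. A quick standalone check:

using System;

class NameofSketch
{
    static void Main()
    {
        // Both lines print "InvalidOperationException": nameof is resolved at
        // compile time, typeof(...).Name via reflection at run time.
        Console.WriteLine(nameof(InvalidOperationException));
        Console.WriteLine(typeof(InvalidOperationException).Name);
    }
}
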
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
index 9ac0bf703e..fbeb4a0c8f 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
@@ -36,7 +36,6 @@ namespace Lucene.Net.Index
using BytesRef = Lucene.Net.Util.BytesRef;
using Constants = Lucene.Net.Util.Constants;
using Directory = Lucene.Net.Store.Directory;
- //using IndexOptions = Lucene.Net.Index.IndexOptions;
using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
using Document = Lucene.Net.Documents.Document;
using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField;
@@ -113,19 +112,37 @@ public void testCreateSingleSegmentNoCFS() throws IOException {
// LUCENENET specific to load resources for this type
internal const string CURRENT_RESOURCE_DIRECTORY = "Lucene.Net.Tests.Index.";
- internal static readonly string[] oldNames = new string[] {
- "30.cfs", "30.nocfs", "31.cfs", "31.nocfs", "32.cfs",
- "32.nocfs", "34.cfs", "34.nocfs"
+ internal static readonly string[] oldNames = {
+ "30.cfs",
+ "30.nocfs",
+ "31.cfs",
+ "31.nocfs",
+ "32.cfs",
+ "32.nocfs",
+ "34.cfs",
+ "34.nocfs"
};
- internal readonly string[] unsupportedNames = new string[] {
- "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs",
- "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs",
- "24.cfs", "24.nocfs", "29.cfs", "29.nocfs"
+ internal readonly string[] unsupportedNames = {
+ "19.cfs",
+ "19.nocfs",
+ "20.cfs",
+ "20.nocfs",
+ "21.cfs",
+ "21.nocfs",
+ "22.cfs",
+ "22.nocfs",
+ "23.cfs",
+ "23.nocfs",
+ "24.cfs",
+ "24.nocfs",
+ "29.cfs",
+ "29.nocfs"
};
- internal static readonly string[] oldSingleSegmentNames = new string[] {
- "31.optimized.cfs", "31.optimized.nocfs"
+ internal static readonly string[] oldSingleSegmentNames = {
+ "31.optimized.cfs",
+ "31.optimized.nocfs"
};
internal static IDictionary<string, Directory> oldIndexDirs;
@@ -189,9 +206,7 @@ public virtual void TestUnsupportedOldIndexes()
reader = DirectoryReader.Open(dir);
Assert.Fail("DirectoryReader.open should not pass for " + unsupportedNames[i]);
}
-#pragma warning disable 168
- catch (IndexFormatTooOldException e)
-#pragma warning restore 168
+ catch (IndexFormatTooOldException /*e*/)
{
// pass
}
@@ -206,7 +221,8 @@ public virtual void TestUnsupportedOldIndexes()
try
{
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
+ writer = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
Assert.Fail("IndexWriter creation should not pass for " + unsupportedNames[i]);
}
catch (IndexFormatTooOldException e)
@@ -234,13 +250,13 @@ public virtual void TestUnsupportedOldIndexes()
writer = null;
}
- StringBuilder bos = new StringBuilder();
+ StringBuilder bos = new StringBuilder(512); // LUCENENET specific: allocating 512 chars instead of 1024 bytes
CheckIndex checker = new CheckIndex(dir);
checker.InfoStream = new StringWriter(bos);
CheckIndex.Status indexStatus = checker.DoCheckIndex();
Assert.IsFalse(indexStatus.Clean);
checker.InfoStream.Flush();
- Assert.IsTrue(bos.ToString().Contains(typeof(IndexFormatTooOldException).Name));
+ Assert.IsTrue(bos.ToString().Contains(nameof(IndexFormatTooOldException)));
dir.Dispose();
}
@@ -256,7 +272,8 @@ public virtual void TestFullyMergeOldIndex()
Console.WriteLine("\nTEST: index=" + name);
}
Directory dir = NewDirectory(oldIndexDirs[name]);
- IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
w.ForceMerge(1);
w.Dispose();
@@ -274,7 +291,8 @@ public virtual void TestAddOldIndexes()
Console.WriteLine("\nTEST: old index " + name);
}
Directory targetDir = NewDirectory();
- IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
+ IndexWriter w = new IndexWriter(targetDir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
w.AddIndexes(oldIndexDirs[name]);
if (Verbose)
{
@@ -294,7 +312,8 @@ public virtual void TestAddOldIndexesReader()
IndexReader reader = DirectoryReader.Open(oldIndexDirs[name]);
Directory targetDir = NewDirectory();
- IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
+ IndexWriter w = new IndexWriter(targetDir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
w.AddIndexes(reader);
w.Dispose();
reader.Dispose();
@@ -619,7 +638,8 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM
mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
mp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
// TODO: remove randomness
- IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS);
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS);
IndexWriter writer = new IndexWriter(dir, conf);
for (int i = 0; i < 35; i++)
@@ -639,12 +659,15 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM
mp = new LogByteSizeMergePolicy();
mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
// TODO: remove randomness
- conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS);
+ conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS);
writer = new IndexWriter(dir, conf);
AddNoProxDoc(writer);
writer.Dispose();
- writer = new IndexWriter(dir, conf.SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES));
+ writer = new IndexWriter(dir,
+ conf.SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES)
+ );
Term searchTerm = new Term("id", "7");
writer.DeleteDocuments(searchTerm);
writer.Dispose();
@@ -816,7 +839,6 @@ public virtual void TestNumericFields()
{
foreach (string name in oldNames)
{
-
Directory dir = oldIndexDirs[name];
IndexReader reader = DirectoryReader.Open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
@@ -897,7 +919,8 @@ public virtual void TestUpgradeOldIndex()
}
Directory dir = NewDirectory(oldIndexDirs[name]);
- (new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)).Upgrade();
+ new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)
+ .Upgrade();
CheckAllSegmentsUpgraded(dir);
@@ -924,8 +947,9 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
for (int i = 0; i < 3; i++)
{
// only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
- MergePolicy mp = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
- IndexWriterConfig iwc = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(mp);
+ MergePolicy mp = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy();
+ IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(mp);
IndexWriter w = new IndexWriter(ramDir, iwc);
// add few more docs:
for (int j = 0; j < RandomMultiplier * Random.Next(30); j++)
@@ -937,8 +961,9 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
// add dummy segments (which are all in current
// version) to single segment index
- MergePolicy mp_ = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
- IndexWriterConfig iwc_ = (new IndexWriterConfig(TEST_VERSION_CURRENT, null)).SetMergePolicy(mp_);
+ MergePolicy mp_ = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy();
+ IndexWriterConfig iwc_ = new IndexWriterConfig(TEST_VERSION_CURRENT, null)
+ .SetMergePolicy(mp_);
IndexWriter w_ = new IndexWriter(dir, iwc_);
w_.AddIndexes(ramDir);
w_.Dispose(false);
@@ -946,7 +971,8 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
// determine count of segments in modified index
int origSegCount = GetNumberOfSegments(dir);
- (new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)).Upgrade();
+ new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)
+ .Upgrade();
int segCount = CheckAllSegmentsUpgraded(dir);
Assert.AreEqual(origSegCount, segCount, "Index must still contain the same number of segments, as only one segment was upgraded and nothing else merged");
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
index a0337168f9..b81ba92038 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
@@ -33,7 +33,6 @@ namespace Lucene.Net.Index
* limitations under the License.
*/
- using BytesRef = Lucene.Net.Util.BytesRef;
using Directory = Lucene.Net.Store.Directory;
using Document = Documents.Document;
using Field = Field;
@@ -51,7 +50,6 @@ namespace Lucene.Net.Index
// Lucene3x doesnt have totalTermFreq, so the test isn't interesting there.
[TestFixture]
public class TestBagOfPositions : LuceneTestCase
-
{
[Test]
[Slow]
@@ -128,7 +126,7 @@ public virtual void Test()
Document document = new Document();
Field field = new Field("field", "", fieldType);
document.Add(field);
- threads[threadID] = new ThreadAnonymousClass(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
+ threads[threadID] = new ThreadAnonymousClass(maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
threads[threadID].Start();
}
startingGun.Signal();
@@ -160,9 +158,6 @@ public virtual void Test()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestBagOfPositions outerInstance;
-
- private readonly int numTerms;
private readonly int maxTermsPerDoc;
private readonly ConcurrentQueue<string> postings;
private readonly RandomIndexWriter iw;
@@ -171,10 +166,8 @@ private sealed class ThreadAnonymousClass : ThreadJob
private readonly Document document;
private readonly Field field;
- public ThreadAnonymousClass(TestBagOfPositions outerInstance, int numTerms, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
+ public ThreadAnonymousClass(int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
{
- this.outerInstance = outerInstance;
- this.numTerms = numTerms;
this.maxTermsPerDoc = maxTermsPerDoc;
this.postings = postings;
this.iw = iw;
@@ -189,7 +182,7 @@ public override void Run()
try
{
startingGun.Wait();
- while (!(postings.Count == 0))
+ while (!postings.IsEmpty)
{
StringBuilder text = new StringBuilder();
int numTerms = threadRandom.Next(maxTermsPerDoc);
@@ -213,4 +206,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
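
[Editor's note] `IsEmpty` is the idiomatic emptiness check on `ConcurrentQueue<T>`: `Count` has to take a consistent snapshot across the queue's internal segments, while `IsEmpty` only inspects the head, so `!postings.IsEmpty` is both clearer and cheaper than `!(postings.Count == 0)` in these multi-threaded drain loops. A minimal sketch of the same drain pattern:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class DrainSketch
{
    static void Main()
    {
        var postings = new ConcurrentQueue<string>(new[] { "a", "b", "c", "d" });

        // Several workers drain the shared queue. IsEmpty is the cheap exit
        // check; TryDequeue is what actually claims an item safely.
        Parallel.For(0, 4, _ =>
        {
            while (!postings.IsEmpty)
            {
                if (postings.TryDequeue(out string item))
                    Console.WriteLine(item);
            }
        });
    }
}
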
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
index 8d9ea475eb..87b8324853 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
@@ -32,7 +32,6 @@ namespace Lucene.Net.Index
* limitations under the License.
*/
- using BytesRef = Lucene.Net.Util.BytesRef;
using Directory = Lucene.Net.Store.Directory;
using Document = Documents.Document;
using Field = Field;
@@ -102,7 +101,7 @@ public virtual void Test()
for (int threadID = 0; threadID < threadCount; threadID++)
{
- threads[threadID] = new ThreadAnonymousClass(this, maxTermsPerDoc, postings, iw, startingGun);
+ threads[threadID] = new ThreadAnonymousClass(maxTermsPerDoc, postings, iw, startingGun);
threads[threadID].Start();
}
startingGun.Signal();
@@ -140,16 +139,13 @@ public virtual void Test()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestBagOfPostings outerInstance;
-
private readonly int maxTermsPerDoc;
private readonly ConcurrentQueue<string> postings;
private readonly RandomIndexWriter iw;
private readonly CountdownEvent startingGun;
- public ThreadAnonymousClass(TestBagOfPostings outerInstance, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
+ public ThreadAnonymousClass(int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
{
- this.outerInstance = outerInstance;
this.maxTermsPerDoc = maxTermsPerDoc;
this.postings = postings;
this.iw = iw;
@@ -164,7 +160,7 @@ public override void Run()
Field field = NewTextField("field", "", Field.Store.NO);
document.Add(field);
startingGun.Wait();
- while (!(postings.Count == 0))
+ while (!postings.IsEmpty)
{
StringBuilder text = new StringBuilder();
ISet<string> visited = new JCG.HashSet<string>();
@@ -195,4 +191,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
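The same `while (!(postings.Count == 0))` to `while (!postings.IsEmpty)` change lands in both files, and it is more than style: on System.Collections.Concurrent.ConcurrentQueue<T>, IsEmpty is a cheap probe of the head, while Count may have to walk the queue's internal segments to produce a stable count. Either check can go stale between threads, so the actual removal still has to go through TryDequeue, roughly as in this sketch (the Drain name is invented):

    using System;
    using System.Collections.Concurrent;

    public static class DrainDemo
    {
        public static void Drain(ConcurrentQueue<string> postings)
        {
            // IsEmpty is only a hint under concurrency: another thread may
            // empty the queue between the check and the dequeue, so the
            // removal itself must be the conditional TryDequeue.
            while (!postings.IsEmpty)
            {
                if (postings.TryDequeue(out string term))
                {
                    Console.WriteLine(term);
                }
            }
        }

        public static void Main() => Drain(new ConcurrentQueue<string>(new[] { "a", "b", "c" }));
    }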
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
index 838cc8f43e..a90626bc14 100644
--- a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
@@ -111,7 +111,8 @@ private Document Doc(int id)
public virtual void TestUpdatesAreFlushed()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(
+ TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
writer.AddDocument(Doc(0)); // val=1
writer.AddDocument(Doc(1)); // val=2
writer.AddDocument(Doc(3)); // val=2
@@ -152,8 +153,8 @@ public virtual void TestSimple()
{
writer.Dispose();
reader = DirectoryReader.Open(dir);
- } // NRT
- else
+ }
+ else // NRT
{
reader = DirectoryReader.Open(writer, true);
writer.Dispose();
@@ -178,7 +179,7 @@ public virtual void TestUpdateFewSegments()
conf.SetMaxBufferedDocs(2); // generate few segments
conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
IndexWriter writer = new IndexWriter(dir, conf);
- int numDocs = 10;
+ const int numDocs = 10;
long[] expectedValues = new long[numDocs];
for (int i = 0; i < numDocs; i++)
{
@@ -203,8 +204,8 @@ public virtual void TestUpdateFewSegments()
{
writer.Dispose();
reader = DirectoryReader.Open(dir);
- } // NRT
- else
+ }
+ else // NRT
{
reader = DirectoryReader.Open(writer, true);
writer.Dispose();
@@ -302,8 +303,8 @@ public virtual void TestUpdatesAndDeletes()
{
writer.Dispose();
reader = DirectoryReader.Open(dir);
- } // NRT
- else
+ }
+ else // NRT
{
reader = DirectoryReader.Open(writer, true);
writer.Dispose();
@@ -355,8 +356,8 @@ public virtual void TestUpdatesWithDeletes()
{
writer.Dispose();
reader = DirectoryReader.Open(dir);
- } // NRT
- else
+ }
+ else // NRT
{
reader = DirectoryReader.Open(writer, true);
writer.Dispose();
@@ -395,8 +396,8 @@ public virtual void TestUpdateAndDeleteSameDocument()
{
writer.Dispose();
reader = DirectoryReader.Open(dir);
- } // NRT
- else
+ }
+ else // NRT
{
reader = DirectoryReader.Open(writer, true);
writer.Dispose();
@@ -669,7 +670,7 @@ public virtual void TestDifferentDVFormatPerField()
{
Directory dir = NewDirectory();
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
- conf.SetCodec(new Lucene46CodecAnonymousClass(this));
+ conf.SetCodec(new Lucene46CodecAnonymousClass());
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@@ -702,13 +703,6 @@ public virtual void TestDifferentDVFormatPerField()
private sealed class Lucene46CodecAnonymousClass : Lucene46Codec
{
- private readonly TestBinaryDocValuesUpdates outerInstance;
-
- public Lucene46CodecAnonymousClass(TestBinaryDocValuesUpdates outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public override DocValuesFormat GetDocValuesFormatForField(string field)
{
return new Lucene45DocValuesFormat();
@@ -1132,6 +1126,7 @@ public virtual void TestUpdateOldSegments()
};
Directory dir = NewDirectory();
+ bool oldValue = OldFormatImpersonationIsActive;
// create a segment with an old Codec
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
conf.SetCodec(oldCodecs[Random.Next(oldCodecs.Length)]);
@@ -1142,35 +1137,11 @@ public virtual void TestUpdateOldSegments()
doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
writer.AddDocument(doc);
writer.Dispose();
- dir.Dispose();
- }
-
- [Test, LuceneNetSpecific]
- public virtual void TestUpdateOldSegments_OldFormatNotActive()
- {
- bool oldValue = OldFormatImpersonationIsActive;
-
- OldFormatImpersonationIsActive = false;
- Codec[] oldCodecs = new Codec[] {
- new Lucene40RWCodec(),
- new Lucene41RWCodec(),
- new Lucene42RWCodec(),
- new Lucene45RWCodec()
- };
-
- Directory dir = NewDirectory();
- Document doc = new Document();
- doc.Add(new StringField("id", "doc", Store.NO));
- doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
-
- var conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
- conf.SetCodec(oldCodecs[Random.Next(oldCodecs.Length)]);
-
- var writer = new IndexWriter(dir, conf);
- writer.AddDocument(doc);
+ conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
+ writer = new IndexWriter(dir, conf);
writer.UpdateBinaryDocValue(new Term("id", "doc"), "f", ToBytes(4L));
-
+ OldFormatImpersonationIsActive = false;
try
{
writer.Dispose();
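The consolidated test above now captures `oldValue` from the static OldFormatImpersonationIsActive flag before toggling it mid-test; that capture only pays off if the flag is restored even when Dispose() throws, presumably in a finally block below this excerpt. The usual capture/toggle/restore idiom, as a sketch with illustrative names:

    using System;

    public static class FlagScope
    {
        // Stand-in for a static test-framework flag such as OldFormatImpersonationIsActive.
        public static bool OldFormatImpersonationIsActive = true;

        public static void RunWithImpersonationDisabled(Action body)
        {
            bool oldValue = OldFormatImpersonationIsActive; // capture before toggling
            OldFormatImpersonationIsActive = false;
            try
            {
                body();
            }
            finally
            {
                OldFormatImpersonationIsActive = oldValue;  // restore even if body throws
            }
        }

        public static void Main() =>
            RunWithImpersonationDisabled(() => Console.WriteLine(OldFormatImpersonationIsActive)); // False
    }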
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs b/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs
index 2b3ceb669d..98083cbe10 100644
--- a/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs
+++ b/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs
@@ -82,8 +82,8 @@ public virtual void TestBinary()
[Test]
public virtual void TestToString()
{
- Term term = new Term("foo", new BytesRef(new[] { unchecked((byte)0xff), unchecked((byte)0xfe) }));
+ Term term = new Term("foo", new BytesRef(new[] { (byte)0xff, (byte)0xfe }));
Assert.AreEqual("foo:[ff fe]", term.ToString());
}
}
-}
\ No newline at end of file
+}
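Dropping `unchecked` here is safe because 0xFF and 0xFE are 255 and 254, inside byte's 0..255 range; a constant cast needs unchecked only when the value is out of range, which is how the Java signed-byte translation originally produced it. A small sketch of the compile-time rules:

    public static class ByteCasts
    {
        private const byte Ff = (byte)0xff;                    // 255 fits, plain constant cast compiles
        private const byte FromMinusOne = unchecked((byte)-1); // out of range: unchecked wraps to 255
        // private const byte Bad = (byte)0x1ff;               // error CS0221 without unchecked

        public static void Main() =>
            System.Console.WriteLine($"{Ff:x2} {FromMinusOne:x2}"); // ff ff
    }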
diff --git a/src/Lucene.Net.Tests/Index/TestByteSlices.cs b/src/Lucene.Net.Tests/Index/TestByteSlices.cs
index 972129a463..429ca6d575 100644
--- a/src/Lucene.Net.Tests/Index/TestByteSlices.cs
+++ b/src/Lucene.Net.Tests/Index/TestByteSlices.cs
@@ -1,6 +1,5 @@
using NUnit.Framework;
using RandomizedTesting.Generators;
-using System;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -36,7 +35,7 @@ public virtual void TestBasic()
// LUCENENET specific: NUnit will crash with an OOM if we do the full test
// with verbosity enabled. So, making this a manual setting that can be
// turned on if, and only if, needed for debugging. If the setting is turned
- // on, we are decresing the number of iterations by 1/3, which seems to
+ // on, we are decreasing the number of iterations by 1/3, which seems to
// keep it from crashing.
bool isVerbose = false;
if (!isVerbose)
@@ -153,4 +152,4 @@ public virtual void TestBasic()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestCheckIndex.cs b/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
index c9e990a4c0..bb1c3a7ad0 100644
--- a/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
+++ b/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
@@ -45,7 +45,7 @@ public class TestCheckIndex : LuceneTestCase
public virtual void TestDeletedDocs()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
for (int i = 0; i < 19; i++)
{
Document doc = new Document();
@@ -128,4 +128,4 @@ public virtual void TestBogusTermVectors()
dir.Dispose(); // checkindex
}
}
-}
\ No newline at end of file
+}
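The `(IndexWriterConfig)` casts deleted in this and later files suggest the fluent setters once returned a base type (in Lucene that would be LiveIndexWriterConfig) and now return IndexWriterConfig itself; once the setter's declared return type is the derived class, the downcast before `new IndexWriter(dir, ...)` is dead code. A toy illustration of that API shape (all names invented):

    public class LiveConfig
    {
        public int MaxBufferedDocs { get; protected set; }
    }

    public class Config : LiveConfig
    {
        // Returning the derived type keeps a fluent chain strongly typed,
        // so callers never need a (Config) cast.
        public Config SetMaxBufferedDocs(int value)
        {
            MaxBufferedDocs = value;
            return this;
        }
    }

    public static class Demo
    {
        public static void Main()
        {
            Config conf = new Config().SetMaxBufferedDocs(2); // no cast required
            System.Console.WriteLine(conf.MaxBufferedDocs);
        }
    }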
diff --git a/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs b/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs
index 6fa91bb74f..ea162c05df 100644
--- a/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs
+++ b/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs
@@ -2,7 +2,6 @@
using Lucene.Net.Documents;
using NUnit.Framework;
using System;
-using System.IO;
namespace Lucene.Net.Index
{
@@ -76,7 +75,7 @@ public virtual void TestExposeUnclosedFiles()
Directory d = NewDirectory();
RandomIndexWriter w = new RandomIndexWriter(Random, d);
//int numDocs = AtLeast(100);
- int numDocs = 5;
+ const int numDocs = 5;
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
@@ -109,4 +108,4 @@ public virtual void TestExposeUnclosedFiles()
d.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs
index 84cf9674da..7e090a0c18 100644
--- a/src/Lucene.Net.Tests/Index/TestCodecs.cs
+++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs
@@ -100,20 +100,17 @@ public override void BeforeClass()
internal class FieldData : IComparable<FieldData>
{
- private readonly TestCodecs outerInstance;
-
internal readonly FieldInfo fieldInfo;
internal readonly TermData[] terms;
internal readonly bool omitTF;
internal readonly bool storePayloads;
- public FieldData(TestCodecs outerInstance, string name, FieldInfos.Builder fieldInfos, TermData[] terms, bool omitTF, bool storePayloads)
+ public FieldData(string name, FieldInfos.Builder fieldInfos, TermData[] terms, bool omitTF, bool storePayloads)
{
- this.outerInstance = outerInstance;
this.omitTF = omitTF;
this.storePayloads = storePayloads;
// TODO: change this test to use all three
- fieldInfo = fieldInfos.AddOrUpdate(name, new IndexableFieldTypeAnonymousClass(this, omitTF));
+ fieldInfo = fieldInfos.AddOrUpdate(name, new IndexableFieldTypeAnonymousClass(omitTF));
if (storePayloads)
{
fieldInfo.SetStorePayloads();
@@ -129,12 +126,10 @@ public FieldData(TestCodecs outerInstance, string name, FieldInfos.Builder field
private sealed class IndexableFieldTypeAnonymousClass : IIndexableFieldType
{
- private readonly FieldData outerInstance;
private readonly bool omitTF;
- public IndexableFieldTypeAnonymousClass(FieldData outerInstance, bool omitTF)
+ public IndexableFieldTypeAnonymousClass(bool omitTF)
{
- this.outerInstance = outerInstance;
this.omitTF = omitTF;
}
@@ -154,7 +149,7 @@ public IndexableFieldTypeAnonymousClass(FieldData outerInstance, bool omitTF)
public bool OmitNorms => false;
- public IndexOptions IndexOptions => omitTF ? Index.IndexOptions.DOCS_ONLY : Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+ public IndexOptions IndexOptions => omitTF ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
public DocValuesType DocValueType => DocValuesType.NONE;
}
@@ -186,14 +181,11 @@ public virtual void Write(FieldsConsumer consumer)
internal class PositionData
{
- private readonly TestCodecs outerInstance;
-
internal int pos;
internal BytesRef payload;
- internal PositionData(TestCodecs outerInstance, int pos, BytesRef payload)
+ internal PositionData(int pos, BytesRef payload)
{
- this.outerInstance = outerInstance;
this.pos = pos;
this.payload = payload;
}
@@ -201,17 +193,14 @@ internal PositionData(TestCodecs outerInstance, int pos, BytesRef payload)
internal class TermData : IComparable<TermData>
{
- private readonly TestCodecs outerInstance;
-
internal string text2;
internal readonly BytesRef text;
internal int[] docs;
internal PositionData[][] positions;
internal FieldData field;
- public TermData(TestCodecs outerInstance, string text, int[] docs, PositionData[][] positions)
+ public TermData(string text, int[] docs, PositionData[][] positions)
{
- this.outerInstance = outerInstance;
this.text = new BytesRef(text);
this.text2 = text;
this.docs = docs;
@@ -322,12 +311,12 @@ internal virtual TermData[] MakeRandomTerms(bool omitTF, bool storePayloads)
payload = null;
}
- positions[j][k] = new PositionData(this, position, payload);
+ positions[j][k] = new PositionData(position, payload);
}
}
}
- terms[i] = new TermData(this, text2, docs, positions);
+ terms[i] = new TermData(text2, docs, positions);
}
return terms;
@@ -342,12 +331,12 @@ public virtual void TestFixedPostings()
{
int[] docs = new int[] { i };
string text = i.ToString(Character.MaxRadix);
- terms[i] = new TermData(this, text, docs, null);
+ terms[i] = new TermData(text, docs, null);
}
FieldInfos.Builder builder = new FieldInfos.Builder();
- FieldData field = new FieldData(this, "field", builder, terms, true, false);
+ FieldData field = new FieldData("field", builder, terms, true, false);
FieldData[] fields = new FieldData[] { field };
FieldInfos fieldInfos = builder.Finish();
// LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws
@@ -384,7 +373,7 @@ public virtual void TestFixedPostings()
Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
}
}
- Assert.IsFalse(termsEnum.MoveNext());
+ Assert.IsFalse(termsEnum.MoveNext()); // LUCENENET NOTE: using IsFalse instead of original assertNull due to enumerator pattern
for (int i = 0; i < NUM_TERMS; i++)
{
@@ -392,6 +381,10 @@ public virtual void TestFixedPostings()
}
Assert.IsFalse(fieldsEnum.MoveNext());
+
+ // LUCENENET specific: the following original Java lines handled by the `using` statements above
+ // reader.close();
+ // dir.close();
}
[Test]
@@ -404,7 +397,7 @@ public virtual void TestRandomPostings()
{
bool omitTF = 0 == (i % 3);
bool storePayloads = 1 == (i % 3);
- fields[i] = new FieldData(this, fieldNames[i], builder, this.MakeRandomTerms(omitTF, storePayloads), omitTF, storePayloads);
+ fields[i] = new FieldData(fieldNames[i], builder, this.MakeRandomTerms(omitTF, storePayloads), omitTF, storePayloads);
}
// LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws
@@ -430,18 +423,22 @@ public virtual void TestRandomPostings()
Verify[] threads = new Verify[NUM_TEST_THREADS - 1];
for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
{
- threads[i] = new Verify(this, si, fields, terms);
- threads[i].IsBackground = (true);
+ threads[i] = new Verify(si, fields, terms);
+ threads[i].IsBackground = true;
threads[i].Start();
}
- (new Verify(this, si, fields, terms)).Run();
+ new Verify(si, fields, terms).Run();
for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
{
threads[i].Join();
if (Debugging.AssertsEnabled) Debugging.Assert(!threads[i].failed);
}
+
+ // LUCENENET specific: The following original Java lines are handled by the `using` statements above
+ // terms.close();
+ // dir.close();
}
[Test]
@@ -511,16 +508,13 @@ private ScoreDoc[] Search(IndexWriter writer, Query q, int n)
private class Verify : ThreadJob
{
- private readonly TestCodecs outerInstance;
-
internal readonly Fields termsDict;
internal readonly FieldData[] fields;
internal readonly SegmentInfo si;
internal volatile bool failed;
- internal Verify(TestCodecs outerInstance, SegmentInfo si, FieldData[] fields, Fields termsDict)
+ internal Verify(SegmentInfo si, FieldData[] fields, Fields termsDict)
{
- this.outerInstance = outerInstance;
this.fields = fields;
this.termsDict = termsDict;
this.si = si;
@@ -792,7 +786,7 @@ private void Write(FieldInfos fieldInfos, Directory dir, FieldData[] fields, boo
int termIndexInterval = TestUtil.NextInt32(Random, 13, 27);
Codec codec = Codec.Default;
SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
- SegmentWriteState state = new SegmentWriteState((InfoStream)InfoStream.Default, dir, si, fieldInfos, termIndexInterval, null, NewIOContext(Random));
+ SegmentWriteState state = new SegmentWriteState(InfoStream.Default, dir, si, fieldInfos, termIndexInterval, null, NewIOContext(Random));
// LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws
using FieldsConsumer consumer = codec.PostingsFormat.FieldsConsumer(state);
@@ -839,6 +833,11 @@ public virtual void TestDocsOnlyFreq()
Assert.AreEqual(1, de.Freq, "wrong freq for doc " + de.DocID);
}
}
+
+ // LUCENENET specific: The following original Java lines are handled by the `using` statements above
+ // reader.close();
+ //
+ // dir.close();
}
[Test]
@@ -869,6 +868,9 @@ public virtual void TestDisableImpersonation()
{
OldFormatImpersonationIsActive = true;
}
+
+ // LUCENENET specific: The following original Java line is handled by the `using` statement above
+ // dir.close();
}
}
-}
\ No newline at end of file
+}
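Several hunks above replace Java's trailing reader.close()/dir.close() calls with comments pointing at using statements earlier in the method. A C# 8 using declaration disposes at the end of the enclosing scope, even on an exception path, which is what makes the explicit close calls redundant. A self-contained sketch:

    using System;

    public sealed class Resource : IDisposable
    {
        private readonly string name;
        public Resource(string name) => this.name = name;
        public void Dispose() => Console.WriteLine($"disposed {name}");
    }

    public static class UsingDemo
    {
        public static void Main()
        {
            // Disposed in reverse order when Main's scope ends, standing in
            // for Java's explicit reader.close(); dir.close(); pair.
            using Resource dir = new Resource("dir");
            using Resource reader = new Resource("reader");
            Console.WriteLine("work");
        } // prints: work, disposed reader, disposed dir
    }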
diff --git a/src/Lucene.Net.Tests/Index/TestCompoundFile.cs b/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
index 03d91e30ac..535bd4ee4e 100644
--- a/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
+++ b/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
@@ -58,7 +58,8 @@ public override void TearDown()
}
/// <summary>
- /// Creates a file of the specified size with random data. </summary>
+ /// Creates a file of the specified size with random data.
+ /// </summary>
private void CreateRandomFile(Directory dir, string name, int size)
{
IndexOutput os = dir.CreateOutput(name, NewIOContext(Random));
@@ -72,8 +73,8 @@ private void CreateRandomFile(Directory dir, string name, int size)
/// <summary>
/// Creates a file of the specified size with sequential data. The first
- /// byte is written as the start byte provided. All subsequent bytes are
- /// computed as start + offset where offset is the number of the byte.
+ /// byte is written as the start byte provided. All subsequent bytes are
+ /// computed as start + offset where offset is the number of the byte.
/// </summary>
private void CreateSequenceFile(Directory dir, string name, sbyte start, int size)
{
@@ -160,8 +161,8 @@ private void AssertEqualArrays(string msg, byte[] expected, byte[] test, int sta
// ===========================================================
/// <summary>
- /// this test creates compound file based on a single file.
- /// Files of different sizes are tested: 0, 1, 10, 100 bytes.
+ /// This test creates a compound file based on a single file.
+ /// Files of different sizes are tested: 0, 1, 10, 100 bytes.
/// </summary>
[Test]
public virtual void TestSingleFile()
@@ -187,8 +188,7 @@ public virtual void TestSingleFile()
}
/// <summary>
- /// this test creates compound file based on two files.
- ///
+ /// This test creates a compound file based on two files.
/// </summary>
[Test]
public virtual void TestTwoFiles()
@@ -219,18 +219,18 @@ public virtual void TestTwoFiles()
}
/// <summary>
- /// this test creates a compound file based on a large number of files of
- /// various length. The file content is generated randomly. The sizes range
- /// from 0 to 1Mb. Some of the sizes are selected to test the buffering
- /// logic in the file reading code. For this the chunk variable is set to
- /// the length of the buffer used internally by the compound file logic.
+ /// This test creates a compound file based on a large number of files of
+ /// various lengths. The file content is generated randomly. The sizes range
+ /// from 0 to 1Mb. Some of the sizes are selected to test the buffering
+ /// logic in the file reading code. For this, the chunk variable is set to
+ /// the length of the buffer used internally by the compound file logic.
/// </summary>
[Test]
public virtual void TestRandomFiles()
{
// Setup the test segment
- string segment = "test";
- int chunk = 1024; // internal buffer size used by the stream
+ const string segment = "test";
+ const int chunk = 1024; // internal buffer size used by the stream
CreateRandomFile(dir, segment + ".zero", 0);
CreateRandomFile(dir, segment + ".one", 1);
CreateRandomFile(dir, segment + ".ten", 10);
@@ -250,7 +250,11 @@ public virtual void TestRandomFiles()
// Now test
CompoundFileDirectory csw = new CompoundFileDirectory(dir, "test.cfs", NewIOContext(Random), true);
- string[] data = new string[] { ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7" };
+ string[] data = new string[]
+ {
+ ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3",
+ ".big4", ".big5", ".big6", ".big7"
+ };
for (int i = 0; i < data.Length; i++)
{
string fileName = segment + data[i];
@@ -273,9 +277,9 @@ public virtual void TestRandomFiles()
/// <summary>
/// Setup a larger compound file with a number of components, each of
- /// which is a sequential file (so that we can easily tell that we are
- /// reading in the right byte). The methods sets up 20 files - f0 to f19,
- /// the size of each file is 1000 bytes.
+ /// which is a sequential file (so that we can easily tell that we are
+ /// reading in the right byte). The method sets up 20 files - f0 to f19,
+ /// each 1000 bytes in size.
/// </summary>
private void SetUp_2()
{
@@ -382,8 +386,8 @@ public virtual void TestClonedStreamsClosing()
}
/// <summary>
- /// this test opens two files from a compound stream and verifies that
- /// their file positions are independent of each other.
+ /// This test opens two files from a compound stream and verifies that
+ /// their file positions are independent of each other.
/// </summary>
[Test]
public virtual void TestRandomAccess()
@@ -464,8 +468,8 @@ public virtual void TestRandomAccess()
}
/// <summary>
- /// this test opens two files from a compound stream and verifies that
- /// their file positions are independent of each other.
+ /// This test opens two files from a compound stream and verifies that
+ /// their file positions are independent of each other.
/// </summary>
[Test]
public virtual void TestRandomAccessClones()
@@ -611,7 +615,7 @@ public virtual void TestReadPastEOF()
public virtual void TestLargeWrites()
{
IndexOutput os = dir.CreateOutput("testBufferStart.txt", NewIOContext(Random));
-
+
var largeBuf = new byte[2048];
for (int i = 0; i < largeBuf.Length; i++)
{
@@ -897,4 +901,4 @@ private void CheckFiles(Directory dir)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
index 82b5667ef2..d702d7d211 100644
--- a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
@@ -48,13 +48,6 @@ public class TestConcurrentMergeScheduler : LuceneTestCase
{
private class FailOnlyOnFlush : Failure
{
- private readonly TestConcurrentMergeScheduler outerInstance;
-
- public FailOnlyOnFlush(TestConcurrentMergeScheduler outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
internal bool doFail;
internal bool hitExc;
@@ -75,9 +68,9 @@ public override void Eval(MockDirectoryWrapper dir)
{
// LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)]
// to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods.
- bool isDoFlush = Util.StackTraceHelper.DoesStackTraceContainMethod("Flush");
- bool isClose = Util.StackTraceHelper.DoesStackTraceContainMethod("Close") ||
- Util.StackTraceHelper.DoesStackTraceContainMethod("Dispose");
+ bool isDoFlush = StackTraceHelper.DoesStackTraceContainMethod("Flush");
+ bool isClose = StackTraceHelper.DoesStackTraceContainMethod("Close") ||
+ StackTraceHelper.DoesStackTraceContainMethod("Dispose");
if (isDoFlush && !isClose && Random.NextBoolean())
{
@@ -94,10 +87,10 @@ public override void Eval(MockDirectoryWrapper dir)
public virtual void TestFlushExceptions()
{
MockDirectoryWrapper directory = NewMockDirectory();
- FailOnlyOnFlush failure = new FailOnlyOnFlush(this);
+ FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.FailOn(failure);
- IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
Document doc = new Document();
Field idField = NewStringField("id", "", Field.Store.YES);
doc.Add(idField);
@@ -207,7 +200,7 @@ public virtual void TestDeleteMerging()
public virtual void TestNoExtraFiles()
{
Directory directory = NewDirectory();
- IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
for (int iter = 0; iter < 7; iter++)
{
@@ -227,7 +220,7 @@ public virtual void TestNoExtraFiles()
TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");
// Reopen
- writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
+ writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
}
writer.Dispose();
@@ -243,7 +236,10 @@ public virtual void TestNoWaitClose()
Field idField = NewStringField("id", "", Field.Store.YES);
doc.Add(idField);
- IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(100)));
+ IndexWriter writer = new IndexWriter(directory,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy(100)));
for (int iter = 0; iter < 10; iter++)
{
@@ -273,7 +269,10 @@ public virtual void TestNoWaitClose()
reader.Dispose();
// Reopen
- writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy(100)));
+ writer = new IndexWriter(directory,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMergePolicy(NewLogMergePolicy(100)));
}
writer.Dispose();
@@ -298,7 +297,7 @@ public virtual void TestMaxMergeCount()
Console.WriteLine("TEST: maxMergeCount=" + maxMergeCount + " maxMergeThreads=" + maxMergeThreads);
}
- ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousClass(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
+ ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousClass(maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
cms.SetMaxMergesAndThreads(maxMergeCount, maxMergeThreads);
iwc.SetMergeScheduler(cms);
iwc.SetMaxBufferedDocs(2);
@@ -324,16 +323,13 @@ public virtual void TestMaxMergeCount()
private sealed class ConcurrentMergeSchedulerAnonymousClass : ConcurrentMergeScheduler
{
- private readonly TestConcurrentMergeScheduler outerInstance;
-
private readonly int maxMergeCount;
private readonly CountdownEvent enoughMergesWaiting;
private readonly AtomicInt32 runningMergeCount;
private readonly AtomicBoolean failed;
- public ConcurrentMergeSchedulerAnonymousClass(TestConcurrentMergeScheduler outerInstance, int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed)
+ public ConcurrentMergeSchedulerAnonymousClass(int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed)
{
- this.outerInstance = outerInstance;
this.maxMergeCount = maxMergeCount;
this.enoughMergesWaiting = enoughMergesWaiting;
this.runningMergeCount = runningMergeCount;
@@ -374,7 +370,7 @@ protected override void DoMerge(MergePolicy.OneMerge merge)
}
catch (Exception t) when (t.IsThrowable())
{
- failed.Value = (true);
+ failed.Value = true;
m_writer.MergeFinish(merge);
// LUCENENET NOTE: ThreadJob takes care of propagating the exception to the calling thread
throw RuntimeException.Create(t);
@@ -431,7 +427,6 @@ public virtual void TestTotalBytesSize()
d.Dispose();
}
-
// LUCENENET specific
private class FailOnlyOnMerge : Failure
{
@@ -479,4 +474,4 @@ public void TestExceptionOnBackgroundThreadIsPropagatedToCallingThread()
assertTrue(exceptionHit);
}
}
-}
\ No newline at end of file
+}
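The scheduler subclass above records failures by setting a shared flag inside DoMerge and rethrowing, so both the polling test thread and the joined merge thread observe the error. A rough stand-in for that signalling, using Volatile over an int where the test uses the support library's AtomicBoolean:

    using System;
    using System.Threading;

    public static class FailureFlagDemo
    {
        private static int failed; // 0 = false, 1 = true (AtomicBoolean stand-in)

        public static bool Failed => Volatile.Read(ref failed) == 1;

        public static void DoMerge(Action merge)
        {
            try
            {
                merge();
            }
            catch (Exception)
            {
                Volatile.Write(ref failed, 1); // the moral equivalent of failed.Value = true
                throw;                         // rethrow so the joining thread sees it too
            }
        }

        public static void Main()
        {
            try { DoMerge(() => throw new InvalidOperationException("boom")); }
            catch (InvalidOperationException) { /* expected in the demo */ }
            Console.WriteLine(Failed); // True
        }
    }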
diff --git a/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs b/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
index ae65d37f8f..2bd43a8499 100644
--- a/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
+++ b/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
@@ -165,7 +165,9 @@ public virtual void TestFieldNumberGaps()
{
Directory dir = NewDirectory();
{
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+ IndexWriter writer = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
Document d = new Document();
d.Add(new TextField("f1", "d1 first field", Field.Store.YES));
d.Add(new TextField("f2", "d1 second field", Field.Store.YES));
@@ -180,7 +182,9 @@ public virtual void TestFieldNumberGaps()
}
{
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+ IndexWriter writer = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
Document d = new Document();
d.Add(new TextField("f1", "d2 first field", Field.Store.YES));
d.Add(new StoredField("f3", new byte[] { 1, 2, 3 }));
@@ -199,7 +203,9 @@ public virtual void TestFieldNumberGaps()
}
{
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+ IndexWriter writer = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
Document d = new Document();
d.Add(new TextField("f1", "d3 first field", Field.Store.YES));
d.Add(new TextField("f2", "d3 second field", Field.Store.YES));
@@ -223,7 +229,9 @@ public virtual void TestFieldNumberGaps()
}
{
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+ IndexWriter writer = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
writer.DeleteDocuments(new Term("f1", "d1"));
// nuke the first segment entirely so that the segment with gaps is
// loaded first!
@@ -231,7 +239,10 @@ public virtual void TestFieldNumberGaps()
writer.Dispose();
}
- IndexWriter writer_ = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(new LogByteSizeMergePolicy()).SetInfoStream(new FailOnNonBulkMergesInfoStream()));
+ IndexWriter writer_ = new IndexWriter(dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(new LogByteSizeMergePolicy())
+ .SetInfoStream(new FailOnNonBulkMergesInfoStream()));
writer_.ForceMerge(1);
writer_.Dispose();
@@ -466,4 +477,4 @@ public void TestSegmentNumberToStringGeneration()
}
}
}
-}
\ No newline at end of file
+}
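Most hunks in this file only reflow a long IndexWriterConfig expression onto one .Set... call per line; nothing about the configuration changes, but each option now occupies its own diffable line. The same layout on a toy builder (all names invented):

    using System;
    using System.Text;

    public class ToyConfig
    {
        private readonly StringBuilder sb = new StringBuilder();

        public ToyConfig SetMaxBufferedDocs(int n) { sb.Append($"maxBufferedDocs={n};"); return this; }
        public ToyConfig SetRamBufferSizeMB(double mb) { sb.Append($"ramMB={mb};"); return this; }
        public ToyConfig SetOpenMode(string mode) { sb.Append($"openMode={mode};"); return this; }
        public override string ToString() => sb.ToString();
    }

    public static class LayoutDemo
    {
        public static void Main()
        {
            // One setter per line: changing any single option later is a one-line diff.
            ToyConfig conf = new ToyConfig()
                .SetMaxBufferedDocs(2)
                .SetRamBufferSizeMB(256.0)
                .SetOpenMode("CREATE");
            Console.WriteLine(conf);
        }
    }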
diff --git a/src/Lucene.Net.Tests/Index/TestCrash.cs b/src/Lucene.Net.Tests/Index/TestCrash.cs
index cc7d4c3c89..a6cc75918f 100644
--- a/src/Lucene.Net.Tests/Index/TestCrash.cs
+++ b/src/Lucene.Net.Tests/Index/TestCrash.cs
@@ -106,7 +106,7 @@ public virtual void TestCrashWhileIndexing()
Directory dir2 = NewDirectory(dir);
dir.Dispose();
- (new RandomIndexWriter(Random, dir2)).Dispose();
+ new RandomIndexWriter(Random, dir2).Dispose();
dir2.Dispose();
}
@@ -141,7 +141,7 @@ public virtual void TestWriterAfterCrash()
Directory dir2 = NewDirectory(dir);
dir.Dispose();
- (new RandomIndexWriter(Random, dir2)).Dispose();
+ new RandomIndexWriter(Random, dir2).Dispose();
dir2.Dispose();
}
@@ -179,7 +179,7 @@ public virtual void TestCrashAfterReopen()
Directory dir2 = NewDirectory(dir);
dir.Dispose();
- (new RandomIndexWriter(Random, dir2)).Dispose();
+ new RandomIndexWriter(Random, dir2).Dispose();
dir2.Dispose();
}
@@ -227,4 +227,4 @@ public virtual void TestCrashAfterCloseNoWait()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs b/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
index f76c03f3ba..dbd97f2a9d 100644
--- a/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
+++ b/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
@@ -71,7 +71,8 @@ private void IndexAndCrashOnCreateOutputSegments2()
// NOTE: cannot use RandomIndexWriter because it
// sometimes commits:
- IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
+ IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
indexWriter.AddDocument(Document);
// writes segments_1:
@@ -107,7 +108,8 @@ private void IndexAfterRestart()
// LUCENE-3627 (before the fix): this line fails because
// it doesn't know what to do with the created but empty
// segments_2 file
- IndexWriter indexWriter = new IndexWriter(realDirectory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
+ IndexWriter indexWriter = new IndexWriter(realDirectory,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
// currently the test fails above.
// however, to test the fix, the following lines should pass as well.
@@ -196,4 +198,4 @@ public override IndexOutput CreateOutput(string name, IOContext cxt)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestCustomNorms.cs b/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
index ddf42e9615..ebefeea338 100644
--- a/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
+++ b/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
@@ -42,8 +42,8 @@ namespace Lucene.Net.Index
[TestFixture]
public class TestCustomNorms : LuceneTestCase
{
- internal readonly string floatTestField = "normsTestFloat";
- internal readonly string exceptionTestField = "normsTestExcp";
+ internal const string floatTestField = "normsTestFloat";
+ internal const string exceptionTestField = "normsTestExcp";
[Test]
public virtual void TestFloatNorms()
@@ -53,7 +53,7 @@ public virtual void TestFloatNorms()
analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
- Similarity provider = new MySimProvider(this);
+ Similarity provider = new MySimProvider();
config.SetSimilarity(provider);
RandomIndexWriter writer = new RandomIndexWriter(Random, dir, config);
LineFileDocs docs = new LineFileDocs(Random);
@@ -92,13 +92,6 @@ public virtual void TestFloatNorms()
public class MySimProvider : PerFieldSimilarityWrapper
{
- private readonly TestCustomNorms outerInstance;
-
- public MySimProvider(TestCustomNorms outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
internal Similarity @delegate = new DefaultSimilarity();
public override float QueryNorm(float sumOfSquaredWeights)
@@ -108,7 +101,7 @@ public override float QueryNorm(float sumOfSquaredWeights)
public override Similarity Get(string field)
{
- if (outerInstance.floatTestField.Equals(field, StringComparison.Ordinal))
+ if (floatTestField.Equals(field, StringComparison.Ordinal))
{
return new FloatEncodingBoostSimilarity();
}
@@ -142,4 +135,4 @@ public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext con
}
}
}
-}
\ No newline at end of file
+}
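Promoting the field names from internal readonly string to internal const string is what lets MySimProvider shed its outerInstance reference: a readonly field lives on each TestCustomNorms instance, while a const is an implicitly static compile-time constant that any nested type can read directly. In miniature:

    using System;

    public class NormsTest
    {
        internal const string FloatTestField = "normsTestFloat"; // implicitly static

        public class SimProvider
        {
            // No outer-instance reference is needed to read a const of the enclosing type.
            public string Get(string field) =>
                field == FloatTestField ? "float-encoding" : "default";
        }

        public static void Main() =>
            Console.WriteLine(new SimProvider().Get("normsTestFloat")); // float-encoding
    }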
diff --git a/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs
index 58a8e01607..1916037e3b 100644
--- a/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs
@@ -1,10 +1,8 @@
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
-using Lucene.Net.Support.Threading;
using NUnit.Framework;
using System;
using System.Collections.Generic;
-using System.IO;
using System.Threading;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -40,15 +38,15 @@ namespace Lucene.Net.Index
using TermQuery = Lucene.Net.Search.TermQuery;
using TestUtil = Lucene.Net.Util.TestUtil;
- /*
- Verify we can read the pre-2.1 file format, do searches
- against it, and add documents to it.
- */
-
+ /// <summary>
+ /// Verify we can read the pre-2.1 file format, do searches
+ /// against it, and add documents to it.
+ /// </summary>
[TestFixture]
public class TestDeletionPolicy : LuceneTestCase
{
- private void VerifyCommitOrder<T>(IList<T> commits)
+ // LUCENENET specific - made static to avoid having to reference the outer class instance
+ private static void VerifyCommitOrder<T>(IList<T> commits)
where T : IndexCommit
{
if (commits.Count == 0)
@@ -70,21 +68,18 @@ private void VerifyCommitOrder<T>(IList<T> commits)
internal class KeepAllDeletionPolicy : IndexDeletionPolicy
{
- private readonly TestDeletionPolicy outerInstance;
-
internal int numOnInit;
internal int numOnCommit;
internal Directory dir;
- internal KeepAllDeletionPolicy(TestDeletionPolicy outerInstance, Directory dir)
+ internal KeepAllDeletionPolicy(Directory dir)
{
- this.outerInstance = outerInstance;
this.dir = dir;
}
public override void OnInit<T>(IList<T> commits)
{
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
numOnInit++;
}
public override void OnCommit<T>(IList<T> commits)
DirectoryReader r = DirectoryReader.Open(dir);
Assert.AreEqual(r.Leaves.Count, lastCommit.SegmentCount, "lastCommit.segmentCount()=" + lastCommit.SegmentCount + " vs IndexReader.segmentCount=" + r.Leaves.Count);
r.Dispose();
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
numOnCommit++;
}
}
/// <summary>
- /// this is useful for adding to a big index when you know
+ /// This is useful for adding to a big index when you know
/// readers are not using it.
/// </summary>
internal class KeepNoneOnInitDeletionPolicy : IndexDeletionPolicy
{
- private readonly TestDeletionPolicy outerInstance;
-
- public KeepNoneOnInitDeletionPolicy(TestDeletionPolicy outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
internal int numOnInit;
internal int numOnCommit;
public override void OnInit<T>(IList<T> commits)
{
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
numOnInit++;
// On init, delete all commit points:
foreach (IndexCommit commit in commits)
@@ -129,7 +117,7 @@ public override void OnInit<T>(IList<T> commits)
public override void OnCommit<T>(IList<T> commits)
{
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
int size = commits.Count;
// Delete all but last one:
for (int i = 0; i < size - 1; i++)
@@ -142,17 +130,14 @@ public override void OnCommit<T>(IList<T> commits)
internal class KeepLastNDeletionPolicy : IndexDeletionPolicy
{
- private readonly TestDeletionPolicy outerInstance;
-
internal int numOnInit;
internal int numOnCommit;
internal int numToKeep;
internal int numDelete;
internal ISet<string> seen = new JCG.HashSet<string>();
- public KeepLastNDeletionPolicy(TestDeletionPolicy outerInstance, int numToKeep)
+ public KeepLastNDeletionPolicy(int numToKeep)
{
- this.outerInstance = outerInstance;
this.numToKeep = numToKeep;
}
@@ -162,7 +147,7 @@ public override void OnInit<T>(IList<T> commits)
{
Console.WriteLine("TEST: onInit");
}
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
numOnInit++;
// do no deletions on init
DoDeletes(commits, false);
@@ -174,7 +159,7 @@ public override void OnCommit<T>(IList<T> commits)
{
Console.WriteLine("TEST: onCommit");
}
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
DoDeletes(commits, true);
}
@@ -207,22 +192,18 @@ internal static long GetCommitTime(IndexCommit commit)
return Convert.ToInt64(commit.UserData["commitTime"]);
}
- /*
- * Delete a commit only when it has been obsoleted by N
- * seconds.
- */
-
+ /// <summary>
+ /// Delete a commit only when it has been obsoleted by N
+ /// seconds.
+ /// </summary>
internal class ExpirationTimeDeletionPolicy : IndexDeletionPolicy
{
- private readonly TestDeletionPolicy outerInstance;
-
internal Directory dir;
internal double expirationTimeSeconds;
internal int numDelete;
- public ExpirationTimeDeletionPolicy(TestDeletionPolicy outerInstance, Directory dir, double seconds)
+ public ExpirationTimeDeletionPolicy(Directory dir, double seconds)
{
- this.outerInstance = outerInstance;
this.dir = dir;
this.expirationTimeSeconds = seconds;
}
@@ -233,13 +214,13 @@ public override void OnInit<T>(IList<T> commits)
{
return;
}
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
OnCommit(commits);
}
public override void OnCommit<T>(IList<T> commits)
{
- outerInstance.VerifyCommitOrder(commits);
+ VerifyCommitOrder(commits);
IndexCommit lastCommit = commits[commits.Count - 1];
@@ -258,17 +239,17 @@ public override void OnCommit<T>(IList<T> commits)
}
}
- /*
- * Test "by time expiration" deletion policy:
- */
-
+ /// <summary>
+ /// Test "by time expiration" deletion policy:
+ /// </summary>
[Test]
public virtual void TestExpirationTimeDeletionPolicy()
{
const double SECONDS = 2.0;
Directory dir = NewDirectory();
- IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(this, dir, SECONDS));
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(dir, SECONDS));
MergePolicy mp = conf.MergePolicy;
mp.NoCFSRatio = 1.0;
IndexWriter writer = new IndexWriter(dir, conf);
@@ -286,7 +267,9 @@ public virtual void TestExpirationTimeDeletionPolicy()
// Record last time when writer performed deletes of
// past commits
lastDeleteTime = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
- conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy);
+ conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetIndexDeletionPolicy(policy);
mp = conf.MergePolicy;
mp.NoCFSRatio = 1.0;
writer = new IndexWriter(dir, conf);
@@ -347,10 +330,9 @@ public virtual void TestExpirationTimeDeletionPolicy()
dir.Dispose();
}
- /*
- * Test a silly deletion policy that keeps all commits around.
- */
-
+ /// <summary>
+ /// Test a silly deletion policy that keeps all commits around.
+ /// </summary>
[Test]
public virtual void TestKeepAllDeletionPolicy()
{
@@ -365,7 +347,10 @@ public virtual void TestKeepAllDeletionPolicy()
Directory dir = NewDirectory();
- IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new KeepAllDeletionPolicy(this, dir)).SetMaxBufferedDocs(10).SetMergeScheduler(new SerialMergeScheduler());
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(new KeepAllDeletionPolicy(dir))
+ .SetMaxBufferedDocs(10)
+ .SetMergeScheduler(new SerialMergeScheduler());
MergePolicy mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
IndexWriter writer = new IndexWriter(dir, conf);
@@ -384,7 +369,9 @@ public virtual void TestKeepAllDeletionPolicy()
}
if (needsMerging)
{
- conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy);
+ conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetIndexDeletionPolicy(policy);
mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
if (Verbose)
@@ -444,16 +431,20 @@ public virtual void TestKeepAllDeletionPolicy()
}
}
- /* Uses KeepAllDeletionPolicy to keep all commits around,
- * then, opens a new IndexWriter on a previous commit
- * point. */
-
+ /// <summary>
+ /// Uses KeepAllDeletionPolicy to keep all commits around,
+ /// then opens a new IndexWriter on a previous commit
+ /// point.
+ /// </summary>
[Test]
public virtual void TestOpenPriorSnapshot()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new KeepAllDeletionPolicy(this, dir)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(new KeepAllDeletionPolicy(dir))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy(10)));
KeepAllDeletionPolicy policy = (KeepAllDeletionPolicy)writer.Config.IndexDeletionPolicy;
for (int i = 0; i < 10; i++)
{
@@ -478,7 +469,8 @@ public virtual void TestOpenPriorSnapshot()
Assert.IsTrue(lastCommit != null);
// Now add 1 doc and merge
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(policy));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(policy));
AddDoc(writer);
Assert.AreEqual(11, writer.NumDocs);
writer.ForceMerge(1);
@@ -487,7 +479,9 @@ public virtual void TestOpenPriorSnapshot()
Assert.AreEqual(6, DirectoryReader.ListCommits(dir).Count);
// Now open writer on the commit just before merge:
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(policy).SetIndexCommit(lastCommit));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(policy)
+ .SetIndexCommit(lastCommit));
Assert.AreEqual(10, writer.NumDocs);
// Should undo our rollback:
@@ -499,7 +493,9 @@ public virtual void TestOpenPriorSnapshot()
Assert.AreEqual(11, r.NumDocs);
r.Dispose();
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(policy).SetIndexCommit(lastCommit));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(policy)
+ .SetIndexCommit(lastCommit));
Assert.AreEqual(10, writer.NumDocs);
// Commits the rollback:
writer.Dispose();
@@ -515,7 +511,8 @@ public virtual void TestOpenPriorSnapshot()
r.Dispose();
// Re-merge
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(policy));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(policy));
writer.ForceMerge(1);
writer.Dispose();
@@ -526,7 +523,8 @@ public virtual void TestOpenPriorSnapshot()
// Now open writer on the commit just before merging,
// but this time keeping only the last commit:
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexCommit(lastCommit));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexCommit(lastCommit));
Assert.AreEqual(10, writer.NumDocs);
// Reader still sees fully merged index, because writer
@@ -547,11 +545,11 @@ public virtual void TestOpenPriorSnapshot()
dir.Dispose();
}
- /* Test keeping NO commit points. this is a viable and
- * useful case eg where you want to build a big index and
- * you know there are no readers.
- */
-
+ /// <summary>
+ /// Test keeping NO commit points. This is a viable and
+ /// useful case, e.g. where you want to build a big index and
+ /// you know there are no readers.
+ /// </summary>
[Test]
public virtual void TestKeepNoneOnInitDeletionPolicy()
{
@@ -561,7 +559,10 @@ public virtual void TestKeepNoneOnInitDeletionPolicy()
Directory dir = NewDirectory();
- IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy(this)).SetMaxBufferedDocs(10);
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy())
+ .SetMaxBufferedDocs(10);
MergePolicy mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
IndexWriter writer = new IndexWriter(dir, conf);
@@ -572,7 +573,9 @@ public virtual void TestKeepNoneOnInitDeletionPolicy()
}
writer.Dispose();
- conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy);
+ conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetIndexDeletionPolicy(policy);
mp = conf.MergePolicy;
mp.NoCFSRatio = 1.0;
writer = new IndexWriter(dir, conf);
@@ -594,10 +597,9 @@ public virtual void TestKeepNoneOnInitDeletionPolicy()
}
}
- /*
- * Test a deletion policy that keeps last N commits.
- */
-
+ /// <summary>
+ /// Test a deletion policy that keeps the last N commits.
+ /// </summary>
[Test]
public virtual void TestKeepLastNDeletionPolicy()
{
@@ -609,10 +611,13 @@ public virtual void TestKeepLastNDeletionPolicy()
Directory dir = NewDirectory();
- KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
+ KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
for (int j = 0; j < N + 1; j++)
{
- IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(policy).SetMaxBufferedDocs(10);
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetIndexDeletionPolicy(policy)
+ .SetMaxBufferedDocs(10);
MergePolicy mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
IndexWriter writer = new IndexWriter(dir, conf);
@@ -662,11 +667,10 @@ public virtual void TestKeepLastNDeletionPolicy()
}
}
- /*
- * Test a deletion policy that keeps last N commits
- * around, through creates.
- */
-
+ /// <summary>
+ /// Test a deletion policy that keeps the last N commits
+ /// around, through creates.
+ /// </summary>
[Test]
public virtual void TestKeepLastNDeletionPolicyWithCreates()
{
@@ -677,7 +681,10 @@ public virtual void TestKeepLastNDeletionPolicyWithCreates()
bool useCompoundFile = (pass % 2) != 0;
Directory dir = NewDirectory();
- IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(new KeepLastNDeletionPolicy(this, N)).SetMaxBufferedDocs(10);
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetIndexDeletionPolicy(new KeepLastNDeletionPolicy(N))
+ .SetMaxBufferedDocs(10);
MergePolicy mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
IndexWriter writer = new IndexWriter(dir, conf);
@@ -688,7 +695,10 @@ public virtual void TestKeepLastNDeletionPolicyWithCreates()
for (int i = 0; i < N + 1; i++)
{
- conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy).SetMaxBufferedDocs(10);
+ conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetIndexDeletionPolicy(policy)
+ .SetMaxBufferedDocs(10);
mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
writer = new IndexWriter(dir, conf);
@@ -699,7 +709,9 @@ public virtual void TestKeepLastNDeletionPolicyWithCreates()
}
// this is a commit
writer.Dispose();
- conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetIndexDeletionPolicy(policy).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+ conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(policy)
+ .SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(dir, conf);
policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
writer.DeleteDocuments(new Term("id", "" + (i * (N + 1) + 3)));
@@ -711,7 +723,9 @@ public virtual void TestKeepLastNDeletionPolicyWithCreates()
Assert.AreEqual(16, hits.Length);
reader.Dispose();
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(policy));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetIndexDeletionPolicy(policy));
policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
// this will not commit: there are no changes
// pending because we opened for "create":
@@ -797,4 +811,4 @@ private void AddDoc(IndexWriter writer)
writer.AddDocument(doc);
}
}
-}
\ No newline at end of file
+}
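Two recurring cleanups meet in this file: VerifyCommitOrder becomes static because it touches no instance state, and the old /* ... */ banner comments become XML doc comments, which is what gets the summaries into the compiler's /doc output and IDE tooltips. The comment conversion in miniature:

    public static class DocCommentDemo
    {
        /* Old style: a plain block comment, invisible to the XML documentation pipeline. */
        public static void OldStyle() { }

        /// <summary>
        /// New style: emitted into the /doc XML and shown on hover in the IDE.
        /// </summary>
        public static void NewStyle() { }

        public static void Main() { OldStyle(); NewStyle(); }
    }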
diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
index 42ade8a9eb..b9eac4c5bf 100644
--- a/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
@@ -10,7 +10,6 @@
using JCG = J2N.Collections.Generic;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
-using Lucene.Net.Support.Threading;
namespace Lucene.Net.Index
{
@@ -165,7 +164,8 @@ public virtual void TestIsCurrent()
}
/// <summary>
- /// Tests the IndexReader.getFieldNames implementation </summary>
+ /// Tests the IndexReader.getFieldNames implementation
+ /// </summary>
/// <exception cref="Exception"> on error </exception>
[Test]
public virtual void TestGetFieldNames()
@@ -318,7 +318,8 @@ public virtual void TestTermVectors()
{
Directory d = NewDirectory();
// set up writer
- IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
+ IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NewLogMergePolicy()));
// want to get some more segments here
// new termvector fields
int mergeFactor = ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor;
@@ -349,19 +350,7 @@ public virtual void TestTermVectors()
d.Dispose();
}
- internal virtual void AssertTermDocsCount(string msg, IndexReader reader, Term term, int expected)
- {
- DocsEnum tdocs = TestUtil.Docs(Random, reader, term.Field, new BytesRef(term.Text), MultiFields.GetLiveDocs(reader), null, 0);
- int count = 0;
- if (tdocs != null)
- {
- while (tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
- {
- count++;
- }
- }
- Assert.AreEqual(expected, count, msg + ", count mismatch");
- }
+ // LUCENENET specific - Removed AssertTermDocsCount() because it was not in use
[Test]
public virtual void TestBinaryFields()
@@ -448,7 +437,7 @@ public virtual void TestFilesOpenClose()
dir.Dispose();
// Try to erase the data - this ensures that the writer closed all files
- System.IO.Directory.Delete(dirFile.FullName, true);
+ TestUtil.Rm(dirFile);
dir = NewFSDirectory(dirFile);
// Now create the data set again, just as before
@@ -465,7 +454,7 @@ public virtual void TestFilesOpenClose()
// The following will fail if reader did not close
// all files
- System.IO.Directory.Delete(dirFile.FullName, true);
+ TestUtil.Rm(dirFile);
}
[Test]
@@ -499,12 +488,7 @@ public virtual void TestOpenReaderAfterDelete()
dir.Dispose();
}
- /// <summary>
- /// LUCENENET specific
- /// Is non-static because NewStringField, NewTextField, NewField methods
- /// are no longer static.
- /// </summary>
- internal void AddDocumentWithFields(IndexWriter writer)
+ internal static void AddDocumentWithFields(IndexWriter writer)
{
Document doc = new Document();
@@ -517,12 +501,7 @@ internal void AddDocumentWithFields(IndexWriter writer)
writer.AddDocument(doc);
}
- /// <summary>
- /// LUCENENET specific
- /// Is non-static because NewStringField, NewTextField, NewField methods
- /// are no longer static.
- /// </summary>
- internal void AddDocumentWithDifferentFields(IndexWriter writer)
+ internal static void AddDocumentWithDifferentFields(IndexWriter writer)
{
Document doc = new Document();
@@ -535,12 +514,7 @@ internal void AddDocumentWithDifferentFields(IndexWriter writer)
writer.AddDocument(doc);
}
- /// <summary>
- /// LUCENENET specific
- /// Is non-static because NewTextField, NewField methods are no longer
- /// static.
- /// </summary>
- internal void AddDocumentWithTermVectorFields(IndexWriter writer)
+ internal static void AddDocumentWithTermVectorFields(IndexWriter writer)
{
Document doc = new Document();
FieldType customType5 = new FieldType(TextField.TYPE_STORED);
@@ -564,11 +538,7 @@ internal void AddDocumentWithTermVectorFields(IndexWriter writer)
writer.AddDocument(doc);
}
- /// <summary>
- /// LUCENENET specific
- /// Is non-static because NewTextField is no longer static.
- /// </summary>
- internal void AddDoc(IndexWriter writer, string value)
+ internal static void AddDoc(IndexWriter writer, string value)
{
Document doc = new Document();
doc.Add(NewTextField("content", value, Field.Store.NO));
@@ -696,7 +666,9 @@ public virtual void TestGetIndexCommit()
Directory d = NewDirectory();
// set up writer
- IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy(10)));
for (int i = 0; i < 27; i++)
{
AddDocumentWithFields(writer);
@@ -713,7 +685,10 @@ public virtual void TestGetIndexCommit()
Assert.IsTrue(c.Equals(r.IndexCommit));
// Change the index
- writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+ writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND)
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy(10)));
for (int i = 0; i < 7; i++)
{
AddDocumentWithFields(writer);
@@ -726,7 +701,8 @@ public virtual void TestGetIndexCommit()
Assert.IsFalse(r2.IndexCommit.SegmentCount == 1);
r2.Dispose();
- writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND));
+ writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.APPEND));
writer.ForceMerge(1);
writer.Dispose();
@@ -740,7 +716,7 @@ public virtual void TestGetIndexCommit()
d.Dispose();
}
- internal Document CreateDocument(string id)
+ internal static Document CreateDocument(string id)
{
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -758,7 +734,7 @@ internal Document CreateDocument(string id)
public virtual void TestNoDir()
{
DirectoryInfo tempDir = CreateTempDir("doesnotexist");
- System.IO.Directory.Delete(tempDir.FullName, true);
+ TestUtil.Rm(tempDir);
Directory dir = NewFSDirectory(tempDir);
try
{
@@ -778,7 +754,8 @@ public virtual void TestNoDupCommitFileNames()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2));
writer.AddDocument(CreateDocument("a"));
writer.AddDocument(CreateDocument("a"));
writer.AddDocument(CreateDocument("a"));
@@ -806,7 +783,8 @@ public virtual void TestNoDupCommitFileNames()
public virtual void TestFieldCacheReuseAfterReopen()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NewLogMergePolicy(10)));
Document doc = new Document();
doc.Add(NewStringField("number", "17", Field.Store.NO));
writer.AddDocument(doc);
@@ -895,7 +873,9 @@ public virtual void TestNoTermsIndex()
}
Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
- writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())).SetMergePolicy(NewLogMergePolicy(10)));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()))
+ .SetMergePolicy(NewLogMergePolicy(10)));
writer.AddDocument(doc);
writer.Dispose();
@@ -950,7 +930,8 @@ public virtual void TestPrepareCommitIsCurrent()
public virtual void TestListCommits()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null).SetIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null)
+ .SetIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
writer.AddDocument(new Document());
writer.Commit();
@@ -1089,7 +1070,7 @@ public virtual void TestReaderFinishedListener()
writer.Commit();
DirectoryReader reader = writer.GetReader();
int[] closeCount = new int[1];
- IReaderDisposedListener listener = new ReaderClosedListenerAnonymousClass(this, reader, closeCount);
+ IReaderDisposedListener listener = new ReaderClosedListenerAnonymousClass(closeCount);
reader.AddReaderDisposedListener(listener);
@@ -1110,15 +1091,10 @@ public virtual void TestReaderFinishedListener()
private sealed class ReaderClosedListenerAnonymousClass : IReaderDisposedListener
{
- private readonly TestDirectoryReader outerInstance;
-
- private readonly DirectoryReader reader;
private readonly int[] closeCount;
- public ReaderClosedListenerAnonymousClass(TestDirectoryReader outerInstance, DirectoryReader reader, int[] closeCount)
+ public ReaderClosedListenerAnonymousClass(int[] closeCount)
{
- this.outerInstance = outerInstance;
- this.reader = reader;
this.closeCount = closeCount;
}
@@ -1249,7 +1225,6 @@ public virtual void TestLoadCertainFields()
dir.Dispose();
}
- /// @deprecated just to ensure IndexReader static methods work
[Obsolete("just to ensure IndexReader static methods work")]
[Test]
public virtual void TestBackwards()
@@ -1321,4 +1296,4 @@ public virtual void TestIndexExistsOnNonExistentDirectory()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
index fed689faaa..cecfbd61a6 100644
--- a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
+++ b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
@@ -6,7 +6,6 @@
using System;
using System.Collections.Generic;
using System.Text;
-using System.Threading;
using JCG = J2N.Collections.Generic;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -52,25 +51,22 @@ public virtual void TestReopen_Mem()
Directory dir1 = NewDirectory();
CreateIndex(Random, dir1, false);
- PerformDefaultTests(new TestReopenAnonymousClass(this, dir1));
+ PerformDefaultTests(new TestReopenAnonymousClass(dir1));
dir1.Dispose();
Directory dir2 = NewDirectory();
CreateIndex(Random, dir2, true);
- PerformDefaultTests(new TestReopenAnonymousClass2(this, dir2));
+ PerformDefaultTests(new TestReopenAnonymousClass2(dir2));
dir2.Dispose();
}
private sealed class TestReopenAnonymousClass : TestReopen
{
- private readonly TestDirectoryReaderReopen outerInstance;
-
private Directory dir1;
- public TestReopenAnonymousClass(TestDirectoryReaderReopen outerInstance, Directory dir1)
+ public TestReopenAnonymousClass(Directory dir1)
{
- this.outerInstance = outerInstance;
this.dir1 = dir1;
}
@@ -87,13 +83,10 @@ protected internal override DirectoryReader OpenReader()
private sealed class TestReopenAnonymousClass2 : TestReopen
{
- private readonly TestDirectoryReaderReopen outerInstance;
-
private readonly Directory dir2;
- public TestReopenAnonymousClass2(TestDirectoryReaderReopen outerInstance, Directory dir2)
+ public TestReopenAnonymousClass2(Directory dir2)
{
- this.outerInstance = outerInstance;
this.dir2 = dir2;
}
@@ -129,14 +122,18 @@ public virtual void TestCommitRecreate()
dir.Dispose();
}
- private void DoTestReopenWithCommit(Random random, Directory dir, bool withReopen)
+ // LUCENENET specific - made static
+ private static void DoTestReopenWithCommit(Random random, Directory dir, bool withReopen)
{
- IndexWriter iwriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.CREATE).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy()));
+ IndexWriter iwriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMergeScheduler(new SerialMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy()));
iwriter.Commit();
DirectoryReader reader = DirectoryReader.Open(dir);
try
{
- int M = 3;
+ const int M = 3;
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.IsTokenized = false;
FieldType customType2 = new FieldType(TextField.TYPE_STORED);
@@ -189,7 +186,8 @@ private void DoTestReopenWithCommit(Random random, Directory dir, bool withReope
}
}
- private void PerformDefaultTests(TestReopen test)
+ // LUCENENET specific - made static
+ private static void PerformDefaultTests(TestReopen test)
{
DirectoryReader index1 = test.OpenReader();
DirectoryReader index2 = test.OpenReader();
@@ -249,7 +247,7 @@ public virtual void TestThreadSafety()
writer.ForceMerge(1);
writer.Dispose();
- TestReopen test = new TestReopenAnonymousClass3(this, dir, n);
+ TestReopen test = new TestReopenAnonymousClass3(dir, n);
IList<ReaderCouple> readers = new SynchronizedList<ReaderCouple>();
DirectoryReader firstReader = DirectoryReader.Open(dir);
@@ -277,11 +275,11 @@ public virtual void TestThreadSafety()
if (i < 4 || (i >= 10 && i < 14) || i > 18)
{
- task = new ReaderThreadTaskAnonymousClass(this, test, readers, readersToClose, r, index);
+ task = new ReaderThreadTaskAnonymousClass(test, readers, readersToClose, r, index);
}
else
{
- task = new ReaderThreadTaskAnonymousClass2(this, readers);
+ task = new ReaderThreadTaskAnonymousClass2(readers);
}
threads[i] = new ReaderThread(task);
@@ -340,14 +338,11 @@ public virtual void TestThreadSafety()
private sealed class TestReopenAnonymousClass3 : TestReopen
{
- private readonly TestDirectoryReaderReopen outerInstance;
-
private readonly Directory dir;
private readonly int n;
- public TestReopenAnonymousClass3(TestDirectoryReaderReopen outerInstance, Directory dir, int n)
+ public TestReopenAnonymousClass3(Directory dir, int n)
{
- this.outerInstance = outerInstance;
this.dir = dir;
this.n = n;
}
@@ -367,17 +362,14 @@ protected internal override DirectoryReader OpenReader()
private sealed class ReaderThreadTaskAnonymousClass : ReaderThreadTask
{
- private readonly TestDirectoryReaderReopen outerInstance;
-
private readonly TestReopen test;
private readonly IList<ReaderCouple> readers;
private readonly ISet<DirectoryReader> readersToClose;
private readonly DirectoryReader r;
private readonly int index;
- public ReaderThreadTaskAnonymousClass(TestDirectoryReaderReopen outerInstance, Lucene.Net.Index.TestDirectoryReaderReopen.TestReopen test, IList<ReaderCouple> readers, ISet<DirectoryReader> readersToClose, DirectoryReader r, int index)
+ public ReaderThreadTaskAnonymousClass(TestReopen test, IList<ReaderCouple> readers, ISet<DirectoryReader> readersToClose, DirectoryReader r, int index)
{
- this.outerInstance = outerInstance;
this.test = test;
this.readers = readers;
this.readersToClose = readersToClose;
@@ -393,7 +385,7 @@ public override void Run()
if (index % 2 == 0)
{
// refresh reader synchronized
- ReaderCouple c = (outerInstance.RefreshReader(r, test, index, true));
+ ReaderCouple c = RefreshReader(r, test, index, true);
readersToClose.Add(c.newReader);
readersToClose.Add(c.refreshedReader);
readers.Add(c);
@@ -435,13 +427,10 @@ public override void Run()
private sealed class ReaderThreadTaskAnonymousClass2 : ReaderThreadTask
{
- private readonly TestDirectoryReaderReopen outerInstance;
-
private readonly IList<ReaderCouple> readers;
- public ReaderThreadTaskAnonymousClass2(TestDirectoryReaderReopen outerInstance, IList<ReaderCouple> readers)
+ public ReaderThreadTaskAnonymousClass2(IList<ReaderCouple> readers)
{
- this.outerInstance = outerInstance;
this.readers = readers;
}
@@ -523,14 +512,17 @@ public override void Run()
}
}
- private object createReaderMutex = new object();
+ // LUCENENET specific - made static readonly
+ private static readonly object createReaderMutex = new object();
- private ReaderCouple RefreshReader(DirectoryReader reader, bool hasChanges)
+ // LUCENENET specific - made static
+ private static ReaderCouple RefreshReader(DirectoryReader reader, bool hasChanges)
{
return RefreshReader(reader, null, -1, hasChanges);
}
- internal virtual ReaderCouple RefreshReader(DirectoryReader reader, TestReopen test, int modify, bool hasChanges)
+ // LUCENENET specific - made static
+ internal static ReaderCouple RefreshReader(DirectoryReader reader, TestReopen test, int modify, bool hasChanges)
{
UninterruptableMonitor.Enter(createReaderMutex);
try
@@ -583,14 +575,11 @@ internal virtual ReaderCouple RefreshReader(DirectoryReader reader, TestReopen t
}
}
- /// <summary>
- /// LUCENENET specific
- /// Is non-static because NewIndexWriterConfig is no longer static.
- /// </summary>
- public void CreateIndex(Random random, Directory dir, bool multiSegment)
+ public static void CreateIndex(Random random, Directory dir, bool multiSegment)
{
IndexWriter.Unlock(dir);
- IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(new LogDocMergePolicy()));
+ IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random))
+ .SetMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++)
{
@@ -724,7 +713,10 @@ public override void OnCommit(IList<IndexCommit> commits)
public virtual void TestReopenOnCommit()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new KeepAllCommits()).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(new KeepAllCommits())
+ .SetMaxBufferedDocs(-1)
+ .SetMergePolicy(NewLogMergePolicy(10)));
for (int i = 0; i < 4; i++)
{
Document doc = new Document();
@@ -807,4 +799,4 @@ public virtual void TestOpenIfChangedNRTToCommit()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDoc.cs b/src/Lucene.Net.Tests/Index/TestDoc.cs
index a60372ad10..7d2b8256d0 100644
--- a/src/Lucene.Net.Tests/Index/TestDoc.cs
+++ b/src/Lucene.Net.Tests/Index/TestDoc.cs
@@ -106,7 +106,7 @@ private FileInfo CreateOutput(string name, string text)
}
/// <summary>
- /// this test executes a number of merges and compares the contents of
+ /// This test executes a number of merges and compares the contents of
/// the segments created when using compound file or not using one.
///
/// TODO: the original test used to print the segment contents to System.out
@@ -130,7 +130,10 @@ public virtual void TestIndexAndMerge()
wrapper.AssertNoUnreferencedFilesOnDispose = false;
}
- IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(-1)
+ .SetMergePolicy(NewLogMergePolicy(10)));
SegmentCommitInfo si1 = IndexDoc(writer, "test.txt");
PrintSegment(@out, si1);
@@ -168,7 +171,10 @@ public virtual void TestIndexAndMerge()
wrapper.AssertNoUnreferencedFilesOnDispose = false;
}
- writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(10)));
+ writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(-1)
+ .SetMergePolicy(NewLogMergePolicy(10)));
si1 = IndexDoc(writer, "test.txt");
PrintSegment(@out, si1);
@@ -216,19 +222,23 @@ private SegmentCommitInfo Merge(Directory dir, SegmentCommitInfo si1, SegmentCom
TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.Info.Dir);
SegmentInfo si = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null);
- SegmentMerger merger = new SegmentMerger(new JCG.List<AtomicReader> { r1, r2 }, si, (InfoStream)InfoStream.Default, trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), context, true);
+ SegmentMerger merger = new SegmentMerger(new JCG.List<AtomicReader> { r1, r2 },
+ si, InfoStream.Default, trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL,
+ CheckAbort.NONE, new FieldInfos.FieldNumbers(), context, true);
MergeState mergeState = merger.Merge();
r1.Dispose();
r2.Dispose();
- SegmentInfo info = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, si1.Info.DocCount + si2.Info.DocCount, false, codec, null);
+ SegmentInfo info = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged,
+ si1.Info.DocCount + si2.Info.DocCount,
+ false, codec, null);
info.SetFiles(new JCG.HashSet<string>(trackingDir.CreatedFiles));
if (useCompoundFile)
{
- ICollection<string> filesToDelete = IndexWriter.CreateCompoundFile((InfoStream)InfoStream.Default, dir, CheckAbort.NONE, info, NewIOContext(Random));
+ ICollection<string> filesToDelete = IndexWriter.CreateCompoundFile(InfoStream.Default, dir, CheckAbort.NONE, info, NewIOContext(Random));
info.UseCompoundFile = true;
- foreach (String fileToDelete in filesToDelete)
+ foreach (string fileToDelete in filesToDelete)
{
si1.Info.Dir.DeleteFile(fileToDelete);
}
@@ -276,4 +286,4 @@ private void PrintSegment(StreamWriter @out, SegmentCommitInfo si)
reader.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
index 9c43579206..bca4cfb3a9 100644
--- a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
@@ -60,7 +60,7 @@ protected internal override TokenStreamComponents CreateComponents(string fieldN
Tokenizer tokenizer = new MockTokenizer(input);
if (fieldName.Equals("distinctiveFieldName", StringComparison.Ordinal))
{
- TokenFilter tosser = new TokenFilterAnonymousClass(this, tokenizer);
+ TokenFilter tosser = new TokenFilterAnonymousClass(tokenizer);
return new TokenStreamComponents(tokenizer, tosser);
}
else
@@ -71,12 +71,9 @@ protected internal override TokenStreamComponents CreateComponents(string fieldN
private sealed class TokenFilterAnonymousClass : TokenFilter
{
- private readonly ThrowingAnalyzer outerInstance;
-
- public TokenFilterAnonymousClass(ThrowingAnalyzer outerInstance, Tokenizer tokenizer)
+ public TokenFilterAnonymousClass(Tokenizer tokenizer)
: base(tokenizer)
{
- this.outerInstance = outerInstance;
}
public sealed override bool IncrementToken()
@@ -132,9 +129,7 @@ public virtual void TestNoExtraNoise()
{
writer.AddDocument(doc);
}
-#pragma warning disable 168
- catch (BadNews badNews)
-#pragma warning restore 168
+ catch (BadNews /*badNews*/)
{
Assert.Fail("Unwanted exception");
}
@@ -146,4 +141,4 @@ public virtual void TestNoExtraNoise()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
index 1ca744caed..05d8a4d9f7 100644
--- a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
@@ -5,7 +5,6 @@
using NUnit.Framework;
using System;
using System.Collections.Generic;
-using System.Linq;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
using JCG = J2N.Collections.Generic;
@@ -540,4 +539,4 @@ public virtual void TestSortedTermsEnum()
directory.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
index 0462d5568f..894fee5eec 100644
--- a/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
@@ -534,7 +534,7 @@ public virtual void TestMixedTypesDifferentThreads()
Document doc = new Document();
doc.Add(field);
- threads[i] = new ThreadAnonymousClass(this, w, startingGun, hitExc, doc);
+ threads[i] = new ThreadAnonymousClass(w, startingGun, hitExc, doc);
threads[i].Start();
}
@@ -551,16 +551,13 @@ public virtual void TestMixedTypesDifferentThreads()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestDocValuesIndexing outerInstance;
-
private readonly IndexWriter w;
private readonly CountdownEvent startingGun;
private readonly AtomicBoolean hitExc;
private readonly Document doc;
- public ThreadAnonymousClass(TestDocValuesIndexing outerInstance, IndexWriter w, CountdownEvent startingGun, AtomicBoolean hitExc, Document doc)
+ public ThreadAnonymousClass(IndexWriter w, CountdownEvent startingGun, AtomicBoolean hitExc, Document doc)
{
- this.outerInstance = outerInstance;
this.w = w;
this.startingGun = startingGun;
this.hitExc = hitExc;
@@ -577,7 +574,7 @@ public override void Run()
catch (Exception iae) when (iae.IsIllegalArgumentException())
{
// expected
- hitExc.Value = (true);
+ hitExc.Value = true;
}
catch (Exception e) when (e.IsException())
{
@@ -939,4 +936,4 @@ public virtual void TestSameFieldNameForPostingAndDocValue()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
index eec9df0c5a..3bb8736e83 100644
--- a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
@@ -5,7 +5,6 @@
using NUnit.Framework;
using System;
using System.Collections.Generic;
-using System.IO;
using System.Threading;
using JCG = J2N.Collections.Generic;
using Assert = Lucene.Net.TestFramework.Assert;
@@ -83,7 +82,7 @@ public virtual void Test()
for (int t = 0; t < numThreads; t++)
{
Random threadRandom = new J2N.Randomizer(Random.NextInt64());
- ThreadJob thread = new ThreadAnonymousClass(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
+ ThreadJob thread = new ThreadAnonymousClass(numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
thread.Start();
threads.Add(thread);
}
@@ -101,8 +100,6 @@ public virtual void Test()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestDocValuesWithThreads outerInstance;
-
private readonly IList<long> numbers;
private readonly IList<BytesRef> binary;
private readonly IList<BytesRef> sorted;
@@ -111,9 +108,8 @@ private sealed class ThreadAnonymousClass : ThreadJob
private readonly CountdownEvent startingGun;
private readonly Random threadRandom;
- public ThreadAnonymousClass(TestDocValuesWithThreads outerInstance, IList<long> numbers, IList<BytesRef> binary, IList<BytesRef> sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random threadRandom)
+ public ThreadAnonymousClass(IList<long> numbers, IList<BytesRef> binary, IList<BytesRef> sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random threadRandom)
{
- this.outerInstance = outerInstance;
this.numbers = numbers;
this.binary = binary;
this.sorted = sorted;
@@ -248,7 +244,7 @@ public virtual void Test2()
long END_TIME = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) + (TestNightly ? 30 : 1); // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
- int NUM_THREADS = TestUtil.NextInt32(LuceneTestCase.Random, 1, 10);
+ int NUM_THREADS = TestUtil.NextInt32(Random, 1, 10);
ThreadJob[] threads = new ThreadJob[NUM_THREADS];
for (int thread = 0; thread < NUM_THREADS; thread++)
{
@@ -310,4 +306,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
index 2434426596..d66ab5d767 100644
--- a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
@@ -51,7 +51,7 @@ public override void SetUp()
}
/// <summary>
- /// Simple testcase for <seealso cref="DocsAndPositionsEnum"/>
+ /// Simple testcase for <see cref="DocsAndPositionsEnum"/>
/// </summary>
[Test]
public virtual void TestPositionsSimple()
@@ -219,7 +219,7 @@ public virtual void TestRandomDocs()
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
int numDocs = AtLeast(49);
- int max = 15678;
+ const int max = 15678;
int term = Random.Next(max);
int[] freqInDoc = new int[numDocs];
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
@@ -314,7 +314,7 @@ public virtual void TestLargeNumberOfPositions()
{
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
- int howMany = 1000;
+ const int howMany = 1000;
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.OmitNorms = true;
for (int i = 0; i < 39; i++)
@@ -428,4 +428,4 @@ public virtual void TestDocsAndPositionsEnumStart()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
index 1c3d469469..cc7a4a1b77 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
@@ -156,7 +156,7 @@ public override int GetPositionIncrementGap(string fieldName)
[Test]
public virtual void TestTokenReuse()
{
- Analyzer analyzer = new AnalyzerAnonymousClass2(this);
+ Analyzer analyzer = new AnalyzerAnonymousClass2();
IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
@@ -184,34 +184,24 @@ public virtual void TestTokenReuse()
private sealed class AnalyzerAnonymousClass2 : Analyzer
{
- private readonly TestDocumentWriter outerInstance;
-
- public AnalyzerAnonymousClass2(TestDocumentWriter outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
{
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
- return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousClass(this, tokenizer));
+ return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousClass(tokenizer));
}
private sealed class TokenFilterAnonymousClass : TokenFilter
{
- private readonly AnalyzerAnonymousClass2 outerInstance;
-
- public TokenFilterAnonymousClass(AnalyzerAnonymousClass2 outerInstance, Tokenizer tokenizer)
+ public TokenFilterAnonymousClass(Tokenizer tokenizer)
: base(tokenizer)
{
- this.outerInstance = outerInstance;
- first = true;
+ // LUCENENET specific: AddAttribute must be called from the ctor
termAtt = AddAttribute<ICharTermAttribute>();
payloadAtt = AddAttribute<IPayloadAttribute>();
posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
}
- internal bool first;
+ internal bool first = true;
internal AttributeSource.State state;
public sealed override bool IncrementToken()
@@ -266,7 +256,7 @@ public virtual void TestPreAnalyzedField()
IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
Document doc = new Document();
- doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousClass(this)));
+ doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousClass()));
writer.AddDocument(doc);
writer.Commit();
@@ -294,18 +284,14 @@ public virtual void TestPreAnalyzedField()
private sealed class TokenStreamAnonymousClass : TokenStream
{
- private readonly TestDocumentWriter outerInstance;
-
- public TokenStreamAnonymousClass(TestDocumentWriter outerInstance)
+ public TokenStreamAnonymousClass()
{
- this.outerInstance = outerInstance;
- tokens = new string[] { "term1", "term2", "term3", "term2" };
- index = 0;
+ // LUCENENET specific: AddAttribute must be called from the ctor
termAtt = AddAttribute<ICharTermAttribute>();
}
- private string[] tokens;
- private int index;
+ private string[] tokens = new string[] { "term1", "term2", "term3", "term2" };
+ private int index /*= 0*/;
private ICharTermAttribute termAtt;
@@ -402,4 +388,4 @@ public virtual void TestLUCENE_1590()
reader.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
index 1655afec7b..382cf17831 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Index
using TermQuery = Lucene.Net.Search.TermQuery;
/// <summary>
- /// Unit test for <seealso cref="DocumentsWriterDeleteQueue"/>
+ /// Unit test for <see cref="DocumentsWriterDeleteQueue"/>
/// </summary>
[TestFixture]
public class TestDocumentsWriterDeleteQueue : LuceneTestCase
@@ -213,11 +213,11 @@ public virtual void TestAnyChanges()
public virtual void TestPartiallyAppliedGlobalSlice()
{
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
- System.Reflection.FieldInfo field = typeof(DocumentsWriterDeleteQueue).GetField("globalBufferLock",
+ System.Reflection.FieldInfo field = typeof(DocumentsWriterDeleteQueue).GetField("globalBufferLock",
BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Instance);
ReentrantLock @lock = (ReentrantLock)field.GetValue(queue);
@lock.Lock();
- var t = new ThreadAnonymousClass(this, queue);
+ var t = new ThreadAnonymousClass(queue);
t.Start();
t.Join();
@lock.Unlock();
@@ -232,13 +232,10 @@ public virtual void TestPartiallyAppliedGlobalSlice()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestDocumentsWriterDeleteQueue outerInstance;
-
private DocumentsWriterDeleteQueue queue;
- public ThreadAnonymousClass(TestDocumentsWriterDeleteQueue outerInstance, DocumentsWriterDeleteQueue queue)
+ public ThreadAnonymousClass(DocumentsWriterDeleteQueue queue)
{
- this.outerInstance = outerInstance;
this.queue = queue;
}
@@ -338,4 +335,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
index c773def69c..e59ac79119 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Index
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/// <summary>
- /// Tests for <seealso cref="DocumentsWriterStallControl"/>
+ /// Tests for <see cref="DocumentsWriterStallControl"/>
/// </summary>
[TestFixture]
[Timeout(900_000)] // 15 minutes
@@ -149,7 +149,7 @@ public virtual void TestAccquireReleaseRace()
Start(threads);
int iters = AtLeast(10000);
//float checkPointProbability = TestNightly ? 0.5f : 0.1f;
- // LUCENENET specific - reduced probabliltiy on x86 to prevent it from timing out.
+ // LUCENENET specific - reduced probability on x86 to prevent it from timing out.
float checkPointProbability = TestNightly ? (Lucene.Net.Util.Constants.RUNTIME_IS_64BIT ? 0.5f : 0.25f) : 0.1f;
for (int i = 0; i < iters; i++)
{
@@ -473,4 +473,4 @@ public bool Await()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs b/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs
index 2a7c42b8e3..5e3af6de64 100644
--- a/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs
+++ b/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs
@@ -3,9 +3,11 @@
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
using NUnit.Framework;
-using RandomizedTesting.Generators;
using System;
using System.Text.RegularExpressions;
+#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE
+using RandomizedTesting.Generators;
+#endif
namespace Lucene.Net.Index
{
@@ -189,7 +191,7 @@ public void TestEquals_GH_545()
Directory leftDir = new Store.RAMDirectory();
Directory rightDir = new Store.RAMDirectory();
- int maxTermLength = 21678;
+ const int maxTermLength = 21678;
Analysis.Analyzer leftAnalyzer = new Analysis.Standard.StandardAnalyzer(TEST_VERSION_CURRENT) { MaxTokenLength = maxTermLength };
Analysis.Analyzer rightAnalyzer = new Analysis.Standard.StandardAnalyzer(TEST_VERSION_CURRENT) { MaxTokenLength = maxTermLength };
@@ -251,8 +253,8 @@ public void TestEquals_GH_545()
// From AssertTermsEquals
//string re = "??(*)+*.\U000e06d7*"; // Before escaping
- string re = "??(\ue808*)+*.\udb41\uded7*"; // Faulty Regex
- Util.Automaton.CompiledAutomaton automaton = new Util.Automaton.CompiledAutomaton((new Util.Automaton.RegExp(re, Util.Automaton.RegExpSyntax.NONE)).ToAutomaton());
+ const string re = "??(\ue808*)+*.\udb41\uded7*"; // Faulty Regex
+ Util.Automaton.CompiledAutomaton automaton = new Util.Automaton.CompiledAutomaton(new Util.Automaton.RegExp(re, Util.Automaton.RegExpSyntax.NONE).ToAutomaton());
if (automaton.Type == Util.Automaton.CompiledAutomaton.AUTOMATON_TYPE.NORMAL)
{
// From AssertTermsEnumEquals
@@ -276,4 +278,4 @@ public void TestEquals_GH_545()
Util.IOUtils.Dispose(leftDir, rightDir);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs b/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs
index 87e5846c2a..23dc166a42 100644
--- a/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs
+++ b/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Index
/// Tests that a useful exception is thrown when attempting to index a term that is
/// too large
/// </summary>
- /// <seealso cref="IndexWriter#MAX_TERM_LENGTH"/>
+ /// <seealso cref="IndexWriter.MAX_TERM_LENGTH"/>
[TestFixture]
public class TestExceedMaxTermLength : LuceneTestCase
{
@@ -106,4 +106,4 @@ public virtual void Test()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestFieldInfos.cs b/src/Lucene.Net.Tests/Index/TestFieldInfos.cs
index ea9fb2db09..0a45fd82b9 100644
--- a/src/Lucene.Net.Tests/Index/TestFieldInfos.cs
+++ b/src/Lucene.Net.Tests/Index/TestFieldInfos.cs
@@ -73,7 +73,7 @@ public virtual FieldInfos ReadFieldInfos(Directory dir, string filename)
[Test]
public virtual void Test()
{
- string name = "testFile";
+ const string name = "testFile";
Directory dir = NewDirectory();
FieldInfos fieldInfos = CreateAndWriteFieldInfos(dir, name);
@@ -104,7 +104,7 @@ public virtual void Test()
[Test]
public virtual void TestReadOnly()
{
- string name = "testFile";
+ const string name = "testFile";
Directory dir = NewDirectory();
FieldInfos fieldInfos = CreateAndWriteFieldInfos(dir, name);
FieldInfos readOnly = ReadFieldInfos(dir, name);
@@ -122,4 +122,4 @@ private void AssertReadOnly(FieldInfos readOnly, FieldInfos modifiable)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestFieldsReader.cs b/src/Lucene.Net.Tests/Index/TestFieldsReader.cs
index 0bca5172b5..23c6030f1a 100644
--- a/src/Lucene.Net.Tests/Index/TestFieldsReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestFieldsReader.cs
@@ -44,10 +44,6 @@ public class TestFieldsReader : LuceneTestCase
private static Document testDoc;
private static FieldInfos.Builder fieldInfos = null;
- /// <summary>
- /// LUCENENET specific
- /// Is non-static because NewIndexWriterConfig is no longer static.
- /// </summary>
[OneTimeSetUp]
public override void BeforeClass()
{
@@ -283,4 +279,4 @@ public virtual void TestExceptions()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestFlex.cs b/src/Lucene.Net.Tests/Index/TestFlex.cs
index 0e0d839041..e1a63912e0 100644
--- a/src/Lucene.Net.Tests/Index/TestFlex.cs
+++ b/src/Lucene.Net.Tests/Index/TestFlex.cs
@@ -39,13 +39,16 @@ public virtual void TestNonFlex()
const int DOC_COUNT = 177;
- IndexWriter w = new IndexWriter(d, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(7).SetMergePolicy(NewLogMergePolicy()));
+ IndexWriter w = new IndexWriter(d,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(7)
+ .SetMergePolicy(NewLogMergePolicy()));
for (int iter = 0; iter < 2; iter++)
{
if (iter == 0)
{
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
doc.Add(NewTextField("field1", "this is field1", Field.Store.NO));
doc.Add(NewTextField("field2", "this is field2", Field.Store.NO));
doc.Add(NewTextField("field3", "aaa", Field.Store.NO));
@@ -75,8 +78,9 @@ public virtual void TestNonFlex()
public virtual void TestTermOrd()
{
Directory d = NewDirectory();
- IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
- Documents.Document doc = new Documents.Document();
+ IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
+ Document doc = new Document();
doc.Add(NewTextField("f", "a b c", Field.Store.NO));
w.AddDocument(doc);
w.ForceMerge(1);
@@ -96,4 +100,4 @@ public virtual void TestTermOrd()
d.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
index 2e39dde683..ad49c21b04 100644
--- a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
@@ -43,9 +43,8 @@ namespace Lucene.Net.Index
// LUCENENET specific - Specify to unzip the line file docs
[UseTempLineDocsFile]
[Timeout(900_000)] // 15 minutes
- public class TestFlushByRamOrCountsPolicy : LuceneTestCase
+ public class TestFlushByRamOrCountsPolicy : LuceneTestCase
{
-
private static LineFileDocs lineDocFile;
[OneTimeSetUp]
@@ -95,7 +94,8 @@ protected internal virtual void RunFlushByRam(int numThreads, double maxRamMB, b
MockAnalyzer analyzer = new MockAnalyzer(Random);
analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
- IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetFlushPolicy(flushPolicy);
+ IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+ .SetFlushPolicy(flushPolicy);
int numDWPT = 1 + AtLeast(2);
DocumentsWriterPerThreadPool threadPool = new DocumentsWriterPerThreadPool(numDWPT);
iwc.SetIndexerThreadPool(threadPool);
@@ -156,7 +156,8 @@ public virtual void TestFlushDocCount()
AtomicInt32 numDocs = new AtomicInt32(numDocumentsToIndex);
Directory dir = NewDirectory();
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
- IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetFlushPolicy(flushPolicy);
+ IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetFlushPolicy(flushPolicy);
int numDWPT = 1 + AtLeast(2);
DocumentsWriterPerThreadPool threadPool = new DocumentsWriterPerThreadPool(numDWPT);
@@ -500,4 +501,4 @@ internal static void FindPending(DocumentsWriterFlushControl flushControl, IList
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs b/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs
index 92498313da..7315fb482a 100644
--- a/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs
+++ b/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs
@@ -49,7 +49,9 @@ public virtual void Test()
MockDirectoryWrapper dir = NewMockDirectory();
TieredMergePolicy tmp = new TieredMergePolicy();
tmp.MaxMergeAtOnce = 2;
- RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(tmp));
+ RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(tmp));
const int numDocs = 20;
for (int docs = 0; docs < numDocs; docs++)
{
@@ -84,4 +86,4 @@ public virtual void Test()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs b/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
index 528abe14bf..0d6ff1ced8 100644
--- a/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
+++ b/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
@@ -38,7 +38,7 @@ public class TestForceMergeForever : LuceneTestCase
private class MyIndexWriter : IndexWriter
{
internal AtomicInt32 mergeCount = new AtomicInt32();
- internal bool first;
+ private bool first;
public MyIndexWriter(Directory dir, IndexWriterConfig conf)
: base(dir, conf)
@@ -97,7 +97,7 @@ public virtual void Test()
AtomicBoolean doStop = new AtomicBoolean();
w.Config.SetMaxBufferedDocs(2);
- ThreadJob t = new ThreadAnonymousClass(this, w, numStartDocs, docs, doStop);
+ ThreadJob t = new ThreadAnonymousClass(w, numStartDocs, docs, doStop);
t.Start();
w.ForceMerge(1);
doStop.Value = true;
@@ -110,16 +110,13 @@ public virtual void Test()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestForceMergeForever outerInstance;
-
private readonly MyIndexWriter w;
private readonly int numStartDocs;
private readonly LineFileDocs docs;
private readonly AtomicBoolean doStop;
- public ThreadAnonymousClass(TestForceMergeForever outerInstance, Lucene.Net.Index.TestForceMergeForever.MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
+ public ThreadAnonymousClass(MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
{
- this.outerInstance = outerInstance;
this.w = w;
this.numStartDocs = numStartDocs;
this.docs = docs;
@@ -144,4 +141,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
index a482874c78..ef6c5ebadb 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
@@ -2,7 +2,6 @@
using J2N.Threading;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Attributes;
using Lucene.Net.Codecs;
using Lucene.Net.Codecs.SimpleText;
using Lucene.Net.Diagnostics;
@@ -10,7 +9,6 @@
using Lucene.Net.Index.Extensions;
using Lucene.Net.Search;
using Lucene.Net.Support;
-using Lucene.Net.Support.Threading;
using Lucene.Net.Util;
using NUnit.Framework;
using RandomizedTesting.Generators;
@@ -184,7 +182,7 @@ private static void AddDocWithIndex(IndexWriter writer, int index)
public static void AssertNoUnreferencedFiles(Directory dir, string message)
{
string[] startFiles = dir.ListAll();
- (new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)))).Rollback();
+ new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).Rollback();
string[] endFiles = dir.ListAll();
Array.Sort(startFiles, StringComparer.Ordinal);
@@ -296,7 +294,7 @@ public virtual void TestIndexNoDocuments()
public virtual void TestManyFields()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10));
for (int j = 0; j < 100; j++)
{
Document doc = new Document();
@@ -329,9 +327,10 @@ public virtual void TestManyFields()
[Test]
public virtual void TestSmallRAMBuffer()
{
-
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.000001).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetRAMBufferSizeMB(0.000001)
+ .SetMergePolicy(NewLogMergePolicy(10)));
int lastNumFile = dir.ListAll().Length;
for (int j = 0; j < 9; j++)
{
@@ -371,8 +370,8 @@ public virtual void TestChangingRAMBuffer()
lastFlushCount = flushCount;
}
else if (j < 10)
- // No new files should be created
{
+ // No new files should be created
Assert.AreEqual(flushCount, lastFlushCount);
}
else if (10 == j)
@@ -518,7 +517,7 @@ public virtual void TestChangingRAMBuffer2()
public virtual void TestDiverseDocs()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.5));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.5));
int n = AtLeast(1);
for (int i = 0; i < n; i++)
{
@@ -576,7 +575,7 @@ public virtual void TestDiverseDocs()
public virtual void TestEnablingNorms()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10));
// Enable norms for only 1 doc, pre flush
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.OmitNorms = true;
@@ -605,7 +604,9 @@ public virtual void TestEnablingNorms()
Assert.AreEqual(10, hits.Length);
reader.Dispose();
- writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(10));
+ writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMaxBufferedDocs(10));
// Enable norms for only 1 doc, post flush
for (int j = 0; j < 27; j++)
{
@@ -639,7 +640,7 @@ public virtual void TestEnablingNorms()
public virtual void TestHighFreqTerm()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.01));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.01));
// Massive doc that has 128 K a's
StringBuilder b = new StringBuilder(1024 * 1024);
for (int i = 0; i < 4096; i++)
@@ -719,7 +720,9 @@ public virtual void TestNullLockFactory()
public virtual void TestFlushWithNoMerging()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy(10)));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.StoreTermVectors = true;
@@ -795,7 +798,9 @@ public virtual void TestMaxThreadPriority()
try
{
Directory dir = NewDirectory();
- IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy());
((LogMergePolicy)conf.MergePolicy).MergeFactor = 2;
IndexWriter iw = new IndexWriter(dir, conf);
Document document = new Document();
@@ -831,7 +836,7 @@ public virtual void TestVariableSchema()
//lmp.setMergeFactor(2);
//lmp.setNoCFSRatio(0.0);
Document doc = new Document();
- string contents = "aa bb cc dd ee ff gg hh ii jj kk";
+ const string contents = "aa bb cc dd ee ff gg hh ii jj kk";
FieldType customType = new FieldType(TextField.TYPE_STORED);
FieldType type = null;
@@ -1018,7 +1023,7 @@ public virtual void TestDoBeforeAfterFlush()
[Test]
public virtual void TestNegativePositions()
{
- TokenStream tokens = new TokenStreamAnonymousClass(this);
+ TokenStream tokens = new TokenStreamAnonymousClass();
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
@@ -1039,11 +1044,8 @@ public virtual void TestNegativePositions()
private sealed class TokenStreamAnonymousClass : TokenStream
{
- private readonly TestIndexWriter outerInstance;
-
- public TokenStreamAnonymousClass(TestIndexWriter outerInstance)
+ public TokenStreamAnonymousClass()
{
- this.outerInstance = outerInstance;
termAtt = AddAttribute<ICharTermAttribute>();
posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
terms = new JCG.List<string> { "a", "b", "c" }.GetEnumerator();
@@ -1115,7 +1117,7 @@ public virtual void TestPositionIncrementGapEmptyField()
public virtual void TestDeadlock()
{
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -1151,8 +1153,6 @@ public virtual void TestDeadlock()
private class IndexerThreadInterrupt : ThreadJob
{
- private readonly TestIndexWriter outerInstance;
-
internal volatile bool failed;
internal volatile bool finish;
@@ -1160,9 +1160,8 @@ private class IndexerThreadInterrupt : ThreadJob
internal readonly Random random;
internal readonly Directory adder;
- internal IndexerThreadInterrupt(TestIndexWriter outerInstance)
+ internal IndexerThreadInterrupt()
{
- this.outerInstance = outerInstance;
this.random = new J2N.Randomizer(Random.NextInt64());
// make a little directory for addIndexes
// LUCENE-2239: won't work with NIOFS/MMAP
@@ -1444,7 +1443,7 @@ public override void Run()
/// <summary>
/// Safely gets the ToString() of an exception while ignoring any System.Threading.ThreadInterruptedException and retrying.
/// </summary>
- private string GetToStringFrom(Exception exception)
+ private static string GetToStringFrom(Exception exception)
{
// Clear interrupt state:
try
@@ -1471,8 +1470,8 @@ private string GetToStringFrom(Exception exception)
[Ignore("Lucene.NET does not support Thread.Interrupt(). See https://github.com/apache/lucenenet/issues/526.")]
public virtual void TestThreadInterruptDeadlock()
{
- IndexerThreadInterrupt t = new IndexerThreadInterrupt(this);
- t.IsBackground = (true);
+ IndexerThreadInterrupt t = new IndexerThreadInterrupt();
+ t.IsBackground = true;
t.Start();
// Force class loader to load ThreadInterruptedException
@@ -1512,19 +1511,19 @@ public virtual void TestThreadInterruptDeadlock()
[Ignore("Lucene.NET does not support Thread.Interrupt(). See https://github.com/apache/lucenenet/issues/526.")]
public virtual void TestTwoThreadsInterruptDeadlock()
{
- IndexerThreadInterrupt t1 = new IndexerThreadInterrupt(this);
- t1.IsBackground = (true);
+ IndexerThreadInterrupt t1 = new IndexerThreadInterrupt();
+ t1.IsBackground = true;
t1.Start();
- IndexerThreadInterrupt t2 = new IndexerThreadInterrupt(this);
- t2.IsBackground = (true);
+ IndexerThreadInterrupt t2 = new IndexerThreadInterrupt();
+ t2.IsBackground = true;
t2.Start();
// Force class loader to load ThreadInterruptedException
// up front... else we can see a false failure if 2nd
// interrupt arrives while class loader is trying to
// init this class (in servicing a first interrupt):
- Assert.IsTrue((new Util.ThreadInterruptedException(new System.Threading.ThreadInterruptedException())).InnerException is System.Threading.ThreadInterruptedException);
+ Assert.IsTrue(new Util.ThreadInterruptedException(new System.Threading.ThreadInterruptedException()).InnerException is System.Threading.ThreadInterruptedException);
// issue 300 interrupts to child thread
int numInterrupts = AtLeast(300);
@@ -1684,7 +1683,9 @@ public virtual void TestDeleteUnusedFiles()
mergePolicy.NoCFSRatio = 1.0;
mergePolicy.MaxCFSSegmentSizeMB = double.PositiveInfinity;
- IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(mergePolicy).SetUseCompoundFile(true));
+ IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(mergePolicy)
+ .SetUseCompoundFile(true));
Document doc = new Document();
doc.Add(NewTextField("field", "go", Field.Store.NO));
w.AddDocument(doc);
@@ -1776,7 +1777,8 @@ public virtual void TestDeleteUnsedFiles2()
// Validates that iw.DeleteUnusedFiles() also deletes unused index commits
// in case a deletion policy which holds onto commits is used.
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
// First commit
@@ -1818,7 +1820,7 @@ public virtual void TestEmptyFSDirWithNoLock()
// then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
// when listAll() was called in IndexFileDeleter.
Directory dir = NewFSDirectory(CreateTempDir("emptyFSDirNoLock"), NoLockFactory.GetNoLockFactory());
- (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)))).Dispose();
+ new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).Dispose();
dir.Dispose();
}
@@ -1832,7 +1834,10 @@ public virtual void TestEmptyDirRollback()
// indexed, flushed (but not committed) and then IW rolls back, then no
// files are left in the Directory.
Directory dir = NewDirectory();
- IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()).SetUseCompoundFile(false));
+ IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetMergePolicy(NewLogMergePolicy())
+ .SetUseCompoundFile(false));
string[] files = dir.ListAll();
// Creating over empty dir should not create any files,
@@ -1893,7 +1898,8 @@ public virtual void TestNoSegmentFile()
{
BaseDirectoryWrapper dir = NewDirectory();
dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
- IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));
+ IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -1903,7 +1909,9 @@ public virtual void TestNoSegmentFile()
doc.Add(NewField("c", "val", customType));
w.AddDocument(doc);
w.AddDocument(doc);
- IndexWriter w2 = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetOpenMode(OpenMode.CREATE));
+ IndexWriter w2 = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(2)
+ .SetOpenMode(OpenMode.CREATE));
w2.Dispose();
// If we don't do that, the test fails on Windows
@@ -2193,9 +2201,7 @@ public virtual void TestWhetherDeleteAllDeletesWriteLock()
new RandomIndexWriter(Random, d, NewIndexWriterConfig(TEST_VERSION_CURRENT, null).SetWriteLockTimeout(100));
Assert.Fail("should not be able to create another writer");
}
-#pragma warning disable 168
- catch (LockObtainFailedException lofe)
-#pragma warning restore 168
+ catch (LockObtainFailedException /*lofe*/)
{
// expected
}
@@ -2299,7 +2305,7 @@ public virtual void TestPrepareCommitThenRollback2()
[Test]
public virtual void TestDontInvokeAnalyzerForUnAnalyzedFields()
{
- Analyzer analyzer = new AnalyzerAnonymousClass(this);
+ Analyzer analyzer = new AnalyzerAnonymousClass();
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
@@ -2320,13 +2326,6 @@ public virtual void TestDontInvokeAnalyzerForUnAnalyzedFields()
private sealed class AnalyzerAnonymousClass : Analyzer
{
- private readonly TestIndexWriter outerInstance;
-
- public AnalyzerAnonymousClass(TestIndexWriter outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
{
throw IllegalStateException.Create("don't invoke me!");
@@ -2443,7 +2442,7 @@ public virtual void TestOtherFiles2()
@out.WriteByte((byte)42);
@out.Dispose();
- (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)))).Dispose();
+ new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).Dispose();
Assert.IsTrue(SlowFileExists(dir, "_a.frq"));
@@ -2710,7 +2709,7 @@ public RandomFailingFieldEnumerable(IList<IEnumerable<IIndexableField>> docList,
public virtual IEnumerator<IEnumerable<IIndexableField>> GetEnumerator()
{
- return new EnumeratorAnonymousClass(this, docList.GetEnumerator());
+ return new EnumeratorAnonymousClass(docList.GetEnumerator());
}
IEnumerator IEnumerable.GetEnumerator()
@@ -2720,12 +2719,10 @@ IEnumerator IEnumerable.GetEnumerator()
private sealed class EnumeratorAnonymousClass : IEnumerator<IEnumerable<IIndexableField>>
{
- private readonly RandomFailingFieldEnumerable outerInstance;
private readonly IEnumerator<IEnumerable<IIndexableField>> docIter;
- public EnumeratorAnonymousClass(RandomFailingFieldEnumerable outerInstance, IEnumerator<IEnumerable<IIndexableField>> docIter)
+ public EnumeratorAnonymousClass(IEnumerator<IEnumerable<IIndexableField>> docIter)
{
- this.outerInstance = outerInstance;
this.docIter = docIter;
}
@@ -2786,11 +2783,11 @@ public virtual void TestCorruptFirstCommit()
{
if ((i & 1) == 0)
{
- (new IndexWriter(dir, iwc)).Dispose();
+ new IndexWriter(dir, iwc).Dispose();
}
else
{
- (new IndexWriter(dir, iwc)).Rollback();
+ new IndexWriter(dir, iwc).Rollback();
}
if (mode != 0)
{
@@ -2878,7 +2875,7 @@ public virtual void TestMergeAllDeleted()
Directory dir = NewDirectory();
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
SetOnce<IndexWriter> iwRef = new SetOnce<IndexWriter>();
- iwc.SetInfoStream(new TestPointInfoStream(iwc.InfoStream, new TestPointAnonymousClass(this, iwRef)));
+ iwc.SetInfoStream(new TestPointInfoStream(iwc.InfoStream, new TestPointAnonymousClass(iwRef)));
IndexWriter evilWriter = new IndexWriter(dir, iwc);
iwRef.Set(evilWriter);
for (int i = 0; i < 1000; i++)
@@ -2897,13 +2894,10 @@ public virtual void TestMergeAllDeleted()
private sealed class TestPointAnonymousClass : ITestPoint
{
- private readonly TestIndexWriter outerInstance;
-
private SetOnce<IndexWriter> iwRef;
- public TestPointAnonymousClass(TestIndexWriter outerInstance, SetOnce<IndexWriter> iwRef)
+ public TestPointAnonymousClass(SetOnce<IndexWriter> iwRef)
{
- this.outerInstance = outerInstance;
this.iwRef = iwRef;
}
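Note on the TestIndexWriter.cs hunks above: the recurring deletion of `outerInstance` fields removes state the nested classes never read. These fields are leftovers of the Java-to-C# port — Java anonymous inner classes implicitly capture the enclosing instance, while C# nested classes capture nothing — so the field and its constructor parameter can be dropped outright. A minimal sketch of the pattern, using hypothetical names (OuterTest, ICallback):

    // Before: the port carries a Java-style outer reference that nothing uses.
    private sealed class CallbackAnonymousClass : ICallback
    {
        private readonly OuterTest outerInstance; // assigned but never read

        public CallbackAnonymousClass(OuterTest outerInstance)
        {
            this.outerInstance = outerInstance;
        }

        public void Run() { /* touches no outer state */ }
    }

    // After: C# has no implicit capture, so the dead field simply goes away.
    private sealed class CallbackAnonymousClass : ICallback
    {
        public void Run() { /* behavior unchanged */ }
    }
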
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
index a0e8334f78..617a12bc5e 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Search;
+using Lucene.Net.Attributes;
+using Lucene.Net.Search;
using Lucene.Net.Util;
#if FEATURE_INDEXWRITER_TESTS
using J2N.Threading;
@@ -307,6 +308,7 @@ public virtual void TestAddIndexes()
}
[Test]
+ [LuceneNetSpecific]
public virtual void ExposeCompTermVR()
{
bool doFullMerge = false;
@@ -369,7 +371,7 @@ public virtual void TestDeleteFromIndexWriter()
bool doFullMerge = true;
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
- IndexWriter writer = new IndexWriter(dir1, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetReaderTermsIndexDivisor(2));
+ IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetReaderTermsIndexDivisor(2));
// create the index
CreateIndexNoClose(!doFullMerge, "index1", writer);
writer.Flush(false, true);
@@ -506,7 +508,6 @@ internal virtual void JoinThreads()
}
}
-
internal virtual void Close(bool doWait)
{
didClose = true;
@@ -696,9 +697,10 @@ public virtual void DoTestIndexWriterReopenSegment(bool doFullMerge)
* //} //writer.DeleteDocuments(term); td.Dispose(); return doc; }
*/
- public void CreateIndex(Random random, Directory dir1, string indexName, bool multiSegment)
+ public static void CreateIndex(Random random, Directory dir1, string indexName, bool multiSegment)
{
- IndexWriter w = new IndexWriter(dir1, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(new LogDocMergePolicy()));
+ IndexWriter w = new IndexWriter(dir1, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random))
+ .SetMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++)
{
w.AddDocument(DocHelper.CreateDocument(i, indexName, 4));
@@ -757,8 +759,7 @@ public virtual void TestMergeWarmer()
((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;
- //int num = AtLeast(100);
- int num = 101;
+ int num = AtLeast(100);
for (int i = 0; i < num; i++)
{
writer.AddDocument(DocHelper.CreateDocument(i, "test", 4));
@@ -769,8 +770,7 @@ public virtual void TestMergeWarmer()
Console.WriteLine("Count {0}", warmer.warmCount);
int count = warmer.warmCount;
- var newDocument = DocHelper.CreateDocument(17, "test", 4);
- writer.AddDocument(newDocument);
+ writer.AddDocument(DocHelper.CreateDocument(17, "test", 4));
writer.ForceMerge(1);
Assert.IsTrue(warmer.warmCount > count);
@@ -886,7 +886,7 @@ public virtual void TestDuringAddIndexes()
for (int i = 0; i < threads.Length; i++)
{
threads[i] = new ThreadAnonymousClass(writer, dirs, endTime, excs);
- threads[i].IsBackground = (true);
+ threads[i].IsBackground = true;
threads[i].Start();
}
@@ -983,7 +983,8 @@ private Directory GetAssertNoDeletesDirectory(Directory directory)
public virtual void TestDuringAddDelete()
{
Directory dir1 = NewDirectory();
- var writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(2)));
+ var writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NewLogMergePolicy(2)));
// create the index
CreateIndexNoClose(false, "test", writer);
@@ -1000,7 +1001,7 @@ public virtual void TestDuringAddDelete()
for (int i = 0; i < numThreads; i++)
{
threads[i] = new ThreadAnonymousClass2(writer, endTime, excs);
- threads[i].IsBackground = (true);
+ threads[i].IsBackground = true;
threads[i].Start();
}
@@ -1189,7 +1190,7 @@ public override void Warm(AtomicReader r)
IndexSearcher s = NewSearcher(r);
TopDocs hits = s.Search(new TermQuery(new Term("foo", "bar")), 10);
Assert.AreEqual(20, hits.TotalHits);
- didWarm.Value = (true);
+ didWarm.Value = true;
}
}
@@ -1230,7 +1231,7 @@ public override void Message(string component, string message)
{
if ("SMSW".Equals(component, StringComparison.Ordinal))
{
- didWarm.Value = (true);
+ didWarm.Value = true;
}
}
@@ -1247,7 +1248,8 @@ public virtual void TestNoTermsIndex()
// they're picked.
AssumeFalse("PreFlex codec does not support ReaderTermsIndexDivisor!", "Lucene3x".Equals(Codec.Default.Name, StringComparison.Ordinal));
- IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetReaderTermsIndexDivisor(-1);
+ IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetReaderTermsIndexDivisor(-1);
// Don't proceed if picked Codec is in the list of illegal ones.
string format = TestUtil.GetPostingsFormat("f");
@@ -1345,14 +1347,12 @@ public virtual void TestNRTOpenExceptions()
// other NRT reader, since it is already marked closed!
for (int i = 0; i < 2; i++)
{
- shouldFail.Value = (true);
+ shouldFail.Value = true;
try
{
writer.GetReader().Dispose();
}
-#pragma warning disable 168
- catch (FakeIOException e)
-#pragma warning restore 168
+ catch (FakeIOException /*e*/)
{
// expected
if (Verbose)
@@ -1384,9 +1384,9 @@ public override void Eval(MockDirectoryWrapper dir)
if (Verbose)
{
Console.WriteLine("TEST: now fail; exc:");
- Console.WriteLine((new Exception()).StackTrace);
+ Console.WriteLine(new Exception().StackTrace);
}
- shouldFail.Value = (false);
+ shouldFail.Value = false;
throw new FakeIOException();
}
}
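Note on the exception-handling edits above: `catch (FakeIOException /*e*/)` replaces a `#pragma warning disable/restore 168` pair. C# allows a catch clause that names only the exception type, so the unused local is never declared and warning CS0168 cannot fire. A minimal sketch, assuming a hypothetical MightThrow() helper:

    try
    {
        MightThrow(); // hypothetical: any call the test expects to fail
    }
    catch (IOException /* variable intentionally omitted, so CS0168 cannot occur */)
    {
        // expected - swallow and continue the test
    }
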
diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
index 95e5f03fb8..2bec9dd03b 100644
--- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
@@ -94,12 +94,12 @@ internal override DocConsumer GetChain(DocumentsWriterPerThread documentsWriterP
TermsHashConsumer termVectorsWriter = new TermVectorsConsumer(documentsWriterPerThread);
TermsHashConsumer freqProxWriter = new FreqProxTermsWriter();
- InvertedDocConsumer termsHash = new TermsHash(documentsWriterPerThread, freqProxWriter, true,
+ InvertedDocConsumer termsHash = new TermsHash(documentsWriterPerThread, freqProxWriter, true,
new TermsHash(documentsWriterPerThread, termVectorsWriter, false, null));
NormsConsumer normsWriter = new NormsConsumer();
DocInverter docInverter = new DocInverter(documentsWriterPerThread.docState, termsHash, normsWriter);
StoredFieldsConsumer storedFields = new TwoStoredFieldsConsumers(
- new StoredFieldsProcessor(documentsWriterPerThread),
+ new StoredFieldsProcessor(documentsWriterPerThread),
new DocValuesProcessor(documentsWriterPerThread.bytesUsed));
return new DocFieldProcessor(documentsWriterPerThread, docInverter, storedFields);
}
@@ -728,4 +728,4 @@ public override string ToString()
return "DocumentsWriterPerThread [pendingDeletes=" + pendingUpdates + ", segment=" + (segmentInfo != null ? segmentInfo.Name : "null") + ", aborting=" + aborting + ", numDocsInRAM=" + numDocsInRAM + ", deleteQueue=" + deleteQueue + "]";
}
}
-}
\ No newline at end of file
+}
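
Note on the IndexWriterConfig edits throughout both test files: they are formatting-only. Each fluent `Set...` call moves onto its own line, and the `(IndexWriterConfig)` casts are dropped — the chains type-check without them, presumably because the .NET port's setters return `IndexWriterConfig` rather than a base type as in Java. A minimal sketch of the target shape, with placeholder arguments:

    IndexWriter writer = new IndexWriter(dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
            .SetMaxBufferedDocs(2)                 // flush after two buffered docs
            .SetMergePolicy(NewLogMergePolicy())); // deterministic merges for the test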